aboutsummaryrefslogtreecommitdiff
path: root/talk/app/webrtc
diff options
context:
space:
mode:
authorChih-hung Hsieh <chh@google.com>2015-12-01 17:07:48 +0000
committerandroid-build-merger <android-build-merger@google.com>2015-12-01 17:07:48 +0000
commita4acd9d6bc9b3b033d7d274316e75ee067df8d20 (patch)
tree672a185b294789cf991f385c3e395dd63bea9063 /talk/app/webrtc
parent3681b90ba4fe7a27232dd3e27897d5d7ed9d651c (diff)
parentfe8b4a657979b49e1701bd92f6d5814a99e0b2be (diff)
downloadwebrtc-a4acd9d6bc9b3b033d7d274316e75ee067df8d20.tar.gz
Merge changes I7bbf776e,I1b827825
am: fe8b4a6579 * commit 'fe8b4a657979b49e1701bd92f6d5814a99e0b2be': (7237 commits) WIP: Changes after merge commit 'cb3f9bd' Make the nonlinear beamformer steerable Utilize bitrate above codec max to protect video. Enable VP9 internal resize by default. Filter overlapping RTP header extensions. Make VCMEncodedFrameCallback const. MediaCodecVideoEncoder: Add number of quality resolution downscales to Encoded callback. Remove redudant encoder rate calls. Create isolate files for nonparallel tests. Register header extensions in RtpRtcpObserver to avoid log spam. Make an enum class out of NetEqDecoder, and hide the neteq_decoders_ table ACM: Move NACK functionality inside NetEq Fix chromium-style warnings in webrtc/sound/. Create a 'webrtc_nonparallel_tests' target. Update scalability structure data according to updates in the RTP payload profile. audio_coding: rename interface -> include Rewrote perform_action_on_all_files to be parallell. Update reference indices according to updates in the RTP payload profile. Disable P2PTransport...TestFailoverControlledSide on Memcheck pass clangcl compile options to ignore warnings in gflags.cc ...
Diffstat (limited to 'talk/app/webrtc')
-rw-r--r--talk/app/webrtc/OWNERS5
-rw-r--r--talk/app/webrtc/androidtests/AndroidManifest.xml29
-rw-r--r--talk/app/webrtc/androidtests/OWNERS1
-rw-r--r--talk/app/webrtc/androidtests/ant.properties18
-rw-r--r--talk/app/webrtc/androidtests/build.xml92
-rw-r--r--talk/app/webrtc/androidtests/project.properties16
-rw-r--r--talk/app/webrtc/androidtests/res/drawable-hdpi/ic_launcher.pngbin0 -> 9397 bytes
-rw-r--r--talk/app/webrtc/androidtests/res/drawable-ldpi/ic_launcher.pngbin0 -> 2729 bytes
-rw-r--r--talk/app/webrtc/androidtests/res/drawable-mdpi/ic_launcher.pngbin0 -> 5237 bytes
-rw-r--r--talk/app/webrtc/androidtests/res/drawable-xhdpi/ic_launcher.pngbin0 -> 14383 bytes
-rw-r--r--talk/app/webrtc/androidtests/res/values/strings.xml6
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java310
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/NetworkMonitorTest.java288
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/PeerConnectionAndroidTest.java48
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java189
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java319
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java217
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java261
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java443
-rw-r--r--talk/app/webrtc/androidvideocapturer.cc240
-rw-r--r--talk/app/webrtc/androidvideocapturer.h108
-rw-r--r--talk/app/webrtc/audiotrack.cc53
-rw-r--r--talk/app/webrtc/audiotrack.h69
-rw-r--r--talk/app/webrtc/datachannel.cc655
-rw-r--r--talk/app/webrtc/datachannel.h299
-rw-r--r--talk/app/webrtc/datachannel_unittest.cc581
-rw-r--r--talk/app/webrtc/datachannelinterface.h159
-rw-r--r--talk/app/webrtc/dtlsidentitystore.cc248
-rw-r--r--talk/app/webrtc/dtlsidentitystore.h164
-rw-r--r--talk/app/webrtc/dtlsidentitystore_unittest.cc152
-rw-r--r--talk/app/webrtc/dtmfsender.cc257
-rw-r--r--talk/app/webrtc/dtmfsender.h139
-rw-r--r--talk/app/webrtc/dtmfsender_unittest.cc359
-rw-r--r--talk/app/webrtc/dtmfsenderinterface.h105
-rw-r--r--talk/app/webrtc/fakemediacontroller.h55
-rw-r--r--talk/app/webrtc/fakemetricsobserver.cc79
-rw-r--r--talk/app/webrtc/fakemetricsobserver.h68
-rw-r--r--talk/app/webrtc/fakeportallocatorfactory.h76
-rw-r--r--talk/app/webrtc/java/README23
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java119
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java236
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java102
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/EglBase.java271
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java272
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/GlShader.java144
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/GlTextureFrameBuffer.java142
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/GlUtil.java75
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/NetworkMonitor.java228
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java424
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/OWNERS1
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/RendererCommon.java190
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java229
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java541
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java143
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java896
-rw-r--r--talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java663
-rw-r--r--talk/app/webrtc/java/jni/OWNERS1
-rw-r--r--talk/app/webrtc/java/jni/androidmediacodeccommon.h113
-rw-r--r--talk/app/webrtc/java/jni/androidmediadecoder_jni.cc865
-rw-r--r--talk/app/webrtc/java/jni/androidmediadecoder_jni.h59
-rw-r--r--talk/app/webrtc/java/jni/androidmediaencoder_jni.cc967
-rw-r--r--talk/app/webrtc/java/jni/androidmediaencoder_jni.h59
-rw-r--r--talk/app/webrtc/java/jni/androidnetworkmonitor_jni.cc85
-rw-r--r--talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h67
-rw-r--r--talk/app/webrtc/java/jni/androidvideocapturer_jni.cc266
-rw-r--r--talk/app/webrtc/java/jni/androidvideocapturer_jni.h110
-rw-r--r--talk/app/webrtc/java/jni/classreferenceholder.cc151
-rw-r--r--talk/app/webrtc/java/jni/classreferenceholder.h59
-rw-r--r--talk/app/webrtc/java/jni/jni_helpers.cc288
-rw-r--r--talk/app/webrtc/java/jni/jni_helpers.h141
-rw-r--r--talk/app/webrtc/java/jni/native_handle_impl.cc95
-rw-r--r--talk/app/webrtc/java/jni/native_handle_impl.h89
-rw-r--r--talk/app/webrtc/java/jni/peerconnection_jni.cc2055
-rw-r--r--talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc79
-rw-r--r--talk/app/webrtc/java/jni/surfacetexturehelper_jni.h85
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/AudioSource.java38
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/AudioTrack.java35
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/CallSessionFileRotatingLogSink.java57
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/DataChannel.java143
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/IceCandidate.java48
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java418
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java447
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java101
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaSource.java55
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaStream.java134
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java86
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/PeerConnection.java294
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java212
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/RtpReceiver.java63
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/RtpSender.java79
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/SdpObserver.java43
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/SessionDescription.java57
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/StatsObserver.java34
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/StatsReport.java72
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java70
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java179
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/VideoSource.java63
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/VideoTrack.java68
-rw-r--r--talk/app/webrtc/java/testcommon/src/org/webrtc/PeerConnectionTest.java784
-rwxr-xr-xtalk/app/webrtc/javatests/libjingle_peerconnection_java_unittest.sh57
-rw-r--r--talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTestJava.java62
-rw-r--r--talk/app/webrtc/jsep.h155
-rw-r--r--talk/app/webrtc/jsepicecandidate.cc99
-rw-r--r--talk/app/webrtc/jsepicecandidate.h92
-rw-r--r--talk/app/webrtc/jsepsessiondescription.cc202
-rw-r--r--talk/app/webrtc/jsepsessiondescription.h106
-rw-r--r--talk/app/webrtc/jsepsessiondescription_unittest.cc245
-rw-r--r--talk/app/webrtc/localaudiosource.cc113
-rw-r--r--talk/app/webrtc/localaudiosource.h72
-rw-r--r--talk/app/webrtc/localaudiosource_unittest.cc133
-rw-r--r--talk/app/webrtc/mediaconstraintsinterface.cc163
-rw-r--r--talk/app/webrtc/mediaconstraintsinterface.h153
-rw-r--r--talk/app/webrtc/mediacontroller.cc96
-rw-r--r--talk/app/webrtc/mediacontroller.h55
-rw-r--r--talk/app/webrtc/mediastream.cc112
-rw-r--r--talk/app/webrtc/mediastream.h75
-rw-r--r--talk/app/webrtc/mediastream_unittest.cc163
-rw-r--r--talk/app/webrtc/mediastreamhandler.cc29
-rw-r--r--talk/app/webrtc/mediastreamhandler.h29
-rw-r--r--talk/app/webrtc/mediastreaminterface.h273
-rw-r--r--talk/app/webrtc/mediastreamprovider.h96
-rw-r--r--talk/app/webrtc/mediastreamproxy.h54
-rw-r--r--talk/app/webrtc/mediastreamsignaling.cc30
-rw-r--r--talk/app/webrtc/mediastreamsignaling.h28
-rw-r--r--talk/app/webrtc/mediastreamtrack.h81
-rw-r--r--talk/app/webrtc/mediastreamtrackproxy.h77
-rw-r--r--talk/app/webrtc/notifier.h77
-rw-r--r--talk/app/webrtc/objc/.clang-format10
-rw-r--r--talk/app/webrtc/objc/OWNERS1
-rw-r--r--talk/app/webrtc/objc/README80
-rw-r--r--talk/app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h36
-rw-r--r--talk/app/webrtc/objc/RTCAVFoundationVideoSource.mm69
-rw-r--r--talk/app/webrtc/objc/RTCAudioTrack+Internal.h37
-rw-r--r--talk/app/webrtc/objc/RTCAudioTrack.mm45
-rw-r--r--talk/app/webrtc/objc/RTCDataChannel+Internal.h55
-rw-r--r--talk/app/webrtc/objc/RTCDataChannel.mm290
-rw-r--r--talk/app/webrtc/objc/RTCEAGLVideoView.m277
-rw-r--r--talk/app/webrtc/objc/RTCEnumConverter.h83
-rw-r--r--talk/app/webrtc/objc/RTCEnumConverter.mm231
-rw-r--r--talk/app/webrtc/objc/RTCFileLogger.mm157
-rw-r--r--talk/app/webrtc/objc/RTCI420Frame+Internal.h36
-rw-r--r--talk/app/webrtc/objc/RTCI420Frame.mm101
-rw-r--r--talk/app/webrtc/objc/RTCICECandidate+Internal.h39
-rw-r--r--talk/app/webrtc/objc/RTCICECandidate.mm87
-rw-r--r--talk/app/webrtc/objc/RTCICEServer+Internal.h38
-rw-r--r--talk/app/webrtc/objc/RTCICEServer.mm75
-rw-r--r--talk/app/webrtc/objc/RTCLogging.mm64
-rw-r--r--talk/app/webrtc/objc/RTCMediaConstraints+Internal.h40
-rw-r--r--talk/app/webrtc/objc/RTCMediaConstraints.mm76
-rw-r--r--talk/app/webrtc/objc/RTCMediaConstraintsNative.cc51
-rw-r--r--talk/app/webrtc/objc/RTCMediaConstraintsNative.h50
-rw-r--r--talk/app/webrtc/objc/RTCMediaSource+Internal.h40
-rw-r--r--talk/app/webrtc/objc/RTCMediaSource.mm65
-rw-r--r--talk/app/webrtc/objc/RTCMediaStream+Internal.h40
-rw-r--r--talk/app/webrtc/objc/RTCMediaStream.mm146
-rw-r--r--talk/app/webrtc/objc/RTCMediaStreamTrack+Internal.h40
-rw-r--r--talk/app/webrtc/objc/RTCMediaStreamTrack.mm127
-rw-r--r--talk/app/webrtc/objc/RTCNSGLVideoView.m158
-rw-r--r--talk/app/webrtc/objc/RTCOpenGLVideoRenderer.mm503
-rw-r--r--talk/app/webrtc/objc/RTCPair.m47
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnection+Internal.h48
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnection.mm303
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionFactory+Internal.h38
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionFactory.mm148
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h37
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionInterface.mm93
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionObserver.h75
-rw-r--r--talk/app/webrtc/objc/RTCPeerConnectionObserver.mm108
-rw-r--r--talk/app/webrtc/objc/RTCSessionDescription+Internal.h41
-rw-r--r--talk/app/webrtc/objc/RTCSessionDescription.mm81
-rw-r--r--talk/app/webrtc/objc/RTCStatsReport+Internal.h36
-rw-r--r--talk/app/webrtc/objc/RTCStatsReport.mm69
-rw-r--r--talk/app/webrtc/objc/RTCVideoCapturer+Internal.h38
-rw-r--r--talk/app/webrtc/objc/RTCVideoCapturer.mm74
-rw-r--r--talk/app/webrtc/objc/RTCVideoRendererAdapter.h40
-rw-r--r--talk/app/webrtc/objc/RTCVideoRendererAdapter.mm79
-rw-r--r--talk/app/webrtc/objc/RTCVideoSource+Internal.h37
-rw-r--r--talk/app/webrtc/objc/RTCVideoSource.mm44
-rw-r--r--talk/app/webrtc/objc/RTCVideoTrack+Internal.h40
-rw-r--r--talk/app/webrtc/objc/RTCVideoTrack.mm122
-rw-r--r--talk/app/webrtc/objc/avfoundationvideocapturer.h79
-rw-r--r--talk/app/webrtc/objc/avfoundationvideocapturer.mm447
-rw-r--r--talk/app/webrtc/objc/public/RTCAVFoundationVideoSource.h49
-rw-r--r--talk/app/webrtc/objc/public/RTCAudioSource.h40
-rw-r--r--talk/app/webrtc/objc/public/RTCAudioTrack.h39
-rw-r--r--talk/app/webrtc/objc/public/RTCDataChannel.h118
-rw-r--r--talk/app/webrtc/objc/public/RTCEAGLVideoView.h46
-rw-r--r--talk/app/webrtc/objc/public/RTCFileLogger.h70
-rw-r--r--talk/app/webrtc/objc/public/RTCI420Frame.h55
-rw-r--r--talk/app/webrtc/objc/public/RTCICECandidate.h56
-rw-r--r--talk/app/webrtc/objc/public/RTCICEServer.h49
-rw-r--r--talk/app/webrtc/objc/public/RTCLogging.h92
-rw-r--r--talk/app/webrtc/objc/public/RTCMediaConstraints.h39
-rw-r--r--talk/app/webrtc/objc/public/RTCMediaSource.h44
-rw-r--r--talk/app/webrtc/objc/public/RTCMediaStream.h51
-rw-r--r--talk/app/webrtc/objc/public/RTCMediaStreamTrack.h59
-rw-r--r--talk/app/webrtc/objc/public/RTCNSGLVideoView.h47
-rw-r--r--talk/app/webrtc/objc/public/RTCOpenGLVideoRenderer.h73
-rw-r--r--talk/app/webrtc/objc/public/RTCPair.h45
-rw-r--r--talk/app/webrtc/objc/public/RTCPeerConnection.h127
-rw-r--r--talk/app/webrtc/objc/public/RTCPeerConnectionDelegate.h72
-rw-r--r--talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h77
-rw-r--r--talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h75
-rw-r--r--talk/app/webrtc/objc/public/RTCSessionDescription.h50
-rw-r--r--talk/app/webrtc/objc/public/RTCSessionDescriptionDelegate.h49
-rw-r--r--talk/app/webrtc/objc/public/RTCStatsDelegate.h39
-rw-r--r--talk/app/webrtc/objc/public/RTCStatsReport.h45
-rw-r--r--talk/app/webrtc/objc/public/RTCTypes.h79
-rw-r--r--talk/app/webrtc/objc/public/RTCVideoCapturer.h42
-rw-r--r--talk/app/webrtc/objc/public/RTCVideoRenderer.h43
-rw-r--r--talk/app/webrtc/objc/public/RTCVideoSource.h39
-rw-r--r--talk/app/webrtc/objc/public/RTCVideoTrack.h55
-rw-r--r--talk/app/webrtc/objctests/Info.plist24
-rw-r--r--talk/app/webrtc/objctests/OWNERS1
-rw-r--r--talk/app/webrtc/objctests/README1
-rw-r--r--talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.h60
-rw-r--r--talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m263
-rw-r--r--talk/app/webrtc/objctests/RTCPeerConnectionTest.mm346
-rw-r--r--talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.h49
-rw-r--r--talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m103
-rw-r--r--talk/app/webrtc/objctests/mac/main.mm37
-rw-r--r--talk/app/webrtc/peerconnection.cc1975
-rw-r--r--talk/app/webrtc/peerconnection.h396
-rw-r--r--talk/app/webrtc/peerconnection_unittest.cc1752
-rw-r--r--talk/app/webrtc/peerconnectionendtoend_unittest.cc398
-rw-r--r--talk/app/webrtc/peerconnectionfactory.cc322
-rw-r--r--talk/app/webrtc/peerconnectionfactory.h128
-rw-r--r--talk/app/webrtc/peerconnectionfactory_unittest.cc426
-rw-r--r--talk/app/webrtc/peerconnectionfactoryproxy.h83
-rw-r--r--talk/app/webrtc/peerconnectioninterface.h667
-rw-r--r--talk/app/webrtc/peerconnectioninterface_unittest.cc2317
-rw-r--r--talk/app/webrtc/peerconnectionproxy.h79
-rw-r--r--talk/app/webrtc/portallocatorfactory.cc92
-rw-r--r--talk/app/webrtc/portallocatorfactory.h72
-rw-r--r--talk/app/webrtc/proxy.h391
-rw-r--r--talk/app/webrtc/proxy_unittest.cc170
-rw-r--r--talk/app/webrtc/remoteaudiosource.cc72
-rw-r--r--talk/app/webrtc/remoteaudiosource.h66
-rw-r--r--talk/app/webrtc/remotevideocapturer.cc95
-rw-r--r--talk/app/webrtc/remotevideocapturer.h65
-rw-r--r--talk/app/webrtc/remotevideocapturer_unittest.cc132
-rw-r--r--talk/app/webrtc/rtpreceiver.cc106
-rw-r--r--talk/app/webrtc/rtpreceiver.h104
-rw-r--r--talk/app/webrtc/rtpreceiverinterface.h66
-rw-r--r--talk/app/webrtc/rtpsender.cc207
-rw-r--r--talk/app/webrtc/rtpsender.h140
-rw-r--r--talk/app/webrtc/rtpsenderinterface.h70
-rw-r--r--talk/app/webrtc/rtpsenderreceiver_unittest.cc283
-rw-r--r--talk/app/webrtc/sctputils.cc205
-rw-r--r--talk/app/webrtc/sctputils.h58
-rw-r--r--talk/app/webrtc/sctputils_unittest.cc178
-rw-r--r--talk/app/webrtc/statscollector.cc945
-rw-r--r--talk/app/webrtc/statscollector.h170
-rw-r--r--talk/app/webrtc/statscollector_unittest.cc1753
-rw-r--r--talk/app/webrtc/statstypes.cc782
-rw-r--r--talk/app/webrtc/statstypes.h417
-rw-r--r--talk/app/webrtc/streamcollection.h125
-rw-r--r--talk/app/webrtc/test/fakeaudiocapturemodule.cc744
-rw-r--r--talk/app/webrtc/test/fakeaudiocapturemodule.h287
-rw-r--r--talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc216
-rw-r--r--talk/app/webrtc/test/fakeconstraints.h133
-rw-r--r--talk/app/webrtc/test/fakedatachannelprovider.h161
-rw-r--r--talk/app/webrtc/test/fakedtlsidentitystore.h143
-rw-r--r--talk/app/webrtc/test/fakemediastreamsignaling.h140
-rw-r--r--talk/app/webrtc/test/fakeperiodicvideocapturer.h89
-rw-r--r--talk/app/webrtc/test/fakevideotrackrenderer.h75
-rw-r--r--talk/app/webrtc/test/mockpeerconnectionobservers.h243
-rw-r--r--talk/app/webrtc/test/peerconnectiontestwrapper.cc297
-rw-r--r--talk/app/webrtc/test/peerconnectiontestwrapper.h122
-rw-r--r--talk/app/webrtc/test/testsdpstrings.h147
-rw-r--r--talk/app/webrtc/umametrics.h128
-rw-r--r--talk/app/webrtc/videosource.cc469
-rw-r--r--talk/app/webrtc/videosource.h111
-rw-r--r--talk/app/webrtc/videosource_unittest.cc562
-rw-r--r--talk/app/webrtc/videosourceinterface.h63
-rw-r--r--talk/app/webrtc/videosourceproxy.h54
-rw-r--r--talk/app/webrtc/videotrack.cc73
-rw-r--r--talk/app/webrtc/videotrack.h64
-rw-r--r--talk/app/webrtc/videotrack_unittest.cc111
-rw-r--r--talk/app/webrtc/videotrackrenderers.cc72
-rw-r--r--talk/app/webrtc/videotrackrenderers.h67
-rw-r--r--talk/app/webrtc/webrtcsdp.cc3065
-rw-r--r--talk/app/webrtc/webrtcsdp.h81
-rw-r--r--talk/app/webrtc/webrtcsdp_unittest.cc2710
-rw-r--r--talk/app/webrtc/webrtcsession.cc2204
-rw-r--r--talk/app/webrtc/webrtcsession.h517
-rw-r--r--talk/app/webrtc/webrtcsession_unittest.cc4219
-rw-r--r--talk/app/webrtc/webrtcsessiondescriptionfactory.cc508
-rw-r--r--talk/app/webrtc/webrtcsessiondescriptionfactory.h193
289 files changed, 64242 insertions, 0 deletions
diff --git a/talk/app/webrtc/OWNERS b/talk/app/webrtc/OWNERS
new file mode 100644
index 0000000000..ffd78e1777
--- /dev/null
+++ b/talk/app/webrtc/OWNERS
@@ -0,0 +1,5 @@
+glaznev@webrtc.org
+juberti@google.com
+perkj@google.com
+tkchin@webrtc.org
+tommi@google.com
diff --git a/talk/app/webrtc/androidtests/AndroidManifest.xml b/talk/app/webrtc/androidtests/AndroidManifest.xml
new file mode 100644
index 0000000000..75b6d615d3
--- /dev/null
+++ b/talk/app/webrtc/androidtests/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc.test"
+ android:versionCode="1"
+ android:versionName="1.0" >
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+ <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+
+ <instrumentation
+ android:name="android.test.InstrumentationTestRunner"
+ android:targetPackage="org.webrtc.test" />
+
+ <application
+ android:icon="@drawable/ic_launcher"
+ android:label="@string/app_name" >
+ <uses-library android:name="android.test.runner" />
+ </application>
+
+</manifest>
diff --git a/talk/app/webrtc/androidtests/OWNERS b/talk/app/webrtc/androidtests/OWNERS
new file mode 100644
index 0000000000..4d31ffb663
--- /dev/null
+++ b/talk/app/webrtc/androidtests/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/talk/app/webrtc/androidtests/ant.properties b/talk/app/webrtc/androidtests/ant.properties
new file mode 100644
index 0000000000..bc05353865
--- /dev/null
+++ b/talk/app/webrtc/androidtests/ant.properties
@@ -0,0 +1,18 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
+source.dir=../java/testcommon/src;src \ No newline at end of file
diff --git a/talk/app/webrtc/androidtests/build.xml b/talk/app/webrtc/androidtests/build.xml
new file mode 100644
index 0000000000..cb4cb7ac94
--- /dev/null
+++ b/talk/app/webrtc/androidtests/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="libjingle_peerconnection_android_unittest" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- if sdk.dir was not set from one of the property file, then
+ get it from the ANDROID_HOME env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/talk/app/webrtc/androidtests/project.properties b/talk/app/webrtc/androidtests/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/talk/app/webrtc/androidtests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/talk/app/webrtc/androidtests/res/drawable-hdpi/ic_launcher.png b/talk/app/webrtc/androidtests/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000000..96a442e5b8
--- /dev/null
+++ b/talk/app/webrtc/androidtests/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/talk/app/webrtc/androidtests/res/drawable-ldpi/ic_launcher.png b/talk/app/webrtc/androidtests/res/drawable-ldpi/ic_launcher.png
new file mode 100644
index 0000000000..99238729d8
--- /dev/null
+++ b/talk/app/webrtc/androidtests/res/drawable-ldpi/ic_launcher.png
Binary files differ
diff --git a/talk/app/webrtc/androidtests/res/drawable-mdpi/ic_launcher.png b/talk/app/webrtc/androidtests/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000000..359047dfa4
--- /dev/null
+++ b/talk/app/webrtc/androidtests/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/talk/app/webrtc/androidtests/res/drawable-xhdpi/ic_launcher.png b/talk/app/webrtc/androidtests/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000000..71c6d760f0
--- /dev/null
+++ b/talk/app/webrtc/androidtests/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/talk/app/webrtc/androidtests/res/values/strings.xml b/talk/app/webrtc/androidtests/res/values/strings.xml
new file mode 100644
index 0000000000..dfe63f89c7
--- /dev/null
+++ b/talk/app/webrtc/androidtests/res/values/strings.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+ <string name="app_name">AndroidPeerConnectionTests</string>
+
+</resources>
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
new file mode 100644
index 0000000000..1c01ffa0b8
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -0,0 +1,310 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+
+public final class GlRectDrawerTest extends ActivityTestCase {
+ // Resolution of the test image.
+ private static final int WIDTH = 16;
+ private static final int HEIGHT = 16;
+ // Seed for random pixel creation.
+ private static final int SEED = 42;
+ // When comparing pixels, allow some slack for float arithmetic and integer rounding.
+ private static final float MAX_DIFF = 1.0f;
+
+ private static float normalizedByte(byte b) {
+ return (b & 0xFF) / 255.0f;
+ }
+
+ private static float saturatedConvert(float c) {
+ return 255.0f * Math.max(0, Math.min(c, 1));
+ }
+
+ // Assert RGB ByteBuffers are pixel perfect identical.
+ private static void assertEquals(int width, int height, ByteBuffer actual, ByteBuffer expected) {
+ actual.rewind();
+ expected.rewind();
+ assertEquals(actual.remaining(), width * height * 3);
+ assertEquals(expected.remaining(), width * height * 3);
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int actualR = actual.get() & 0xFF;
+ final int actualG = actual.get() & 0xFF;
+ final int actualB = actual.get() & 0xFF;
+ final int expectedR = expected.get() & 0xFF;
+ final int expectedG = expected.get() & 0xFF;
+ final int expectedB = expected.get() & 0xFF;
+ if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
+ fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+ + "(" + x + ", " + y + "). Expected color (R,G,B): "
+ + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+ + " but was: " + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+ }
+ }
+ }
+ }
+
+ // Convert RGBA ByteBuffer to RGB ByteBuffer.
+ private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
+ rgbaBuffer.rewind();
+ assertEquals(rgbaBuffer.remaining() % 4, 0);
+ final int numberOfPixels = rgbaBuffer.remaining() / 4;
+ final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
+ while (rgbaBuffer.hasRemaining()) {
+ // Copy RGB.
+ for (int channel = 0; channel < 3; ++channel) {
+ rgbBuffer.put(rgbaBuffer.get());
+ }
+ // Drop alpha.
+ rgbaBuffer.get();
+ }
+ return rgbBuffer;
+ }
+
+ @SmallTest
+ public void testRgbRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Upload the RGB byte buffer data as a texture.
+ final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
+ HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+
+ // Draw the RGB frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+
+ @SmallTest
+ public void testYuvRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create YUV byte buffer planes with random content.
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ final Random random = new Random(SEED);
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
+ random.nextBytes(yuvPlanes[i].array());
+ }
+
+ // Generate 3 texture ids for Y/U/V.
+ final int yuvTextures[] = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ // Upload the YUV byte buffer data as textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH,
+ HEIGHT, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+ }
+
+ // Draw the YUV frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix());
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Compare the YUV data with the RGBA result.
+ for (int y = 0; y < HEIGHT; ++y) {
+ for (int x = 0; x < WIDTH; ++x) {
+ // YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
+ // fragment shader code in GlRectDrawer.
+ final float y_luma = normalizedByte(yuvPlanes[0].get());
+ final float u_chroma = normalizedByte(yuvPlanes[1].get()) - 0.5f;
+ final float v_chroma = normalizedByte(yuvPlanes[2].get()) - 0.5f;
+ // Expected color in unrounded RGB [0.0f, 255.0f].
+ final float expectedRed = saturatedConvert(y_luma + 1.403f * v_chroma);
+ final float expectedGreen =
+ saturatedConvert(y_luma - 0.344f * u_chroma - 0.714f * v_chroma);
+ final float expectedBlue = saturatedConvert(y_luma + 1.77f * u_chroma);
+
+ // Actual color in RGB8888.
+ final int actualRed = data.get() & 0xFF;
+ final int actualGreen = data.get() & 0xFF;
+ final int actualBlue = data.get() & 0xFF;
+ final int actualAlpha = data.get() & 0xFF;
+
+ // Assert rendered image is close to pixel perfect from source YUV.
+ assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
+ assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
+ assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
+ assertEquals(actualAlpha, 255);
+ }
+ }
+
+ drawer.release();
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ eglBase.release();
+ }
+
+ /**
+ * The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
+ * create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
+ * with creating OES textures in the following way:
+ * - Create SurfaceTexture with help from SurfaceTextureHelper.
+ * - Create an EglBase with the SurfaceTexture as EGLSurface.
+ * - Upload RGB texture with known content.
+ * - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
+ * - Wait for an OES texture to be produced.
+ * The actual oesDraw() test is this:
+ * - Create an EglBase with a pixel buffer as target.
+ * - Render the OES texture onto the pixel buffer.
+ * - Read back the pixel buffer and compare it with the known RGB data.
+ */
+ @MediumTest
+ public void testOesRendering() throws InterruptedException {
+ /**
+ * Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
+ */
+ class StubOesTextureProducer {
+ private final EglBase eglBase;
+ private final GlRectDrawer drawer;
+ private final int rgbTexture;
+
+ public StubOesTextureProducer(
+ EGLContext sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
+ eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+ surfaceTexture.setDefaultBufferSize(width, height);
+ eglBase.createSurface(surfaceTexture);
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ drawer = new GlRectDrawer();
+
+ eglBase.makeCurrent();
+ rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ public void draw(ByteBuffer rgbPlane) {
+ eglBase.makeCurrent();
+
+ // Upload RGB data to texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
+ HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ // Draw the RGB data onto the SurfaceTexture.
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+ eglBase.swapBuffers();
+ }
+
+ public void release() {
+ eglBase.makeCurrent();
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+ }
+
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+
+ // Create resources for generating OES textures.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getContext());
+ final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
+ eglBase.getContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+ final SurfaceTextureHelperTest.MockTextureListener listener =
+ new SurfaceTextureHelperTest.MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Draw the frame and block until an OES texture is delivered.
+ oesProducer.draw(rgbPlane);
+ listener.waitForNewFrame();
+
+ // Real test starts here.
+ // Draw the OES texture on the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ surfaceTextureHelper.returnTextureFrame();
+ oesProducer.release();
+ surfaceTextureHelper.disconnect();
+ eglBase.release();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/NetworkMonitorTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/NetworkMonitorTest.java
new file mode 100644
index 0000000000..98a2363188
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/NetworkMonitorTest.java
@@ -0,0 +1,288 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkState;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.Intent;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.telephony.TelephonyManager;
+import android.test.ActivityTestCase;
+import android.test.UiThreadTest;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Tests for org.webrtc.NetworkMonitor.
+ */
+@SuppressLint("NewApi")
+public class NetworkMonitorTest extends ActivityTestCase {
+ /**
+ * Listens for alerts fired by the NetworkMonitor when network status changes.
+ */
+ private static class NetworkMonitorTestObserver
+ implements NetworkMonitor.NetworkObserver {
+ private boolean receivedNotification = false;
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType connectionType) {
+ receivedNotification = true;
+ }
+
+ public boolean hasReceivedNotification() {
+ return receivedNotification;
+ }
+
+ public void resetHasReceivedNotification() {
+ receivedNotification = false;
+ }
+ }
+
+ /**
+ * Mocks out calls to the ConnectivityManager.
+ */
+ private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
+ private boolean activeNetworkExists;
+ private int networkType;
+ private int networkSubtype;
+
+ @Override
+ public NetworkState getNetworkState() {
+ return new NetworkState(activeNetworkExists, networkType, networkSubtype);
+ }
+
+ // Dummy implementations to avoid NullPointerExceptions in default implementations:
+
+ @Override
+ public int getDefaultNetId() {
+ return INVALID_NET_ID;
+ }
+
+ @Override
+ public Network[] getAllNetworks() {
+ return new Network[0];
+ }
+
+ @Override
+ public NetworkState getNetworkState(Network network) {
+ return new NetworkState(false, -1, -1);
+ }
+
+ public void setActiveNetworkExists(boolean networkExists) {
+ activeNetworkExists = networkExists;
+ }
+
+ public void setNetworkType(int networkType) {
+ this.networkType = networkType;
+ }
+
+ public void setNetworkSubtype(int networkSubtype) {
+ this.networkSubtype = networkSubtype;
+ }
+ }
+
+ /**
+ * Mocks out calls to the WifiManager.
+ */
+ private static class MockWifiManagerDelegate
+ extends NetworkMonitorAutoDetect.WifiManagerDelegate {
+ private String wifiSSID;
+
+ @Override
+ public String getWifiSSID() {
+ return wifiSSID;
+ }
+
+ public void setWifiSSID(String wifiSSID) {
+ this.wifiSSID = wifiSSID;
+ }
+ }
+
+ // A dummy NetworkMonitorAutoDetect.Observer.
+ private static class TestNetworkMonitorAutoDetectObserver
+ implements NetworkMonitorAutoDetect.Observer {
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
+ }
+
+ private static final Object lock = new Object();
+ private static Handler uiThreadHandler = null;
+
+ private NetworkMonitorAutoDetect receiver;
+ private MockConnectivityManagerDelegate connectivityDelegate;
+ private MockWifiManagerDelegate wifiDelegate;
+
+ private static Handler getUiThreadHandler() {
+ synchronized (lock) {
+ if (uiThreadHandler == null ) {
+ uiThreadHandler = new Handler(Looper.getMainLooper());
+ }
+ return uiThreadHandler;
+ }
+ }
+
+ /**
+ * Helper method to create a network monitor and delegates for testing.
+ */
+ private void createTestMonitor() {
+ Context context = getInstrumentation().getTargetContext();
+ NetworkMonitor.resetInstanceForTests(context);
+ NetworkMonitor.setAutoDetectConnectivityState(true);
+ receiver = NetworkMonitor.getAutoDetectorForTest();
+ assertNotNull(receiver);
+
+ connectivityDelegate = new MockConnectivityManagerDelegate();
+ connectivityDelegate.setActiveNetworkExists(true);
+ receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
+
+ wifiDelegate = new MockWifiManagerDelegate();
+ receiver.setWifiManagerDelegateForTests(wifiDelegate);
+ wifiDelegate.setWifiSSID("foo");
+ }
+
+ private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
+ final NetworkMonitorAutoDetect.NetworkState networkState =
+ receiver.getCurrentNetworkState();
+ return receiver.getCurrentConnectionType(networkState);
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ getUiThreadHandler().post(new Runnable() {
+ public void run() {
+ createTestMonitor();
+ }
+ });
+ }
+
+ /**
+ * Tests that the receiver registers for connectivity intents during construction.
+ */
+ @UiThreadTest
+ @SmallTest
+ public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
+ Context context = getInstrumentation().getTargetContext();
+
+ NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
+
+ NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
+
+ assertTrue(receiver.isReceiverRegisteredForTesting());
+ }
+
+ /**
+ * Tests that when there is an intent indicating a change in network connectivity, it sends a
+ * notification to Java observers.
+ */
+ @UiThreadTest
+ @MediumTest
+ public void testNetworkMonitorJavaObservers() throws InterruptedException {
+ // Initialize the NetworkMonitor with a connection.
+ Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+
+ // We shouldn't be re-notified if the connection hasn't actually changed.
+ NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
+ NetworkMonitor.addNetworkObserver(observer);
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("bar");
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We should be notified when we change to Wifi.
+ connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We should be notified when the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("foo");
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
+ receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // Mimic that connectivity has been lost and ensure that the observer gets the notification.
+ connectivityDelegate.setActiveNetworkExists(false);
+ Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(getInstrumentation().getTargetContext(), noConnectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ }
+
+ /**
+ * Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
+ * active network connections so it cannot usefully check results, but it can at least check
+ * that the functions don't crash.
+ */
+ @UiThreadTest
+ @SmallTest
+ public void testConnectivityManagerDelegateDoesNotCrash() {
+ ConnectivityManagerDelegate delegate =
+ new ConnectivityManagerDelegate(getInstrumentation().getTargetContext());
+ delegate.getNetworkState();
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ Network[] networks = delegate.getAllNetworks();
+ if (networks.length >= 1) {
+ delegate.getNetworkState(networks[0]);
+ delegate.hasInternetCapability(networks[0]);
+ }
+ delegate.getDefaultNetId();
+ }
+ }
+
+ /**
+ * Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
+ * on having any active network connections so it cannot usefully check results, but it can at
+ * least check that the functions don't crash.
+ */
+ @UiThreadTest
+ @SmallTest
+ public void testQueryableAPIsDoNotCrash() {
+ NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
+ NetworkMonitorAutoDetect ncn =
+ new NetworkMonitorAutoDetect(observer, getInstrumentation().getTargetContext());
+ ncn.getDefaultNetId();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/PeerConnectionAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/PeerConnectionAndroidTest.java
new file mode 100644
index 0000000000..80622b07db
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/PeerConnectionAndroidTest.java
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.test.ActivityTestCase;
+
+public class PeerConnectionAndroidTest extends ActivityTestCase {
+ private PeerConnectionTest test = new PeerConnectionTest();
+
+ @Override
+ protected void setUp() {
+ assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
+ getInstrumentation().getContext(), true, true, true));
+ }
+
+ public void testCompleteSession() throws Exception {
+ // TODO(perkj): Investigate if |test.initializeThreadCheck()| can be used
+ // on android as well. Currently this check fail.
+ test.doTest();
+ }
+
+
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java
new file mode 100644
index 0000000000..cc73fa5f98
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java
@@ -0,0 +1,189 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.test.ActivityTestCase;
+import android.test.MoreAsserts;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import android.graphics.Point;
+
+import static org.webrtc.RendererCommon.ScalingType.*;
+import static org.webrtc.RendererCommon.getDisplaySize;
+import static org.webrtc.RendererCommon.getLayoutMatrix;
+import static org.webrtc.RendererCommon.rotateTextureMatrix;
+
+public final class RendererCommonTest extends ActivityTestCase {
+ @SmallTest
+ static public void testDisplaySizeNoFrame() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
+ }
+
+ @SmallTest
+ public static void testDisplaySizeDegenerateAspectRatio() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
+ }
+
+ @SmallTest
+ public static void testZeroDisplaySize() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
+ }
+
+ @SmallTest
+ public static void testDisplaySizePerfectFit() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
+ }
+
+ @SmallTest
+ public static void testLandscapeVideoInPortraitDisplay() {
+ assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
+ }
+
+ @SmallTest
+ public static void testPortraitVideoInLandscapeDisplay() {
+ assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
+ }
+
+ @SmallTest
+ public static void testFourToThreeVideoInSixteenToNineDisplay() {
+ assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
+ }
+
+ // Only keep 2 rounded decimals to make float comparison robust.
+ private static double[] round(float[] array) {
+ assertEquals(16, array.length);
+ final double[] doubleArray = new double[16];
+ for (int i = 0; i < 16; ++i) {
+ doubleArray[i] = Math.round(100 * array[i]) / 100.0;
+ }
+ return doubleArray;
+ }
+
+ // Brief summary about matrix transformations:
+ // A coordinate p = [u, v, 0, 1] is transformed by matrix m like this p' = [u', v', 0, 1] = m * p.
+ // OpenGL uses column-major order, so:
+ // u' = u * m[0] + v * m[4] + m[12].
+ // v' = u * m[1] + v * m[5] + m[13].
+
+ @SmallTest
+ public static void testLayoutMatrixDefault() {
+ final float layoutMatrix[] = getLayoutMatrix(false, 1.0f, 1.0f);
+ // Assert:
+ // u' = u.
+ // v' = v.
+ MoreAsserts.assertEquals(new double[] {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1}, round(layoutMatrix));
+ }
+
+ @SmallTest
+ public static void testLayoutMatrixMirror() {
+ final float layoutMatrix[] = getLayoutMatrix(true, 1.0f, 1.0f);
+ // Assert:
+ // u' = 1 - u.
+ // v' = v.
+ MoreAsserts.assertEquals(new double[] {
+ -1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1}, round(layoutMatrix));
+ }
+
+ @SmallTest
+ public static void testLayoutMatrixScale() {
+ // Video has aspect ratio 2, but layout is square. This will cause only the center part of the
+ // video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
+ final float layoutMatrix[] = getLayoutMatrix(false, 2.0f, 1.0f);
+ // Assert:
+ // u' = 0.25 + 0.5 u.
+ // v' = v.
+ MoreAsserts.assertEquals(new double[] {
+ 0.5, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0.25, 0, 0, 1}, round(layoutMatrix));
+ }
+
+ @SmallTest
+ public static void testRotateTextureMatrixDefault() {
+ // Test that rotation with 0 degrees returns an identical matrix.
+ final float[] matrix = new float[] {
+ 1, 2, 3, 4,
+ 5, 6, 7, 8,
+ 9, 0, 1, 2,
+ 3, 4, 5, 6
+ };
+ final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
+ MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
+ }
+
+ @SmallTest
+ public static void testRotateTextureMatrix90Deg() {
+ final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 90);
+ // Assert:
+ // u' = 1 - v.
+ // v' = u.
+ MoreAsserts.assertEquals(new double[] {
+ 0, 1, 0, 0,
+ -1, 0, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1}, round(samplingMatrix));
+ }
+
+ @SmallTest
+ public static void testRotateTextureMatrix180Deg() {
+ final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 180);
+ // Assert:
+ // u' = 1 - u.
+ // v' = 1 - v.
+ MoreAsserts.assertEquals(new double[] {
+ -1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 1, 0, 1}, round(samplingMatrix));
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
new file mode 100644
index 0000000000..882fde1875
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -0,0 +1,319 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.nio.ByteBuffer;
+
+import javax.microedition.khronos.egl.EGL10;
+
+public final class SurfaceTextureHelperTest extends ActivityTestCase {
+ /**
+ * Mock texture listener with blocking wait functionality.
+ */
+ public static final class MockTextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ public int oesTextureId;
+ public float[] transformMatrix;
+ private boolean hasNewFrame = false;
+    // Thread on which frames are expected to be received.
+ private final Thread expectedThread;
+
+ MockTextureListener() {
+ this.expectedThread = null;
+ }
+
+ MockTextureListener(Thread expectedThread) {
+ this.expectedThread = expectedThread;
+ }
+
+ @Override
+ public synchronized void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ if (expectedThread != null && Thread.currentThread() != expectedThread) {
+ throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
+ }
+ this.oesTextureId = oesTextureId;
+ this.transformMatrix = transformMatrix;
+ hasNewFrame = true;
+ notifyAll();
+ }
+
+ /**
+ * Wait indefinitely for a new frame.
+ */
+ public synchronized void waitForNewFrame() throws InterruptedException {
+ while (!hasNewFrame) {
+ wait();
+ }
+ hasNewFrame = false;
+ }
+
+ /**
+ * Wait for a new frame, or until the specified timeout elapses. Returns true if a new frame was
+ * received before the timeout.
+ */
+ public synchronized boolean waitForNewFrame(final long timeoutMs) throws InterruptedException {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ while (!hasNewFrame && timeRemainingMs > 0) {
+ wait(timeRemainingMs);
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ final boolean didReceiveFrame = hasNewFrame;
+ hasNewFrame = false;
+ return didReceiveFrame;
+ }
+ }
+
+ /**
+ * Test normal use by receiving three uniform texture frames. Texture frames are returned as early
+ * as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @MediumTest
+ public static void testThreeConstantColorFrames() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+ final GlRectDrawer drawer = new GlRectDrawer();
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
+ // |surfaceTextureHelper| as the target EGLSurface.
+ final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglOesBase.surfaceWidth(), width);
+ assertEquals(eglOesBase.surfaceHeight(), height);
+
+ final int red[] = new int[] {79, 144, 185};
+ final int green[] = new int[] {66, 210, 162};
+ final int blue[] = new int[] {161, 117, 158};
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+
+ // Wait for an OES texture to arrive and draw it onto the pixel buffer.
+ listener.waitForNewFrame();
+ eglBase.makeCurrent();
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+
+ surfaceTextureHelper.returnTextureFrame();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
+ // Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+ assertEquals(rgbaData.get() & 0xFF, red[i]);
+ assertEquals(rgbaData.get() & 0xFF, green[i]);
+ assertEquals(rgbaData.get() & 0xFF, blue[i]);
+ assertEquals(rgbaData.get() & 0xFF, 255);
+ }
+ }
+
+ drawer.release();
+ surfaceTextureHelper.disconnect();
+ eglBase.release();
+ }
+
+ /**
+ * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
+ * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @MediumTest
+ public static void testLateReturnFrame() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
+ // |surfaceTextureHelper| as the target EGLSurface.
+ final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglOesBase.surfaceWidth(), width);
+ assertEquals(eglOesBase.surfaceHeight(), height);
+
+ final int red = 79;
+ final int green = 66;
+ final int blue = 161;
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for OES texture frame.
+ listener.waitForNewFrame();
+    // Disconnect while holding the frame.
+ surfaceTextureHelper.disconnect();
+
+ // Draw the pending texture frame onto the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.release();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+ eglBase.release();
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+ assertEquals(rgbaData.get() & 0xFF, red);
+ assertEquals(rgbaData.get() & 0xFF, green);
+ assertEquals(rgbaData.get() & 0xFF, blue);
+ assertEquals(rgbaData.get() & 0xFF, 255);
+ }
+ // Late frame return after everything has been disconnected and released.
+ surfaceTextureHelper.returnTextureFrame();
+ }
+
+ /**
+ * Test disconnecting the SurfaceTextureHelper, but keep trying to produce more texture frames. No
+ * frames should be delivered to the listener.
+ */
+ @MediumTest
+ public static void testDisconnect() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ assertFalse(listener.waitForNewFrame(1));
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener.waitForNewFrame();
+ surfaceTextureHelper.returnTextureFrame();
+
+ // Disconnect - we should not receive any textures after this.
+ surfaceTextureHelper.disconnect();
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ // swapBuffers() should not trigger onTextureFrameAvailable() because we are disconnected.
+ // Assert that no OES texture was delivered.
+ assertFalse(listener.waitForNewFrame(500));
+
+ eglBase.release();
+ }
+
+ /**
+   * Test disconnecting the SurfaceTextureHelper immediately after it has been set up to use a
+ * shared context. No frames should be delivered to the listener.
+ */
+ @SmallTest
+ public static void testDisconnectImmediately() {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ surfaceTextureHelper.disconnect();
+ }
+
+ /**
+ * Test use SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
+ * received on a thread separate from the test thread.
+ */
+ @MediumTest
+ public static void testFrameOnSeparateThread() throws InterruptedException {
+ final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT, handler);
+ // Create a mock listener and expect frames to be delivered on |thread|.
+ final MockTextureListener listener = new MockTextureListener(thread);
+ surfaceTextureHelper.setListener(listener);
+
+ // Create resources for stubbing an OES texture producer. |eglOesBase| has the
+ // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
+ final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglOesBase.makeCurrent();
+ // Draw a frame onto the SurfaceTexture.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ // Return the frame from this thread.
+ surfaceTextureHelper.returnTextureFrame();
+ surfaceTextureHelper.disconnect();
+ thread.quitSafely();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
new file mode 100644
index 0000000000..47fe780124
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -0,0 +1,217 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.view.View.MeasureSpec;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.microedition.khronos.egl.EGL10;
+
+public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
+ /**
+ * List with all possible scaling types.
+ */
+ private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
+ RendererCommon.ScalingType.SCALE_ASPECT_FIT,
+ RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+ RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
+
+ /**
+ * List with MeasureSpec modes.
+ */
+ private static final List<Integer> measureSpecModes =
+ Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
+
+ /**
+ * Returns a dummy YUV frame.
+ */
+ static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) {
+ final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
+ final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
+ }
+ return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0);
+ }
+
+ /**
+ * Assert onMeasure() with given parameters will result in expected measured size.
+ */
+ private static void assertMeasuredSize(
+ SurfaceViewRenderer surfaceViewRenderer, RendererCommon.ScalingType scalingType,
+ String frameDimensions,
+ int expectedWidth, int expectedHeight,
+ int widthSpec, int heightSpec) {
+ surfaceViewRenderer.setScalingType(scalingType);
+ surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
+ final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
+ final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
+ if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
+ fail("onMeasure("
+ + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec) + ")"
+ + " with scaling type " + scalingType
+ + " and frame: " + frameDimensions
+ + " expected measured size " + expectedWidth + "x" + expectedHeight
+ + ", but was " + measuredWidth + "x" + measuredHeight);
+ }
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
+ */
+ @MediumTest
+ public void testNoFrame() {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(getInstrumentation().getContext());
+ final String frameDimensions = "null";
+
+ // Test behaviour before SurfaceViewRenderer.init() is called.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 0, 0, zeroMeasureSize, zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
+ surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 0, 0, zeroMeasureSize, zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
+ */
+ @MediumTest
+ public void testFrame1280x720() {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(getInstrumentation().getContext());
+ surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+
+    // Test different rotation degrees, but same rotated size.
+ for (int rotationDegree : new int[] {0, 90, 180, 270}) {
+ final int rotatedWidth = 1280;
+ final int rotatedHeight = 720;
+ final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
+ final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
+ final VideoRenderer.I420Frame frame =
+ createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
+ assertEquals(rotatedWidth, frame.rotatedWidth());
+ assertEquals(rotatedHeight, frame.rotatedHeight());
+ final String frameDimensions =
+ unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
+ surfaceViewRenderer.renderFrame(frame);
+
+ // Test forcing to zero size.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 0, 0, zeroMeasureSize, zeroMeasureSize);
+ }
+ }
+
+ // Test perfect fit.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ rotatedWidth, rotatedHeight,
+ MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
+ }
+ }
+
+ // Force spec size with different aspect ratio than frame aspect ratio.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 720, 1280,
+ MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
+ MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
+ }
+
+ final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
+ {
+ // Relax both width and height constraints.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force width to 720, but relax height constraint. This will give the same result as
+ // above, because width is already the limiting factor and will be maxed out.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force height, but relax width constraint. This will force a bad layout size.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+ 720, 1280, widthSpec, heightSpec);
+ }
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
new file mode 100644
index 0000000000..dbbe5963cd
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -0,0 +1,261 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+import android.util.Size;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.microedition.khronos.egl.EGL10;
+
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroidTest extends ActivityTestCase {
+ static final String TAG = "VideoCapturerAndroidTest";
+
+ @Override
+ protected void setUp() {
+ assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
+ getInstrumentation().getContext(), true, true, true));
+ }
+
+ @SmallTest
+ // Test that enumerating formats using android.hardware.camera2 will give the same formats as
+ // android.hardware.camera in the range 320x240 to 1280x720. Often the camera2 API may contain
+ // some high resolutions that are not supported in camera1, but it may also be the other way
+ // around in some cases. Supported framerates may also differ, so don't compare those.
+ public void testCamera2Enumerator() {
+ if (!Camera2Enumerator.isSupported()) {
+ return;
+ }
+ final CameraEnumerationAndroid.Enumerator camera1Enumerator = new CameraEnumerator();
+ final CameraEnumerationAndroid.Enumerator camera2Enumerator =
+ new Camera2Enumerator(getInstrumentation().getContext());
+
+ for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+ final Set<Size> resolutions1 = new HashSet<Size>();
+ for (CaptureFormat format : camera1Enumerator.getSupportedFormats(i)) {
+ resolutions1.add(new Size(format.width, format.height));
+ }
+ final Set<Size> resolutions2 = new HashSet<Size>();
+ for (CaptureFormat format : camera2Enumerator.getSupportedFormats(i)) {
+ resolutions2.add(new Size(format.width, format.height));
+ }
+ for (Size size : resolutions1) {
+ if (size.getWidth() >= 320 && size.getHeight() >= 240
+ && size.getWidth() <= 1280 && size.getHeight() <= 720) {
+ assertTrue(resolutions2.contains(size));
+ }
+ }
+ }
+ }
+
+ @SmallTest
+ public void testCreateAndRelease() {
+ VideoCapturerAndroidTestFixtures.release(VideoCapturerAndroid.create("", null));
+ }
+
+ @SmallTest
+ public void testCreateAndReleaseUsingTextures() {
+ VideoCapturerAndroidTestFixtures.release(
+ VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT));
+ }
+
+ @SmallTest
+ public void testCreateNonExistingCamera() {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create(
+ "non-existing camera", null);
+ assertNull(capturer);
+ }
+
+ @SmallTest
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ public void testStartVideoCapturer() throws InterruptedException {
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ }
+
+ // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
+ @SmallTest
+ public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ }
+
+ @SmallTest
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ }
+
+ @SmallTest
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ if (!VideoCapturerAndroidTestFixtures.HaveTwoCameras()) {
+ return;
+ }
+
+ String deviceName = CameraEnumerationAndroid.getNameOfBackFacingDevice();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ }
+
+ @SmallTest
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+ }
+
+ // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
+ @SmallTest
+ public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+ }
+
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
+ VideoCapturerAndroidTestFixtures.createCameraEvents();
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents);
+ VideoCapturerAndroidTestFixtures.cameraEventsInvoked(
+ capturer, cameraEvents, getInstrumentation().getContext());
+ }
+
+ @MediumTest
+ // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null);
+
+ VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @MediumTest
+ public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
+ final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
+ EGL10.EGL_NO_CONTEXT);
+
+ VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @SmallTest
+  // This tests that the VideoSource that the VideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ public void testStopRestartVideoSource() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+ }
+
+ // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
+ @SmallTest
+ public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+ }
+
+ @SmallTest
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @SmallTest
+ public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @SmallTest
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ public void testReturnBufferLate() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @SmallTest
+ public void testReturnBufferLateUsingTextures() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
+ getInstrumentation().getContext());
+ }
+
+ @MediumTest
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from the test testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ final VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+ }
+
+ // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
+ @MediumTest
+ public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+ final VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
new file mode 100644
index 0000000000..11b3ce98a0
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -0,0 +1,443 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.VideoRenderer.I420Frame;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import static junit.framework.Assert.*;
+
+public class VideoCapturerAndroidTestFixtures {
+ static class RendererCallbacks implements VideoRenderer.Callbacks {
+ private int framesRendered = 0;
+ private Object frameLock = 0;
+
+ @Override
+ public void renderFrame(I420Frame frame) {
+ synchronized (frameLock) {
+ ++framesRendered;
+ frameLock.notify();
+ }
+ VideoRenderer.renderFrameDone(frame);
+ }
+
+ public int WaitForNextFrameToRender() throws InterruptedException {
+ synchronized (frameLock) {
+ frameLock.wait();
+ return framesRendered;
+ }
+ }
+ }
+
+ static class FakeAsyncRenderer implements VideoRenderer.Callbacks {
+ private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
+
+ @Override
+ public void renderFrame(I420Frame frame) {
+ synchronized (pendingFrames) {
+ pendingFrames.add(frame);
+ pendingFrames.notifyAll();
+ }
+ }
+
+ // Wait until at least one frame has been received before returning the pending frames.
+ public List<I420Frame> waitForPendingFrames() throws InterruptedException {
+ synchronized (pendingFrames) {
+ while (pendingFrames.isEmpty()) {
+ pendingFrames.wait();
+ }
+ return new ArrayList<I420Frame>(pendingFrames);
+ }
+ }
+ }
+
+ static class FakeCapturerObserver implements
+ VideoCapturerAndroid.CapturerObserver {
+ private int framesCaptured = 0;
+ private int frameSize = 0;
+ private int frameWidth = 0;
+ private int frameHeight = 0;
+ private Object frameLock = 0;
+ private Object capturerStartLock = 0;
+ private boolean captureStartResult = false;
+ private List<Long> timestamps = new ArrayList<Long>();
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ synchronized (capturerStartLock) {
+ captureStartResult = success;
+ capturerStartLock.notify();
+ }
+ }
+
+ @Override
+ public void onByteBufferFrameCaptured(byte[] frame, int length, int width, int height,
+ int rotation, long timeStamp) {
+ synchronized (frameLock) {
+ ++framesCaptured;
+ frameSize = length;
+ frameWidth = width;
+ frameHeight = height;
+ timestamps.add(timeStamp);
+ frameLock.notify();
+ }
+ }
+ @Override
+ public void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, long timeStamp) {
+ synchronized (frameLock) {
+ ++framesCaptured;
+ frameWidth = width;
+ frameHeight = height;
+ frameSize = 0;
+ timestamps.add(timeStamp);
+ frameLock.notify();
+ }
+ }
+
+ @Override
+ public void onOutputFormatRequest(int width, int height, int fps) {}
+
+ public boolean WaitForCapturerToStart() throws InterruptedException {
+ synchronized (capturerStartLock) {
+ capturerStartLock.wait();
+ return captureStartResult;
+ }
+ }
+
+ public int WaitForNextCapturedFrame() throws InterruptedException {
+ synchronized (frameLock) {
+ frameLock.wait();
+ return framesCaptured;
+ }
+ }
+
+ int frameSize() {
+ synchronized (frameLock) {
+ return frameSize;
+ }
+ }
+
+ int frameWidth() {
+ synchronized (frameLock) {
+ return frameWidth;
+ }
+ }
+
+ int frameHeight() {
+ synchronized (frameLock) {
+ return frameHeight;
+ }
+ }
+
+ List<Long> getCopyAndResetListOftimeStamps() {
+ synchronized (frameLock) {
+ ArrayList<Long> list = new ArrayList<Long>(timestamps);
+ timestamps.clear();
+ return list;
+ }
+ }
+ }
+
+ static class CameraEvents implements
+ VideoCapturerAndroid.CameraEventsHandler {
+ public boolean onCameraOpeningCalled;
+ public boolean onFirstFrameAvailableCalled;
+
+ @Override
+ public void onCameraError(String errorDescription) { }
+
+ @Override
+ public void onCameraOpening(int cameraId) {
+ onCameraOpeningCalled = true;
+ }
+
+ @Override
+ public void onFirstFrameAvailable() {
+ onFirstFrameAvailableCalled = true;
+ }
+
+ @Override
+ public void onCameraClosed() { }
+ }
+
+ static public CameraEvents createCameraEvents() {
+ return new CameraEvents();
+ }
+
+ // Return true if the device under test has at least two cameras.
+ @SuppressWarnings("deprecation")
+ static public boolean HaveTwoCameras() {
+ return (Camera.getNumberOfCameras() >= 2);
+ }
+
+ static public void release(VideoCapturerAndroid capturer) {
+ assertNotNull(capturer);
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void startCapturerAndRender(VideoCapturerAndroid capturer)
+ throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+ RendererCallbacks callbacks = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(callbacks));
+ assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void switchCamera(VideoCapturerAndroid capturer) throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+
+ // One-element array used so the nested classes can write the result despite the final requirement.
+ final boolean[] cameraSwitchSuccessful = new boolean[1];
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
+ @Override
+ public void onCameraSwitchDone(boolean isFrontCamera) {
+ cameraSwitchSuccessful[0] = true;
+ barrier.countDown();
+ }
+ @Override
+ public void onCameraSwitchError(String errorDescription) {
+ cameraSwitchSuccessful[0] = false;
+ barrier.countDown();
+ }
+ });
+ // Wait until the camera has been switched.
+ barrier.await();
+
+ // Check result.
+ if (HaveTwoCameras()) {
+ assertTrue(cameraSwitchSuccessful[0]);
+ } else {
+ assertFalse(cameraSwitchSuccessful[0]);
+ }
+ // Ensure that frames are received.
+ RendererCallbacks callbacks = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(callbacks));
+ assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void cameraEventsInvoked(VideoCapturerAndroid capturer, CameraEvents events,
+ Context appContext) throws InterruptedException {
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ // Make sure camera is started and first frame is received and then stop it.
+ assertTrue(observer.WaitForCapturerToStart());
+ observer.WaitForNextCapturedFrame();
+ capturer.stopCapture();
+ for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
+ capturer.returnBuffer(timeStamp);
+ }
+ capturer.dispose();
+
+ assertTrue(capturer.isReleased());
+ assertTrue(events.onCameraOpeningCalled);
+ assertTrue(events.onFirstFrameAvailableCalled);
+ }
+
+ static public void cameraCallsAfterStop(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ // Make sure camera is started and then stop it.
+ assertTrue(observer.WaitForCapturerToStart());
+ capturer.stopCapture();
+ for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
+ capturer.returnBuffer(timeStamp);
+ }
+ // We can't change |capturer| at this point, but we should not crash.
+ capturer.switchCamera(null);
+ capturer.onOutputFormatRequest(640, 480, 15);
+ capturer.changeCaptureFormat(640, 480, 15);
+
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void stopRestartVideoSource(VideoCapturerAndroid capturer)
+ throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+ RendererCallbacks callbacks = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(callbacks));
+ assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, source.state());
+
+ source.stop();
+ assertEquals(MediaSource.State.ENDED, source.state());
+
+ source.restart();
+ assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, source.state());
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer,
+ Context appContext) throws InterruptedException {
+ FakeCapturerObserver observer = new FakeCapturerObserver();
+ List<CaptureFormat> formats = capturer.getSupportedFormats();
+
+ for(int i = 0; i < 3 ; ++i) {
+ CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ assertTrue(observer.WaitForCapturerToStart());
+ observer.WaitForNextCapturedFrame();
+
+ // Check the frame size. The actual width and height depend on how the capturer is mounted.
+ final boolean identicalResolution = (observer.frameWidth() == format.width
+ && observer.frameHeight() == format.height);
+ final boolean flippedResolution = (observer.frameWidth() == format.height
+ && observer.frameHeight() == format.width);
+ if (!identicalResolution && !flippedResolution) {
+ fail("Wrong resolution, got: " + observer.frameWidth() + "x" + observer.frameHeight()
+ + " expected: " + format.width + "x" + format.height + " or " + format.height + "x"
+ + format.width);
+ }
+
+ if (capturer.isCapturingToTexture()) {
+ assertEquals(0, observer.frameSize());
+ } else {
+ assertEquals(format.frameSize(), observer.frameSize());
+ }
+ capturer.stopCapture();
+ for (long timestamp : observer.getCopyAndResetListOftimeStamps()) {
+ capturer.returnBuffer(timestamp);
+ }
+ }
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void returnBufferLate(VideoCapturerAndroid capturer,
+ Context appContext) throws InterruptedException {
+ FakeCapturerObserver observer = new FakeCapturerObserver();
+
+ List<CaptureFormat> formats = capturer.getSupportedFormats();
+ CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ assertTrue(observer.WaitForCapturerToStart());
+
+ observer.WaitForNextCapturedFrame();
+ capturer.stopCapture();
+ List<Long> listOftimestamps = observer.getCopyAndResetListOftimeStamps();
+ assertTrue(listOftimestamps.size() >= 1);
+
+ format = formats.get(1);
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ observer.WaitForCapturerToStart();
+
+ for (Long timeStamp : listOftimestamps) {
+ capturer.returnBuffer(timeStamp);
+ }
+
+ observer.WaitForNextCapturedFrame();
+ capturer.stopCapture();
+
+ listOftimestamps = observer.getCopyAndResetListOftimeStamps();
+ assertTrue(listOftimestamps.size() >= 1);
+ for (Long timeStamp : listOftimestamps) {
+ capturer.returnBuffer(timeStamp);
+ }
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer)
+ throws InterruptedException {
+ final PeerConnectionFactory factory = new PeerConnectionFactory();
+ final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
+ final VideoTrack track = factory.createVideoTrack("dummy", source);
+ final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+ track.addRenderer(new VideoRenderer(renderer));
+ // Wait for at least one frame that has not been returned.
+ assertFalse(renderer.waitForPendingFrames().isEmpty());
+
+ capturer.stopCapture();
+
+ // Dispose everything.
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+
+ // The pending frames should keep the JNI parts and |capturer| alive.
+ assertFalse(capturer.isReleased());
+
+ // Return the frame(s), on a different thread out of spite.
+ final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
+ final Thread returnThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ for (I420Frame frame : pendingFrames) {
+ VideoRenderer.renderFrameDone(frame);
+ }
+ }
+ });
+ returnThread.start();
+ returnThread.join();
+
+ // Check that frames have successfully returned. This will cause |capturer| to be released.
+ assertTrue(capturer.isReleased());
+ }
+}
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
new file mode 100644
index 0000000000..afcfb5bb7c
--- /dev/null
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -0,0 +1,240 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+#include "talk/app/webrtc/androidvideocapturer.h"
+
+#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/json.h"
+#include "webrtc/base/timeutils.h"
+
+namespace webrtc {
+
+// A hack for avoiding deep frame copies in
+// cricket::VideoCapturer.SignalFrameCaptured() using a custom FrameFactory.
+// A frame is injected using UpdateCapturedFrame(), and converted into a
+// cricket::VideoFrame with CreateAliasedFrame(). UpdateCapturedFrame() should
+// be called before CreateAliasedFrame() for every frame.
+// TODO(magjed): Add an interface cricket::VideoCapturer::OnFrameCaptured()
+// for ref counted I420 frames instead of this hack.
+class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
+ public:
+ FrameFactory(const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+ : delegate_(delegate) {
+ // Create a CapturedFrame that only contains header information, not the
+ // actual pixel data.
+ captured_frame_.pixel_height = 1;
+ captured_frame_.pixel_width = 1;
+ captured_frame_.data = nullptr;
+ captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
+ captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY);
+ }
+
+ void UpdateCapturedFrame(
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+ int rotation,
+ int64_t time_stamp_in_ns) {
+ buffer_ = buffer;
+ captured_frame_.width = buffer->width();
+ captured_frame_.height = buffer->height();
+ captured_frame_.time_stamp = time_stamp_in_ns;
+ captured_frame_.rotation = rotation;
+ }
+
+ void ClearCapturedFrame() {
+ buffer_ = nullptr;
+ captured_frame_.width = 0;
+ captured_frame_.height = 0;
+ captured_frame_.time_stamp = 0;
+ }
+
+ const cricket::CapturedFrame* GetCapturedFrame() const {
+ return &captured_frame_;
+ }
+
+ cricket::VideoFrame* CreateAliasedFrame(
+ const cricket::CapturedFrame* captured_frame,
+ int dst_width,
+ int dst_height) const override {
+ // Check that captured_frame is actually our frame.
+ RTC_CHECK(captured_frame == &captured_frame_);
+ RTC_CHECK(buffer_->native_handle() == nullptr);
+
+ rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
+ ShallowCenterCrop(buffer_, dst_width, dst_height),
+ captured_frame->time_stamp, captured_frame->GetRotation()));
+ // Caller takes ownership.
+ // TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
+ return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
+ : frame.release();
+ }
+
+ cricket::VideoFrame* CreateAliasedFrame(
+ const cricket::CapturedFrame* input_frame,
+ int cropped_input_width,
+ int cropped_input_height,
+ int output_width,
+ int output_height) const override {
+ if (buffer_->native_handle() != nullptr) {
+ // TODO(perkj): Implement CreateAliasedFrame properly for textures.
+ rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
+ buffer_, input_frame->time_stamp, input_frame->GetRotation()));
+ return frame.release();
+ }
+ return VideoFrameFactory::CreateAliasedFrame(input_frame,
+ cropped_input_width,
+ cropped_input_height,
+ output_width,
+ output_height);
+ }
+
+ private:
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
+ cricket::CapturedFrame captured_frame_;
+ rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+};
+
+AndroidVideoCapturer::AndroidVideoCapturer(
+ const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+ : running_(false),
+ delegate_(delegate),
+ frame_factory_(NULL),
+ current_state_(cricket::CS_STOPPED) {
+ thread_checker_.DetachFromThread();
+ std::string json_string = delegate_->GetSupportedFormats();
+ LOG(LS_INFO) << json_string;
+
+ Json::Value json_values;
+ Json::Reader reader(Json::Features::strictMode());
+ if (!reader.parse(json_string, json_values)) {
+ LOG(LS_ERROR) << "Failed to parse formats.";
+ }
+
+ std::vector<cricket::VideoFormat> formats;
+ for (Json::ArrayIndex i = 0; i < json_values.size(); ++i) {
+ const Json::Value& json_value = json_values[i];
+ RTC_CHECK(!json_value["width"].isNull() &&
+ !json_value["height"].isNull() &&
+ !json_value["framerate"].isNull());
+ cricket::VideoFormat format(
+ json_value["width"].asInt(),
+ json_value["height"].asInt(),
+ cricket::VideoFormat::FpsToInterval(json_value["framerate"].asInt()),
+ cricket::FOURCC_YV12);
+ formats.push_back(format);
+ }
+ SetSupportedFormats(formats);
+ // Do not apply frame rotation by default.
+ SetApplyRotation(false);
+}
+
+AndroidVideoCapturer::~AndroidVideoCapturer() {
+ RTC_CHECK(!running_);
+}
+
+cricket::CaptureState AndroidVideoCapturer::Start(
+ const cricket::VideoFormat& capture_format) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!running_);
+ const int fps = cricket::VideoFormat::IntervalToFps(capture_format.interval);
+ LOG(LS_INFO) << " AndroidVideoCapturer::Start " << capture_format.width << "x"
+ << capture_format.height << "@" << fps;
+
+ frame_factory_ = new AndroidVideoCapturer::FrameFactory(delegate_.get());
+ set_frame_factory(frame_factory_);
+
+ running_ = true;
+ delegate_->Start(capture_format.width, capture_format.height, fps, this);
+ SetCaptureFormat(&capture_format);
+ current_state_ = cricket::CS_STARTING;
+ return current_state_;
+}
+
+void AndroidVideoCapturer::Stop() {
+ LOG(LS_INFO) << " AndroidVideoCapturer::Stop ";
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ RTC_CHECK(running_);
+ running_ = false;
+ SetCaptureFormat(NULL);
+
+ delegate_->Stop();
+ current_state_ = cricket::CS_STOPPED;
+ SignalStateChange(this, current_state_);
+}
+
+bool AndroidVideoCapturer::IsRunning() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ return running_;
+}
+
+bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ fourccs->push_back(cricket::FOURCC_YV12);
+ return true;
+}
+
+void AndroidVideoCapturer::OnCapturerStarted(bool success) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ cricket::CaptureState new_state =
+ success ? cricket::CS_RUNNING : cricket::CS_FAILED;
+ if (new_state == current_state_)
+ return;
+ current_state_ = new_state;
+
+ // TODO(perkj): SetCaptureState can not be used since it posts to |thread_|.
+ // But |thread_| is currently just the thread that happened to create the
+ // cricket::VideoCapturer.
+ SignalStateChange(this, new_state);
+}
+
+void AndroidVideoCapturer::OnIncomingFrame(
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+ int rotation,
+ int64_t time_stamp) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
+ SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
+ frame_factory_->ClearCapturedFrame();
+}
+
+void AndroidVideoCapturer::OnOutputFormatRequest(
+ int width, int height, int fps) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ const cricket::VideoFormat& current = video_adapter()->output_format();
+ cricket::VideoFormat format(
+ width, height, cricket::VideoFormat::FpsToInterval(fps), current.fourcc);
+ video_adapter()->OnOutputFormatRequest(format);
+}
+
+bool AndroidVideoCapturer::GetBestCaptureFormat(
+ const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) {
+ // Delegate this choice to VideoCapturerAndroid.startCapture().
+ *best_format = desired;
+ return true;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/androidvideocapturer.h b/talk/app/webrtc/androidvideocapturer.h
new file mode 100644
index 0000000000..df783bdf6f
--- /dev/null
+++ b/talk/app/webrtc/androidvideocapturer.h
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+#ifndef TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_
+#define TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/media/base/videocapturer.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/interface/video_frame_buffer.h"
+
+namespace webrtc {
+
+class AndroidVideoCapturer;
+
+class AndroidVideoCapturerDelegate : public rtc::RefCountInterface {
+ public:
+ virtual ~AndroidVideoCapturerDelegate() {}
+ // Start capturing. The implementation of the delegate must call
+ // AndroidVideoCapturer::OnCapturerStarted with the result of this request.
+ virtual void Start(int width, int height, int framerate,
+ AndroidVideoCapturer* capturer) = 0;
+
+ // Stops capturing.
+ // The delegate may not call into AndroidVideoCapturer after this call.
+ virtual void Stop() = 0;
+
+ // Must return a JSON string "{{width=xxx, height=xxx, framerate = xxx}}"
+ virtual std::string GetSupportedFormats() = 0;
+};
+
+// Android implementation of cricket::VideoCapturer for use with WebRtc
+// PeerConnection.
+class AndroidVideoCapturer : public cricket::VideoCapturer {
+ public:
+ explicit AndroidVideoCapturer(
+ const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate);
+ virtual ~AndroidVideoCapturer();
+
+ // Called from JNI when the capturer has been started.
+ void OnCapturerStarted(bool success);
+
+ // Called from JNI when a new frame has been captured.
+ // Argument |buffer| is intentionally by value, for use with rtc::Bind.
+ void OnIncomingFrame(
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+ int rotation,
+ int64_t time_stamp);
+
+ // Called from JNI to request a new video format.
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); }
+
+ // cricket::VideoCapturer implementation.
+ bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) override;
+
+ private:
+ // cricket::VideoCapturer implementation.
+ // Video frames will be delivered using
+ // cricket::VideoCapturer::SignalFrameCaptured on the thread that calls Start.
+ cricket::CaptureState Start(
+ const cricket::VideoFormat& capture_format) override;
+ void Stop() override;
+ bool IsRunning() override;
+ bool IsScreencast() const override { return false; }
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+
+ bool running_;
+ rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+
+ rtc::ThreadChecker thread_checker_;
+
+ class FrameFactory;
+ FrameFactory* frame_factory_; // Owned by cricket::VideoCapturer.
+
+ cricket::CaptureState current_state_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_
diff --git a/talk/app/webrtc/audiotrack.cc b/talk/app/webrtc/audiotrack.cc
new file mode 100644
index 0000000000..b0c91296f9
--- /dev/null
+++ b/talk/app/webrtc/audiotrack.cc
@@ -0,0 +1,53 @@
+/*
+ * libjingle
+ * Copyright 2004--2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/audiotrack.h"
+
+#include <string>
+
+namespace webrtc {
+
+static const char kAudioTrackKind[] = "audio";
+
+AudioTrack::AudioTrack(const std::string& label,
+ AudioSourceInterface* audio_source)
+ : MediaStreamTrack<AudioTrackInterface>(label),
+ audio_source_(audio_source) {
+}
+
+std::string AudioTrack::kind() const {
+ return kAudioTrackKind;
+}
+
+rtc::scoped_refptr<AudioTrack> AudioTrack::Create(
+ const std::string& id, AudioSourceInterface* source) {
+ rtc::RefCountedObject<AudioTrack>* track =
+ new rtc::RefCountedObject<AudioTrack>(id, source);
+ return track;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/audiotrack.h b/talk/app/webrtc/audiotrack.h
new file mode 100644
index 0000000000..750f272ba2
--- /dev/null
+++ b/talk/app/webrtc/audiotrack.h
@@ -0,0 +1,69 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_AUDIOTRACK_H_
+#define TALK_APP_WEBRTC_AUDIOTRACK_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/mediastreamtrack.h"
+#include "talk/app/webrtc/notifier.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class AudioTrack : public MediaStreamTrack<AudioTrackInterface> {
+ public:
+ static rtc::scoped_refptr<AudioTrack> Create(
+ const std::string& id, AudioSourceInterface* source);
+
+ // AudioTrackInterface implementation.
+ AudioSourceInterface* GetSource() const override {
+ return audio_source_.get();
+ }
+ // TODO(xians): Implement these methods.
+ void AddSink(AudioTrackSinkInterface* sink) override {}
+ void RemoveSink(AudioTrackSinkInterface* sink) override {}
+ bool GetSignalLevel(int* level) override { return false; }
+ rtc::scoped_refptr<AudioProcessorInterface> GetAudioProcessor() override {
+ return NULL;
+ }
+ cricket::AudioRenderer* GetRenderer() override { return NULL; }
+
+ // MediaStreamTrack implementation.
+ std::string kind() const override;
+
+ protected:
+ AudioTrack(const std::string& label, AudioSourceInterface* audio_source);
+
+ private:
+ rtc::scoped_refptr<AudioSourceInterface> audio_source_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_AUDIOTRACK_H_
diff --git a/talk/app/webrtc/datachannel.cc b/talk/app/webrtc/datachannel.cc
new file mode 100644
index 0000000000..20cf743355
--- /dev/null
+++ b/talk/app/webrtc/datachannel.cc
@@ -0,0 +1,655 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/datachannel.h"
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/sctputils.h"
+#include "talk/media/sctp/sctpdataengine.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/refcount.h"
+
+namespace webrtc {
+
+static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
+static size_t kMaxQueuedSendDataBytes = 16 * 1024 * 1024;
+
+enum {
+ MSG_CHANNELREADY,
+};
+
+bool SctpSidAllocator::AllocateSid(rtc::SSLRole role, int* sid) {
+ int potential_sid = (role == rtc::SSL_CLIENT) ? 0 : 1;
+ while (!IsSidAvailable(potential_sid)) {
+ potential_sid += 2;
+ if (potential_sid > static_cast<int>(cricket::kMaxSctpSid)) {
+ return false;
+ }
+ }
+
+ *sid = potential_sid;
+ used_sids_.insert(potential_sid);
+ return true;
+}
+
+bool SctpSidAllocator::ReserveSid(int sid) {
+ if (!IsSidAvailable(sid)) {
+ return false;
+ }
+ used_sids_.insert(sid);
+ return true;
+}
+
+void SctpSidAllocator::ReleaseSid(int sid) {
+ auto it = used_sids_.find(sid);
+ if (it != used_sids_.end()) {
+ used_sids_.erase(it);
+ }
+}
+
+bool SctpSidAllocator::IsSidAvailable(int sid) const {
+ if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid)) {
+ return false;
+ }
+ return used_sids_.find(sid) == used_sids_.end();
+}
+
+DataChannel::PacketQueue::PacketQueue() : byte_count_(0) {}
+
+DataChannel::PacketQueue::~PacketQueue() {
+ Clear();
+}
+
+bool DataChannel::PacketQueue::Empty() const {
+ return packets_.empty();
+}
+
+DataBuffer* DataChannel::PacketQueue::Front() {
+ return packets_.front();
+}
+
+void DataChannel::PacketQueue::Pop() {
+ if (packets_.empty()) {
+ return;
+ }
+
+ byte_count_ -= packets_.front()->size();
+ packets_.pop_front();
+}
+
+void DataChannel::PacketQueue::Push(DataBuffer* packet) {
+ byte_count_ += packet->size();
+ packets_.push_back(packet);
+}
+
+void DataChannel::PacketQueue::Clear() {
+ while (!packets_.empty()) {
+ delete packets_.front();
+ packets_.pop_front();
+ }
+ byte_count_ = 0;
+}
+
+void DataChannel::PacketQueue::Swap(PacketQueue* other) {
+ size_t other_byte_count = other->byte_count_;
+ other->byte_count_ = byte_count_;
+ byte_count_ = other_byte_count;
+
+ other->packets_.swap(packets_);
+}
+
+rtc::scoped_refptr<DataChannel> DataChannel::Create(
+ DataChannelProviderInterface* provider,
+ cricket::DataChannelType dct,
+ const std::string& label,
+ const InternalDataChannelInit& config) {
+ rtc::scoped_refptr<DataChannel> channel(
+ new rtc::RefCountedObject<DataChannel>(provider, dct, label));
+ if (!channel->Init(config)) {
+ return NULL;
+ }
+ return channel;
+}
+
+DataChannel::DataChannel(
+ DataChannelProviderInterface* provider,
+ cricket::DataChannelType dct,
+ const std::string& label)
+ : label_(label),
+ observer_(NULL),
+ state_(kConnecting),
+ data_channel_type_(dct),
+ provider_(provider),
+ handshake_state_(kHandshakeInit),
+ connected_to_provider_(false),
+ send_ssrc_set_(false),
+ receive_ssrc_set_(false),
+ writable_(false),
+ send_ssrc_(0),
+ receive_ssrc_(0) {
+}
+
+bool DataChannel::Init(const InternalDataChannelInit& config) {
+ if (data_channel_type_ == cricket::DCT_RTP) {
+ if (config.reliable ||
+ config.id != -1 ||
+ config.maxRetransmits != -1 ||
+ config.maxRetransmitTime != -1) {
+ LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
+ << "invalid DataChannelInit.";
+ return false;
+ }
+ handshake_state_ = kHandshakeReady;
+ } else if (data_channel_type_ == cricket::DCT_SCTP) {
+ if (config.id < -1 ||
+ config.maxRetransmits < -1 ||
+ config.maxRetransmitTime < -1) {
+ LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
+ << "invalid DataChannelInit.";
+ return false;
+ }
+ if (config.maxRetransmits != -1 && config.maxRetransmitTime != -1) {
+ LOG(LS_ERROR) <<
+ "maxRetransmits and maxRetransmitTime should not be both set.";
+ return false;
+ }
+ config_ = config;
+
+ switch (config_.open_handshake_role) {
+ case webrtc::InternalDataChannelInit::kNone: // pre-negotiated
+ handshake_state_ = kHandshakeReady;
+ break;
+ case webrtc::InternalDataChannelInit::kOpener:
+ handshake_state_ = kHandshakeShouldSendOpen;
+ break;
+ case webrtc::InternalDataChannelInit::kAcker:
+ handshake_state_ = kHandshakeShouldSendAck;
+ break;
+ };
+
+ // Try to connect to the transport in case the transport channel already
+ // exists.
+ OnTransportChannelCreated();
+
+ // Checks if the transport is ready to send because the initial channel
+ // ready signal may have been sent before the DataChannel creation.
+ // This has to be done async because the upper layer objects (e.g.
+ // Chrome glue and WebKit) are not wired up properly until after this
+ // function returns.
+ if (provider_->ReadyToSendData()) {
+ rtc::Thread::Current()->Post(this, MSG_CHANNELREADY, NULL);
+ }
+ }
+
+ return true;
+}
+
+DataChannel::~DataChannel() {}
+
+void DataChannel::RegisterObserver(DataChannelObserver* observer) {
+ observer_ = observer;
+ DeliverQueuedReceivedData();
+}
+
+void DataChannel::UnregisterObserver() {
+ observer_ = NULL;
+}
+
+bool DataChannel::reliable() const {
+ if (data_channel_type_ == cricket::DCT_RTP) {
+ return false;
+ } else {
+ return config_.maxRetransmits == -1 &&
+ config_.maxRetransmitTime == -1;
+ }
+}
+
+uint64_t DataChannel::buffered_amount() const {
+ return queued_send_data_.byte_count();
+}
+
+void DataChannel::Close() {
+ if (state_ == kClosed)
+ return;
+ send_ssrc_ = 0;
+ send_ssrc_set_ = false;
+ SetState(kClosing);
+ UpdateState();
+}
+
+bool DataChannel::Send(const DataBuffer& buffer) {
+ if (state_ != kOpen) {
+ return false;
+ }
+
+ // TODO(jiayl): the spec is unclear about if the remote side should get the
+ // onmessage event. We need to figure out the expected behavior and change the
+ // code accordingly.
+ if (buffer.size() == 0) {
+ return true;
+ }
+
+ // If the queue is non-empty, we're waiting for SignalReadyToSend,
+ // so just add to the end of the queue and keep waiting.
+ if (!queued_send_data_.Empty()) {
+ // Only SCTP DataChannel queues the outgoing data when the transport is
+ // blocked.
+ ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+ if (!QueueSendDataMessage(buffer)) {
+ Close();
+ }
+ return true;
+ }
+
+ bool success = SendDataMessage(buffer, true);
+ if (data_channel_type_ == cricket::DCT_RTP) {
+ return success;
+ }
+
+ // Always return true for SCTP DataChannel per the spec.
+ return true;
+}
+
+void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
+ ASSERT(data_channel_type_ == cricket::DCT_RTP);
+
+ if (receive_ssrc_set_) {
+ return;
+ }
+ receive_ssrc_ = receive_ssrc;
+ receive_ssrc_set_ = true;
+ UpdateState();
+}
+
+// The remote peer requests that this channel be closed.
+void DataChannel::RemotePeerRequestClose() {
+ DoClose();
+}
+
+void DataChannel::SetSctpSid(int sid) {
+ ASSERT(config_.id < 0 && sid >= 0 && data_channel_type_ == cricket::DCT_SCTP);
+ if (config_.id == sid) {
+ return;
+ }
+
+ config_.id = sid;
+ provider_->AddSctpDataStream(sid);
+}
+
+void DataChannel::OnTransportChannelCreated() {
+ ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+ if (!connected_to_provider_) {
+ connected_to_provider_ = provider_->ConnectDataChannel(this);
+ }
+ // The sid may have been unassigned when provider_->ConnectDataChannel was
+ // done. So always add the streams even if connected_to_provider_ is true.
+ if (config_.id >= 0) {
+ provider_->AddSctpDataStream(config_.id);
+ }
+}
+
+// The underlying transport channel was destroyed.
+// This function makes sure the DataChannel is disconnected and changes state to
+// kClosed.
+void DataChannel::OnTransportChannelDestroyed() {
+ DoClose();
+}
+
+void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
+ ASSERT(data_channel_type_ == cricket::DCT_RTP);
+ if (send_ssrc_set_) {
+ return;
+ }
+ send_ssrc_ = send_ssrc;
+ send_ssrc_set_ = true;
+ UpdateState();
+}
+
+void DataChannel::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case MSG_CHANNELREADY:
+ OnChannelReady(true);
+ break;
+ }
+}
+
+void DataChannel::OnDataReceived(cricket::DataChannel* channel,
+ const cricket::ReceiveDataParams& params,
+ const rtc::Buffer& payload) {
+ uint32_t expected_ssrc =
+ (data_channel_type_ == cricket::DCT_RTP) ? receive_ssrc_ : config_.id;
+ if (params.ssrc != expected_ssrc) {
+ return;
+ }
+
+ if (params.type == cricket::DMT_CONTROL) {
+ ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+ if (handshake_state_ != kHandshakeWaitingForAck) {
+ // Ignore it if we are not expecting an ACK message.
+ LOG(LS_WARNING) << "DataChannel received unexpected CONTROL message, "
+ << "sid = " << params.ssrc;
+ return;
+ }
+ if (ParseDataChannelOpenAckMessage(payload)) {
+ // We can send unordered as soon as we receive the ACK message.
+ handshake_state_ = kHandshakeReady;
+ LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = "
+ << params.ssrc;
+ } else {
+ LOG(LS_WARNING) << "DataChannel failed to parse OPEN_ACK message, sid = "
+ << params.ssrc;
+ }
+ return;
+ }
+
+ ASSERT(params.type == cricket::DMT_BINARY ||
+ params.type == cricket::DMT_TEXT);
+
+ LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " << params.ssrc;
+ // We can send unordered as soon as we receive any DATA message since the
+ // remote side must have received the OPEN (and old clients do not send
+ // OPEN_ACK).
+ if (handshake_state_ == kHandshakeWaitingForAck) {
+ handshake_state_ = kHandshakeReady;
+ }
+
+ bool binary = (params.type == cricket::DMT_BINARY);
+ rtc::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
+ if (state_ == kOpen && observer_) {
+ observer_->OnMessage(*buffer.get());
+ } else {
+ if (queued_received_data_.byte_count() + payload.size() >
+ kMaxQueuedReceivedDataBytes) {
+ LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
+
+ queued_received_data_.Clear();
+ if (data_channel_type_ != cricket::DCT_RTP) {
+ Close();
+ }
+
+ return;
+ }
+ queued_received_data_.Push(buffer.release());
+ }
+}
+
+void DataChannel::OnStreamClosedRemotely(uint32_t sid) {
+ if (data_channel_type_ == cricket::DCT_SCTP && sid == config_.id) {
+ Close();
+ }
+}
+
+void DataChannel::OnChannelReady(bool writable) {
+ writable_ = writable;
+ if (!writable) {
+ return;
+ }
+
+ SendQueuedControlMessages();
+ SendQueuedDataMessages();
+ UpdateState();
+}
+
+void DataChannel::DoClose() {
+ if (state_ == kClosed)
+ return;
+
+ receive_ssrc_set_ = false;
+ send_ssrc_set_ = false;
+ SetState(kClosing);
+ UpdateState();
+}
+
+void DataChannel::UpdateState() {
+ // UpdateState determines what to do from a few state variables. Include
+ // all conditions required for each state transition here for
+ // clarity. OnChannelReady(true) will send any queued data and then invoke
+ // UpdateState().
+ switch (state_) {
+ case kConnecting: {
+ if (send_ssrc_set_ == receive_ssrc_set_) {
+ if (data_channel_type_ == cricket::DCT_RTP && !connected_to_provider_) {
+ connected_to_provider_ = provider_->ConnectDataChannel(this);
+ }
+ if (connected_to_provider_) {
+ if (handshake_state_ == kHandshakeShouldSendOpen) {
+ rtc::Buffer payload;
+ WriteDataChannelOpenMessage(label_, config_, &payload);
+ SendControlMessage(payload);
+ } else if (handshake_state_ == kHandshakeShouldSendAck) {
+ rtc::Buffer payload;
+ WriteDataChannelOpenAckMessage(&payload);
+ SendControlMessage(payload);
+ }
+ if (writable_ &&
+ (handshake_state_ == kHandshakeReady ||
+ handshake_state_ == kHandshakeWaitingForAck)) {
+ SetState(kOpen);
+ // If we have received buffers before the channel got writable.
+ // Deliver them now.
+ DeliverQueuedReceivedData();
+ }
+ }
+ }
+ break;
+ }
+ case kOpen: {
+ break;
+ }
+ case kClosing: {
+ if (queued_send_data_.Empty() && queued_control_data_.Empty()) {
+ if (connected_to_provider_) {
+ DisconnectFromProvider();
+ }
+
+ if (!connected_to_provider_ && !send_ssrc_set_ && !receive_ssrc_set_) {
+ SetState(kClosed);
+ }
+ }
+ break;
+ }
+ case kClosed:
+ break;
+ }
+}
+
+void DataChannel::SetState(DataState state) {
+ if (state_ == state) {
+ return;
+ }
+
+ state_ = state;
+ if (observer_) {
+ observer_->OnStateChange();
+ }
+ if (state_ == kClosed) {
+ SignalClosed(this);
+ }
+}
+
+void DataChannel::DisconnectFromProvider() {
+ if (!connected_to_provider_)
+ return;
+
+ provider_->DisconnectDataChannel(this);
+ connected_to_provider_ = false;
+
+ if (data_channel_type_ == cricket::DCT_SCTP && config_.id >= 0) {
+ provider_->RemoveSctpDataStream(config_.id);
+ }
+}
+
+void DataChannel::DeliverQueuedReceivedData() {
+ if (!observer_) {
+ return;
+ }
+
+ while (!queued_received_data_.Empty()) {
+ rtc::scoped_ptr<DataBuffer> buffer(queued_received_data_.Front());
+ observer_->OnMessage(*buffer);
+ queued_received_data_.Pop();
+ }
+}
+
+void DataChannel::SendQueuedDataMessages() {
+ if (queued_send_data_.Empty()) {
+ return;
+ }
+
+ ASSERT(state_ == kOpen || state_ == kClosing);
+
+ uint64_t start_buffered_amount = buffered_amount();
+ while (!queued_send_data_.Empty()) {
+ DataBuffer* buffer = queued_send_data_.Front();
+ if (!SendDataMessage(*buffer, false)) {
+ // Leave the message in the queue if sending is aborted.
+ break;
+ }
+ queued_send_data_.Pop();
+ delete buffer;
+ }
+
+ if (observer_ && buffered_amount() < start_buffered_amount) {
+ observer_->OnBufferedAmountChange(start_buffered_amount);
+ }
+}
+
+bool DataChannel::SendDataMessage(const DataBuffer& buffer,
+ bool queue_if_blocked) {
+ cricket::SendDataParams send_params;
+
+ if (data_channel_type_ == cricket::DCT_SCTP) {
+ send_params.ordered = config_.ordered;
+ // Send as ordered if it is still going through OPEN/ACK signaling.
+ if (handshake_state_ != kHandshakeReady && !config_.ordered) {
+ send_params.ordered = true;
+ LOG(LS_VERBOSE) << "Sending data as ordered for unordered DataChannel "
+ << "because the OPEN_ACK message has not been received.";
+ }
+
+ send_params.max_rtx_count = config_.maxRetransmits;
+ send_params.max_rtx_ms = config_.maxRetransmitTime;
+ send_params.ssrc = config_.id;
+ } else {
+ send_params.ssrc = send_ssrc_;
+ }
+ send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
+
+ cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
+ bool success = provider_->SendData(send_params, buffer.data, &send_result);
+
+ if (success) {
+ return true;
+ }
+
+ if (data_channel_type_ != cricket::DCT_SCTP) {
+ return false;
+ }
+
+ if (send_result == cricket::SDR_BLOCK) {
+ if (!queue_if_blocked || QueueSendDataMessage(buffer)) {
+ return false;
+ }
+ }
+ // Close the channel if the error is not SDR_BLOCK, or if queuing the
+ // message failed.
+ LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send data, "
+ << "send_result = " << send_result;
+ Close();
+
+ return false;
+}
+
+bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+ size_t start_buffered_amount = buffered_amount();
+ if (start_buffered_amount >= kMaxQueuedSendDataBytes) {
+ LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
+ return false;
+ }
+ queued_send_data_.Push(new DataBuffer(buffer));
+
+ // The buffer can have length zero, in which case there is no change.
+ if (observer_ && buffered_amount() > start_buffered_amount) {
+ observer_->OnBufferedAmountChange(start_buffered_amount);
+ }
+ return true;
+}
+
+void DataChannel::SendQueuedControlMessages() {
+ PacketQueue control_packets;
+ control_packets.Swap(&queued_control_data_);
+
+ while (!control_packets.Empty()) {
+ rtc::scoped_ptr<DataBuffer> buf(control_packets.Front());
+ SendControlMessage(buf->data);
+ control_packets.Pop();
+ }
+}
+
+void DataChannel::QueueControlMessage(const rtc::Buffer& buffer) {
+ queued_control_data_.Push(new DataBuffer(buffer, true));
+}
+
+bool DataChannel::SendControlMessage(const rtc::Buffer& buffer) {
+ bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
+
+ ASSERT(data_channel_type_ == cricket::DCT_SCTP &&
+ writable_ &&
+ config_.id >= 0 &&
+ (!is_open_message || !config_.negotiated));
+
+ cricket::SendDataParams send_params;
+ send_params.ssrc = config_.id;
+ // Send data as ordered before we receive any message from the remote peer to
+ // make sure the remote peer will not receive any data before it receives the
+ // OPEN message.
+ send_params.ordered = config_.ordered || is_open_message;
+ send_params.type = cricket::DMT_CONTROL;
+
+ cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
+ bool retval = provider_->SendData(send_params, buffer, &send_result);
+ if (retval) {
+ LOG(LS_INFO) << "Sent CONTROL message on channel " << config_.id;
+
+ if (handshake_state_ == kHandshakeShouldSendAck) {
+ handshake_state_ = kHandshakeReady;
+ } else if (handshake_state_ == kHandshakeShouldSendOpen) {
+ handshake_state_ = kHandshakeWaitingForAck;
+ }
+ } else if (send_result == cricket::SDR_BLOCK) {
+ QueueControlMessage(buffer);
+ } else {
+ LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send"
+ << " the CONTROL message, send_result = " << send_result;
+ Close();
+ }
+ return retval;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/datachannel.h b/talk/app/webrtc/datachannel.h
new file mode 100644
index 0000000000..2713ae3b55
--- /dev/null
+++ b/talk/app/webrtc/datachannel.h
@@ -0,0 +1,299 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_DATACHANNEL_H_
+#define TALK_APP_WEBRTC_DATACHANNEL_H_
+
+#include <deque>
+#include <set>
+#include <string>
+
+#include "talk/app/webrtc/datachannelinterface.h"
+#include "talk/app/webrtc/proxy.h"
+#include "talk/media/base/mediachannel.h"
+#include "talk/session/media/channel.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+
+namespace webrtc {
+
+class DataChannel;
+
+class DataChannelProviderInterface {
+ public:
+ // Sends the data to the transport.
+ virtual bool SendData(const cricket::SendDataParams& params,
+ const rtc::Buffer& payload,
+ cricket::SendDataResult* result) = 0;
+ // Connects to the transport signals.
+ virtual bool ConnectDataChannel(DataChannel* data_channel) = 0;
+ // Disconnects from the transport signals.
+ virtual void DisconnectDataChannel(DataChannel* data_channel) = 0;
+ // Adds the data channel SID to the transport for SCTP.
+ virtual void AddSctpDataStream(int sid) = 0;
+ // Removes the data channel SID from the transport for SCTP.
+ virtual void RemoveSctpDataStream(int sid) = 0;
+ // Returns true if the transport channel is ready to send data.
+ virtual bool ReadyToSendData() const = 0;
+
+ protected:
+ virtual ~DataChannelProviderInterface() {}
+};
+
+struct InternalDataChannelInit : public DataChannelInit {
+ enum OpenHandshakeRole {
+ kOpener,
+ kAcker,
+ kNone
+ };
+ // The default role is kOpener because the default |negotiated| is false.
+ InternalDataChannelInit() : open_handshake_role(kOpener) {}
+ explicit InternalDataChannelInit(const DataChannelInit& base)
+ : DataChannelInit(base), open_handshake_role(kOpener) {
+ // If the channel is externally negotiated, do not send the OPEN message.
+ if (base.negotiated) {
+ open_handshake_role = kNone;
+ }
+ }
+
+ OpenHandshakeRole open_handshake_role;
+};
+
+// Helper class to allocate unique IDs for SCTP DataChannels
+class SctpSidAllocator {
+ public:
+ // Gets the first unused odd/even id based on the DTLS role. If |role| is
+ // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
+ // otherwise, the id starts from 1 and takes odd numbers.
+ // Returns false if no id can be allocated.
+ bool AllocateSid(rtc::SSLRole role, int* sid);
+
+ // Attempts to reserve a specific sid. Returns false if it's unavailable.
+ bool ReserveSid(int sid);
+
+ // Indicates that |sid| isn't in use any more, and is thus available again.
+ void ReleaseSid(int sid);
+
+ private:
+ // Checks if |sid| is available to be assigned to a new SCTP data channel.
+ bool IsSidAvailable(int sid) const;
+
+ std::set<int> used_sids_;
+};
+
+// DataChannel is an implementation of the DataChannelInterface based on
+// libjingle's data engine. It provides an implementation of unreliable or
+// reliable data channels. Currently this class is specifically designed to use
+// both RtpDataEngine and SctpDataEngine.
+
+// DataChannel states:
+// kConnecting: The channel has been created, but the transport might not yet
+// be ready.
+// kOpen: The channel has a local SSRC set by a call to UpdateSendSsrc
+// and a remote SSRC set by a call to UpdateReceiveSsrc, and the transport
+// has been writable once.
+// kClosing: DataChannelInterface::Close has been called, or UpdateReceiveSsrc
+// has been called with SSRC==0.
+// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc have been called with
+// SSRC==0.
+class DataChannel : public DataChannelInterface,
+ public sigslot::has_slots<>,
+ public rtc::MessageHandler {
+ public:
+ static rtc::scoped_refptr<DataChannel> Create(
+ DataChannelProviderInterface* provider,
+ cricket::DataChannelType dct,
+ const std::string& label,
+ const InternalDataChannelInit& config);
+
+ virtual void RegisterObserver(DataChannelObserver* observer);
+ virtual void UnregisterObserver();
+
+ virtual std::string label() const { return label_; }
+ virtual bool reliable() const;
+ virtual bool ordered() const { return config_.ordered; }
+ virtual uint16_t maxRetransmitTime() const {
+ return config_.maxRetransmitTime;
+ }
+ virtual uint16_t maxRetransmits() const { return config_.maxRetransmits; }
+ virtual std::string protocol() const { return config_.protocol; }
+ virtual bool negotiated() const { return config_.negotiated; }
+ virtual int id() const { return config_.id; }
+ virtual uint64_t buffered_amount() const;
+ virtual void Close();
+ virtual DataState state() const { return state_; }
+ virtual bool Send(const DataBuffer& buffer);
+
+ // rtc::MessageHandler override.
+ virtual void OnMessage(rtc::Message* msg);
+
+ // Called when the channel's ready to use. That can happen when the
+ // underlying DataMediaChannel becomes ready, or when this channel is a new
+ // stream on an existing DataMediaChannel, and we've finished negotiation.
+ void OnChannelReady(bool writable);
+
+ // Sigslots from cricket::DataChannel
+ void OnDataReceived(cricket::DataChannel* channel,
+ const cricket::ReceiveDataParams& params,
+ const rtc::Buffer& payload);
+ void OnStreamClosedRemotely(uint32_t sid);
+
+  // The remote peer requests that this channel be closed.
+ void RemotePeerRequestClose();
+
+ // The following methods are for SCTP only.
+
+ // Sets the SCTP sid and adds to transport layer if not set yet. Should only
+ // be called once.
+ void SetSctpSid(int sid);
+ // Called when the transport channel is created.
+ // Only needs to be called for SCTP data channels.
+ void OnTransportChannelCreated();
+ // Called when the transport channel is destroyed.
+ void OnTransportChannelDestroyed();
+
+ // The following methods are for RTP only.
+
+ // Set the SSRC this channel should use to send data on the
+ // underlying data engine. |send_ssrc| == 0 means that the channel is no
+ // longer part of the session negotiation.
+ void SetSendSsrc(uint32_t send_ssrc);
+ // Set the SSRC this channel should use to receive data from the
+ // underlying data engine.
+ void SetReceiveSsrc(uint32_t receive_ssrc);
+
+ cricket::DataChannelType data_channel_type() const {
+ return data_channel_type_;
+ }
+
+ // Emitted when state transitions to kClosed.
+ // In the case of SCTP channels, this signal can be used to tell when the
+ // channel's sid is free.
+ sigslot::signal1<DataChannel*> SignalClosed;
+
+ protected:
+ DataChannel(DataChannelProviderInterface* client,
+ cricket::DataChannelType dct,
+ const std::string& label);
+ virtual ~DataChannel();
+
+ private:
+ // A packet queue which tracks the total queued bytes. Queued packets are
+ // owned by this class.
+ class PacketQueue {
+ public:
+ PacketQueue();
+ ~PacketQueue();
+
+ size_t byte_count() const {
+ return byte_count_;
+ }
+
+ bool Empty() const;
+
+ DataBuffer* Front();
+
+ void Pop();
+
+ void Push(DataBuffer* packet);
+
+ void Clear();
+
+ void Swap(PacketQueue* other);
+
+ private:
+ std::deque<DataBuffer*> packets_;
+ size_t byte_count_;
+ };
+
+ // The OPEN(_ACK) signaling state.
+ enum HandshakeState {
+ kHandshakeInit,
+ kHandshakeShouldSendOpen,
+ kHandshakeShouldSendAck,
+ kHandshakeWaitingForAck,
+ kHandshakeReady
+ };
+
+ bool Init(const InternalDataChannelInit& config);
+ void DoClose();
+ void UpdateState();
+ void SetState(DataState state);
+ void DisconnectFromProvider();
+
+ void DeliverQueuedReceivedData();
+
+ void SendQueuedDataMessages();
+ bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked);
+ bool QueueSendDataMessage(const DataBuffer& buffer);
+
+ void SendQueuedControlMessages();
+ void QueueControlMessage(const rtc::Buffer& buffer);
+ bool SendControlMessage(const rtc::Buffer& buffer);
+
+ std::string label_;
+ InternalDataChannelInit config_;
+ DataChannelObserver* observer_;
+ DataState state_;
+ cricket::DataChannelType data_channel_type_;
+ DataChannelProviderInterface* provider_;
+ HandshakeState handshake_state_;
+ bool connected_to_provider_;
+ bool send_ssrc_set_;
+ bool receive_ssrc_set_;
+ bool writable_;
+ uint32_t send_ssrc_;
+ uint32_t receive_ssrc_;
+ // Control messages that always have to get sent out before any queued
+ // data.
+ PacketQueue queued_control_data_;
+ PacketQueue queued_received_data_;
+ PacketQueue queued_send_data_;
+};
+
+// Define proxy for DataChannelInterface.
+BEGIN_PROXY_MAP(DataChannel)
+ PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+ PROXY_METHOD0(void, UnregisterObserver)
+ PROXY_CONSTMETHOD0(std::string, label)
+ PROXY_CONSTMETHOD0(bool, reliable)
+ PROXY_CONSTMETHOD0(bool, ordered)
+ PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+ PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+ PROXY_CONSTMETHOD0(std::string, protocol)
+ PROXY_CONSTMETHOD0(bool, negotiated)
+ PROXY_CONSTMETHOD0(int, id)
+ PROXY_CONSTMETHOD0(DataState, state)
+ PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
+ PROXY_METHOD0(void, Close)
+ PROXY_METHOD1(bool, Send, const DataBuffer&)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_DATACHANNEL_H_
diff --git a/talk/app/webrtc/datachannel_unittest.cc b/talk/app/webrtc/datachannel_unittest.cc
new file mode 100644
index 0000000000..ff79541478
--- /dev/null
+++ b/talk/app/webrtc/datachannel_unittest.cc
@@ -0,0 +1,581 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/datachannel.h"
+#include "talk/app/webrtc/sctputils.h"
+#include "talk/app/webrtc/test/fakedatachannelprovider.h"
+#include "webrtc/base/gunit.h"
+
+using webrtc::DataChannel;
+using webrtc::SctpSidAllocator;
+
+class FakeDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+ FakeDataChannelObserver()
+ : messages_received_(0),
+ on_state_change_count_(0),
+ on_buffered_amount_change_count_(0) {}
+
+ void OnStateChange() {
+ ++on_state_change_count_;
+ }
+
+ void OnBufferedAmountChange(uint64_t previous_amount) {
+ ++on_buffered_amount_change_count_;
+ }
+
+ void OnMessage(const webrtc::DataBuffer& buffer) {
+ ++messages_received_;
+ }
+
+ size_t messages_received() const {
+ return messages_received_;
+ }
+
+ void ResetOnStateChangeCount() {
+ on_state_change_count_ = 0;
+ }
+
+ void ResetOnBufferedAmountChangeCount() {
+ on_buffered_amount_change_count_ = 0;
+ }
+
+ size_t on_state_change_count() const {
+ return on_state_change_count_;
+ }
+
+ size_t on_buffered_amount_change_count() const {
+ return on_buffered_amount_change_count_;
+ }
+
+ private:
+ size_t messages_received_;
+ size_t on_state_change_count_;
+ size_t on_buffered_amount_change_count_;
+};
+
+class SctpDataChannelTest : public testing::Test {
+ protected:
+ SctpDataChannelTest()
+ : webrtc_data_channel_(
+ DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test", init_)) {
+ }
+
+ void SetChannelReady() {
+ provider_.set_transport_available(true);
+ webrtc_data_channel_->OnTransportChannelCreated();
+ if (webrtc_data_channel_->id() < 0) {
+ webrtc_data_channel_->SetSctpSid(0);
+ }
+ provider_.set_ready_to_send(true);
+ }
+
+ void AddObserver() {
+ observer_.reset(new FakeDataChannelObserver());
+ webrtc_data_channel_->RegisterObserver(observer_.get());
+ }
+
+ webrtc::InternalDataChannelInit init_;
+ FakeDataChannelProvider provider_;
+ rtc::scoped_ptr<FakeDataChannelObserver> observer_;
+ rtc::scoped_refptr<DataChannel> webrtc_data_channel_;
+};
+
+// Verifies that the data channel is connected to the transport after creation.
+TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
+ provider_.set_transport_available(true);
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", init_);
+
+ EXPECT_TRUE(provider_.IsConnected(dc.get()));
+ // The sid is not set yet, so it should not have added the streams.
+ EXPECT_FALSE(provider_.IsSendStreamAdded(dc->id()));
+ EXPECT_FALSE(provider_.IsRecvStreamAdded(dc->id()));
+
+ dc->SetSctpSid(0);
+ EXPECT_TRUE(provider_.IsSendStreamAdded(dc->id()));
+ EXPECT_TRUE(provider_.IsRecvStreamAdded(dc->id()));
+}
+
+// Verifies that the data channel is connected to the transport if the transport
+// is not available initially and becomes available later.
+TEST_F(SctpDataChannelTest, ConnectedAfterTransportBecomesAvailable) {
+ EXPECT_FALSE(provider_.IsConnected(webrtc_data_channel_.get()));
+
+ provider_.set_transport_available(true);
+ webrtc_data_channel_->OnTransportChannelCreated();
+ EXPECT_TRUE(provider_.IsConnected(webrtc_data_channel_.get()));
+}
+
+// Tests the state of the data channel.
+TEST_F(SctpDataChannelTest, StateTransition) {
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+ webrtc_data_channel_->state());
+ SetChannelReady();
+
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, webrtc_data_channel_->state());
+ webrtc_data_channel_->Close();
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state());
+ // Verifies that it's disconnected from the transport.
+ EXPECT_FALSE(provider_.IsConnected(webrtc_data_channel_.get()));
+}
+
+// Tests that DataChannel::buffered_amount() is correct after the channel is
+// blocked.
+TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) {
+ AddObserver();
+ SetChannelReady();
+ webrtc::DataBuffer buffer("abcd");
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+ EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+ provider_.set_send_blocked(true);
+
+ const int number_of_packets = 3;
+ for (int i = 0; i < number_of_packets; ++i) {
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ }
+ EXPECT_EQ(buffer.data.size() * number_of_packets,
+ webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(number_of_packets, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that the queued data are sent when the channel transitions from blocked
+// to unblocked.
+TEST_F(SctpDataChannelTest, QueuedDataSentWhenUnblocked) {
+ AddObserver();
+ SetChannelReady();
+ webrtc::DataBuffer buffer("abcd");
+ provider_.set_send_blocked(true);
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+ provider_.set_send_blocked(false);
+ SetChannelReady();
+ EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that there is no crash when the channel is blocked right away while
+// trying to send queued data.
+TEST_F(SctpDataChannelTest, BlockedWhenSendQueuedDataNoCrash) {
+ AddObserver();
+ SetChannelReady();
+ webrtc::DataBuffer buffer("abcd");
+ provider_.set_send_blocked(true);
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+ // Set channel ready while it is still blocked.
+ SetChannelReady();
+ EXPECT_EQ(buffer.size(), webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+ // Unblock the channel to send queued data again, there should be no crash.
+ provider_.set_send_blocked(false);
+ SetChannelReady();
+ EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that the queued control message is sent when channel is ready.
+TEST_F(SctpDataChannelTest, OpenMessageSent) {
+ // Initially the id is unassigned.
+ EXPECT_EQ(-1, webrtc_data_channel_->id());
+
+ SetChannelReady();
+ EXPECT_GE(webrtc_data_channel_->id(), 0);
+ EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+ EXPECT_EQ(provider_.last_send_data_params().ssrc,
+ static_cast<uint32_t>(webrtc_data_channel_->id()));
+}
+
+TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
+ provider_.set_send_blocked(true);
+ SetChannelReady();
+ provider_.set_send_blocked(false);
+
+ EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+ EXPECT_EQ(provider_.last_send_data_params().ssrc,
+ static_cast<uint32_t>(webrtc_data_channel_->id()));
+}
+
+// Tests that the DataChannel created after transport gets ready can enter OPEN
+// state.
+TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
+ SetChannelReady();
+ webrtc::InternalDataChannelInit init;
+ init.id = 1;
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", init);
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state());
+ EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(),
+ 1000);
+}
+
+// Tests that an unordered DataChannel sends data as ordered until the OPEN_ACK
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
+ SetChannelReady();
+ webrtc::InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", init);
+
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+ // Sends a message and verifies it's ordered.
+ webrtc::DataBuffer buffer("some data");
+ ASSERT_TRUE(dc->Send(buffer));
+ EXPECT_TRUE(provider_.last_send_data_params().ordered);
+
+ // Emulates receiving an OPEN_ACK message.
+ cricket::ReceiveDataParams params;
+ params.ssrc = init.id;
+ params.type = cricket::DMT_CONTROL;
+ rtc::Buffer payload;
+ webrtc::WriteDataChannelOpenAckMessage(&payload);
+ dc->OnDataReceived(NULL, params, payload);
+
+ // Sends another message and verifies it's unordered.
+ ASSERT_TRUE(dc->Send(buffer));
+ EXPECT_FALSE(provider_.last_send_data_params().ordered);
+}
+
+// Tests that an unordered DataChannel sends unordered data after any DATA
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
+ SetChannelReady();
+ webrtc::InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", init);
+
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+ // Emulates receiving a DATA message.
+ cricket::ReceiveDataParams params;
+ params.ssrc = init.id;
+ params.type = cricket::DMT_TEXT;
+ webrtc::DataBuffer buffer("data");
+ dc->OnDataReceived(NULL, params, buffer.data);
+
+ // Sends a message and verifies it's unordered.
+ ASSERT_TRUE(dc->Send(buffer));
+ EXPECT_FALSE(provider_.last_send_data_params().ordered);
+}
+
+// Tests that the channel can't open until it's successfully sent the OPEN
+// message.
+TEST_F(SctpDataChannelTest, OpenWaitsForOpenMesssage) {
+ webrtc::DataBuffer buffer("foo");
+
+ provider_.set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+ webrtc_data_channel_->state());
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state(), 1000);
+ EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+}
+
+// Tests that close first makes sure all queued data gets sent.
+TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
+ webrtc::DataBuffer buffer("foo");
+
+ provider_.set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+ webrtc_data_channel_->state());
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state(), 1000);
+ provider_.set_send_blocked(true);
+ webrtc_data_channel_->Send(buffer);
+ webrtc_data_channel_->Close();
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state(), 1000);
+ EXPECT_EQ(cricket::DMT_TEXT, provider_.last_send_data_params().type);
+}
+
+// Tests that messages are sent with the right ssrc.
+TEST_F(SctpDataChannelTest, SendDataSsrc) {
+ webrtc_data_channel_->SetSctpSid(1);
+ SetChannelReady();
+ webrtc::DataBuffer buffer("data");
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ EXPECT_EQ(1U, provider_.last_send_data_params().ssrc);
+}
+
+// Tests that the incoming messages with wrong ssrcs are rejected.
+TEST_F(SctpDataChannelTest, ReceiveDataWithInvalidSsrc) {
+ webrtc_data_channel_->SetSctpSid(1);
+ SetChannelReady();
+
+ AddObserver();
+
+ cricket::ReceiveDataParams params;
+ params.ssrc = 0;
+ webrtc::DataBuffer buffer("abcd");
+ webrtc_data_channel_->OnDataReceived(NULL, params, buffer.data);
+
+ EXPECT_EQ(0U, observer_->messages_received());
+}
+
+// Tests that the incoming messages with the right ssrcs are accepted.
+TEST_F(SctpDataChannelTest, ReceiveDataWithValidSsrc) {
+ webrtc_data_channel_->SetSctpSid(1);
+ SetChannelReady();
+
+ AddObserver();
+
+ cricket::ReceiveDataParams params;
+ params.ssrc = 1;
+ webrtc::DataBuffer buffer("abcd");
+
+ webrtc_data_channel_->OnDataReceived(NULL, params, buffer.data);
+ EXPECT_EQ(1U, observer_->messages_received());
+}
+
+// Tests that no CONTROL message is sent if the datachannel is negotiated and
+// not created from an OPEN message.
+TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
+ webrtc::InternalDataChannelInit config;
+ config.id = 1;
+ config.negotiated = true;
+ config.open_handshake_role = webrtc::InternalDataChannelInit::kNone;
+
+ SetChannelReady();
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", config);
+
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+ EXPECT_EQ(0U, provider_.last_send_data_params().ssrc);
+}
+
+// Tests that OPEN_ACK message is sent if the datachannel is created from an
+// OPEN message.
+TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
+ webrtc::InternalDataChannelInit config;
+ config.id = 1;
+ config.negotiated = true;
+ config.open_handshake_role = webrtc::InternalDataChannelInit::kAcker;
+
+ SetChannelReady();
+ rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+ &provider_, cricket::DCT_SCTP, "test1", config);
+
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+ EXPECT_EQ(static_cast<unsigned int>(config.id),
+ provider_.last_send_data_params().ssrc);
+ EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+}
+
+// Tests the OPEN_ACK role assigned by InternalDataChannelInit.
+TEST_F(SctpDataChannelTest, OpenAckRoleInitialization) {
+ webrtc::InternalDataChannelInit init;
+ EXPECT_EQ(webrtc::InternalDataChannelInit::kOpener, init.open_handshake_role);
+ EXPECT_FALSE(init.negotiated);
+
+ webrtc::DataChannelInit base;
+ base.negotiated = true;
+ webrtc::InternalDataChannelInit init2(base);
+ EXPECT_EQ(webrtc::InternalDataChannelInit::kNone, init2.open_handshake_role);
+}
+
+// Tests that the DataChannel is closed if the sending buffer is full.
+TEST_F(SctpDataChannelTest, ClosedWhenSendBufferFull) {
+ SetChannelReady();
+
+ rtc::Buffer buffer(1024);
+ memset(buffer.data(), 0, buffer.size());
+
+ webrtc::DataBuffer packet(buffer, true);
+ provider_.set_send_blocked(true);
+
+ for (size_t i = 0; i < 16 * 1024 + 1; ++i) {
+ EXPECT_TRUE(webrtc_data_channel_->Send(packet));
+ }
+
+ EXPECT_TRUE(
+ webrtc::DataChannelInterface::kClosed == webrtc_data_channel_->state() ||
+ webrtc::DataChannelInterface::kClosing == webrtc_data_channel_->state());
+}
+
+// Tests that the DataChannel is closed on transport errors.
+TEST_F(SctpDataChannelTest, ClosedOnTransportError) {
+ SetChannelReady();
+ webrtc::DataBuffer buffer("abcd");
+ provider_.set_transport_error();
+
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state());
+}
+
+// Tests that an already closed DataChannel does not fire onStateChange again.
+TEST_F(SctpDataChannelTest, ClosedDataChannelDoesNotFireOnStateChange) {
+ AddObserver();
+ webrtc_data_channel_->Close();
+ // OnStateChange called for kClosing and kClosed.
+ EXPECT_EQ(2U, observer_->on_state_change_count());
+
+ observer_->ResetOnStateChangeCount();
+ webrtc_data_channel_->RemotePeerRequestClose();
+ EXPECT_EQ(0U, observer_->on_state_change_count());
+}
+
+// Tests that RemotePeerRequestClose closes the local DataChannel.
+TEST_F(SctpDataChannelTest, RemotePeerRequestClose) {
+ AddObserver();
+ webrtc_data_channel_->RemotePeerRequestClose();
+
+ // OnStateChange called for kClosing and kClosed.
+ EXPECT_EQ(2U, observer_->on_state_change_count());
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state());
+}
+
+// Tests that the DataChannel is closed if the received buffer is full.
+TEST_F(SctpDataChannelTest, ClosedWhenReceivedBufferFull) {
+ SetChannelReady();
+ rtc::Buffer buffer(1024);
+ memset(buffer.data(), 0, buffer.size());
+
+ cricket::ReceiveDataParams params;
+ params.ssrc = 0;
+
+ // Receiving data without having an observer will overflow the buffer.
+ for (size_t i = 0; i < 16 * 1024 + 1; ++i) {
+ webrtc_data_channel_->OnDataReceived(NULL, params, buffer);
+ }
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state());
+}
+
+// Tests that sending empty data returns no error and keeps the channel open.
+TEST_F(SctpDataChannelTest, SendEmptyData) {
+ webrtc_data_channel_->SetSctpSid(1);
+ SetChannelReady();
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state());
+
+ webrtc::DataBuffer buffer("");
+ EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state());
+}
+
+// Tests that a channel can be closed without being opened or assigned an sid.
+TEST_F(SctpDataChannelTest, NeverOpened) {
+ provider_.set_transport_available(true);
+ webrtc_data_channel_->OnTransportChannelCreated();
+ webrtc_data_channel_->Close();
+}
+
+class SctpSidAllocatorTest : public testing::Test {
+ protected:
+ SctpSidAllocator allocator_;
+};
+
+// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
+// SSL_SERVER.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationBasedOnRole) {
+ int id;
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+ EXPECT_EQ(1, id);
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+ EXPECT_EQ(0, id);
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+ EXPECT_EQ(3, id);
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+ EXPECT_EQ(2, id);
+}
+
+// Verifies that SCTP ids of existing DataChannels are not reused.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationNoReuse) {
+ int old_id = 1;
+ EXPECT_TRUE(allocator_.ReserveSid(old_id));
+
+ int new_id;
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &new_id));
+ EXPECT_NE(old_id, new_id);
+
+ old_id = 0;
+ EXPECT_TRUE(allocator_.ReserveSid(old_id));
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &new_id));
+ EXPECT_NE(old_id, new_id);
+}
+
+// Verifies that SCTP ids of removed DataChannels can be reused.
+TEST_F(SctpSidAllocatorTest, SctpIdReusedForRemovedDataChannel) {
+ int odd_id = 1;
+ int even_id = 0;
+ EXPECT_TRUE(allocator_.ReserveSid(odd_id));
+ EXPECT_TRUE(allocator_.ReserveSid(even_id));
+
+ int allocated_id = -1;
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+ EXPECT_EQ(odd_id + 2, allocated_id);
+
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+ EXPECT_EQ(even_id + 2, allocated_id);
+
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+ EXPECT_EQ(odd_id + 4, allocated_id);
+
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+ EXPECT_EQ(even_id + 4, allocated_id);
+
+ allocator_.ReleaseSid(odd_id);
+ allocator_.ReleaseSid(even_id);
+
+ // Verifies that removed ids are reused.
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+ EXPECT_EQ(odd_id, allocated_id);
+
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+ EXPECT_EQ(even_id, allocated_id);
+
+ // Verifies that used higher ids are not reused.
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+ EXPECT_EQ(odd_id + 6, allocated_id);
+
+ EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+ EXPECT_EQ(even_id + 6, allocated_id);
+}
diff --git a/talk/app/webrtc/datachannelinterface.h b/talk/app/webrtc/datachannelinterface.h
new file mode 100644
index 0000000000..d70972f05a
--- /dev/null
+++ b/talk/app/webrtc/datachannelinterface.h
@@ -0,0 +1,159 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for DataChannels
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcdatachannel
+
+#ifndef TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_
+#define TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/refcount.h"
+
+
+namespace webrtc {
+
+struct DataChannelInit {
+ DataChannelInit()
+ : reliable(false),
+ ordered(true),
+ maxRetransmitTime(-1),
+ maxRetransmits(-1),
+ negotiated(false),
+ id(-1) {
+ }
+
+ bool reliable; // Deprecated.
+ bool ordered; // True if ordered delivery is required.
+ int maxRetransmitTime; // The max period of time in milliseconds in which
+ // retransmissions will be sent. After this time, no
+ // more retransmissions will be sent. -1 if unset.
+ int maxRetransmits; // The max number of retransmissions. -1 if unset.
+ std::string protocol; // This is set by the application and opaque to the
+ // WebRTC implementation.
+  bool negotiated;       // True if the channel has been externally negotiated
+                         // and we do not send in-band signalling in the
+                         // form of an "open" message.
+ int id; // The stream id, or SID, for SCTP data channels. -1
+ // if unset.
+};
+
+struct DataBuffer {
+ DataBuffer(const rtc::Buffer& data, bool binary)
+ : data(data),
+ binary(binary) {
+ }
+ // For convenience for unit tests.
+ explicit DataBuffer(const std::string& text)
+ : data(text.data(), text.length()),
+ binary(false) {
+ }
+ size_t size() const { return data.size(); }
+
+ rtc::Buffer data;
+ // Indicates if the received data contains UTF-8 or binary data.
+ // Note that the upper layers are left to verify the UTF-8 encoding.
+ // TODO(jiayl): prefer to use an enum instead of a bool.
+ bool binary;
+};
+
+class DataChannelObserver {
+ public:
+  // The data channel state has changed.
+ virtual void OnStateChange() = 0;
+ // A data buffer was successfully received.
+ virtual void OnMessage(const DataBuffer& buffer) = 0;
+ // The data channel's buffered_amount has changed.
+ virtual void OnBufferedAmountChange(uint64_t previous_amount){};
+
+ protected:
+ virtual ~DataChannelObserver() {}
+};
+
+class DataChannelInterface : public rtc::RefCountInterface {
+ public:
+ // Keep in sync with DataChannel.java:State and
+ // RTCDataChannel.h:RTCDataChannelState.
+ enum DataState {
+ kConnecting,
+ kOpen, // The DataChannel is ready to send data.
+ kClosing,
+ kClosed
+ };
+
+ static const char* DataStateString(DataState state) {
+ switch (state) {
+ case kConnecting:
+ return "connecting";
+ case kOpen:
+ return "open";
+ case kClosing:
+ return "closing";
+ case kClosed:
+ return "closed";
+ }
+ RTC_CHECK(false) << "Unknown DataChannel state: " << state;
+ return "";
+ }
+
+ virtual void RegisterObserver(DataChannelObserver* observer) = 0;
+ virtual void UnregisterObserver() = 0;
+ // The label attribute represents a label that can be used to distinguish this
+ // DataChannel object from other DataChannel objects.
+ virtual std::string label() const = 0;
+ virtual bool reliable() const = 0;
+
+ // TODO(tommyw): Remove these dummy implementations when all classes have
+ // implemented these APIs. They should all just return the values the
+ // DataChannel was created with.
+ virtual bool ordered() const { return false; }
+ virtual uint16_t maxRetransmitTime() const { return 0; }
+ virtual uint16_t maxRetransmits() const { return 0; }
+ virtual std::string protocol() const { return std::string(); }
+ virtual bool negotiated() const { return false; }
+
+ virtual int id() const = 0;
+ virtual DataState state() const = 0;
+ // The buffered_amount returns the number of bytes of application data
+ // (UTF-8 text and binary data) that have been queued using SendBuffer but
+ // have not yet been transmitted to the network.
+ virtual uint64_t buffered_amount() const = 0;
+ virtual void Close() = 0;
+ // Sends |data| to the remote peer.
+ virtual bool Send(const DataBuffer& buffer) = 0;
+
+ protected:
+ virtual ~DataChannelInterface() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_
diff --git a/talk/app/webrtc/dtlsidentitystore.cc b/talk/app/webrtc/dtlsidentitystore.cc
new file mode 100644
index 0000000000..27587796bc
--- /dev/null
+++ b/talk/app/webrtc/dtlsidentitystore.cc
@@ -0,0 +1,248 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+
+#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+#include "webrtc/base/logging.h"
+
+using webrtc::DtlsIdentityRequestObserver;
+
+namespace webrtc {
+
+// Passed to SSLIdentity::Generate, "WebRTC". Used for the certificates'
+// subject and issuer name.
+const char kIdentityName[] = "WebRTC";
+
+namespace {
+
+enum {
+ MSG_DESTROY,
+ MSG_GENERATE_IDENTITY,
+ MSG_GENERATE_IDENTITY_RESULT
+};
+
+} // namespace
+
+// This class runs on the worker thread to generate the identity. It's necessary
+// to separate this class from DtlsIdentityStore so that it can live on the
+// worker thread after DtlsIdentityStore is destroyed.
+class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
+ public rtc::MessageHandler {
+ public:
+ WorkerTask(DtlsIdentityStoreImpl* store, rtc::KeyType key_type)
+ : signaling_thread_(rtc::Thread::Current()),
+ store_(store),
+ key_type_(key_type) {
+ store_->SignalDestroyed.connect(this, &WorkerTask::OnStoreDestroyed);
+ }
+
+ virtual ~WorkerTask() { RTC_DCHECK(signaling_thread_->IsCurrent()); }
+
+ private:
+ void GenerateIdentity_w() {
+ LOG(LS_INFO) << "Generating identity, using keytype " << key_type_;
+ rtc::scoped_ptr<rtc::SSLIdentity> identity(
+ rtc::SSLIdentity::Generate(kIdentityName, key_type_));
+
+ // Posting to |this| avoids touching |store_| on threads other than
+ // |signaling_thread_| and thus avoids having to use locks.
+ IdentityResultMessageData* msg = new IdentityResultMessageData(
+ new IdentityResult(key_type_, identity.Pass()));
+ signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
+ }
+
+ void OnMessage(rtc::Message* msg) override {
+ switch (msg->message_id) {
+ case MSG_GENERATE_IDENTITY:
+ // This message always runs on the worker thread.
+ GenerateIdentity_w();
+
+ // Must delete |this|, owned by msg->pdata, on the signaling thread to
+ // avoid races on disconnecting the signal.
+ signaling_thread_->Post(this, MSG_DESTROY, msg->pdata);
+ break;
+ case MSG_GENERATE_IDENTITY_RESULT:
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ {
+ rtc::scoped_ptr<IdentityResultMessageData> pdata(
+ static_cast<IdentityResultMessageData*>(msg->pdata));
+ if (store_) {
+ store_->OnIdentityGenerated(pdata->data()->key_type_,
+ pdata->data()->identity_.Pass());
+ }
+ }
+ break;
+ case MSG_DESTROY:
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ delete msg->pdata;
+ // |this| has now been deleted. Don't touch member variables.
+ break;
+ default:
+ RTC_CHECK(false) << "Unexpected message type";
+ }
+ }
+
+ void OnStoreDestroyed() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ store_ = nullptr;
+ }
+
+ rtc::Thread* const signaling_thread_;
+ DtlsIdentityStoreImpl* store_; // Only touched on |signaling_thread_|.
+ const rtc::KeyType key_type_;
+};
+
+DtlsIdentityStoreImpl::DtlsIdentityStoreImpl(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread)
+ : signaling_thread_(signaling_thread),
+ worker_thread_(worker_thread),
+ request_info_() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ // Preemptively generate identities unless the worker thread and signaling
+ // thread are the same (only do preemptive work in the background).
+ if (worker_thread_ != signaling_thread_) {
+ // Only necessary for RSA.
+ GenerateIdentity(rtc::KT_RSA, nullptr);
+ }
+}
+
+DtlsIdentityStoreImpl::~DtlsIdentityStoreImpl() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ SignalDestroyed();
+}
+
+void DtlsIdentityStoreImpl::RequestIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>& observer) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ RTC_DCHECK(observer);
+
+ GenerateIdentity(key_type, observer);
+}
+
+void DtlsIdentityStoreImpl::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ switch (msg->message_id) {
+ case MSG_GENERATE_IDENTITY_RESULT: {
+ rtc::scoped_ptr<IdentityResultMessageData> pdata(
+ static_cast<IdentityResultMessageData*>(msg->pdata));
+ OnIdentityGenerated(pdata->data()->key_type_,
+ pdata->data()->identity_.Pass());
+ break;
+ }
+ }
+}
+
+bool DtlsIdentityStoreImpl::HasFreeIdentityForTesting(
+ rtc::KeyType key_type) const {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return request_info_[key_type].free_identity_.get() != nullptr;
+}
+
+void DtlsIdentityStoreImpl::GenerateIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>& observer) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+
+ // Enqueue observer to be informed when generation of |key_type| is completed.
+ if (observer.get()) {
+ request_info_[key_type].request_observers_.push(observer);
+
+ // Already have a free identity generated?
+ if (request_info_[key_type].free_identity_.get()) {
+ // Return identity async - post even though we are on |signaling_thread_|.
+ LOG(LS_VERBOSE) << "Using a free DTLS identity.";
+ ++request_info_[key_type].gen_in_progress_counts_;
+ IdentityResultMessageData* msg = new IdentityResultMessageData(
+ new IdentityResult(key_type,
+ request_info_[key_type].free_identity_.Pass()));
+ signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
+ return;
+ }
+
+ // Free identity in the process of being generated?
+ if (request_info_[key_type].gen_in_progress_counts_ ==
+ request_info_[key_type].request_observers_.size()) {
+ // No need to do anything, the free identity will be returned to the
+ // observer in a MSG_GENERATE_IDENTITY_RESULT.
+ return;
+ }
+ }
+
+ // Enqueue/Post a worker task to do the generation.
+ ++request_info_[key_type].gen_in_progress_counts_;
+ WorkerTask* task = new WorkerTask(this, key_type); // Post 1 task/request.
+ // The WorkerTask is owned by the message data to make sure it will not be
+ // leaked even if the task does not get run.
+ WorkerTaskMessageData* msg = new WorkerTaskMessageData(task);
+ worker_thread_->Post(task, MSG_GENERATE_IDENTITY, msg);
+}
+
+void DtlsIdentityStoreImpl::OnIdentityGenerated(
+ rtc::KeyType key_type, rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+
+ RTC_DCHECK(request_info_[key_type].gen_in_progress_counts_);
+ --request_info_[key_type].gen_in_progress_counts_;
+
+ rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver> observer;
+ if (!request_info_[key_type].request_observers_.empty()) {
+ observer = request_info_[key_type].request_observers_.front();
+ request_info_[key_type].request_observers_.pop();
+ }
+
+ if (observer.get() == nullptr) {
+ // No observer - store result in |free_identities_|.
+ RTC_DCHECK(!request_info_[key_type].free_identity_.get());
+ request_info_[key_type].free_identity_.swap(identity);
+ if (request_info_[key_type].free_identity_.get())
+ LOG(LS_VERBOSE) << "A free DTLS identity was saved.";
+ else
+ LOG(LS_WARNING) << "Failed to generate DTLS identity (preemptively).";
+ } else {
+ // Return the result to the observer.
+ if (identity.get()) {
+ LOG(LS_VERBOSE) << "A DTLS identity is returned to an observer.";
+ observer->OnSuccess(identity.Pass());
+ } else {
+ LOG(LS_WARNING) << "Failed to generate DTLS identity.";
+ observer->OnFailure(0);
+ }
+
+ // Preemptively generate another identity of the same type?
+ if (worker_thread_ != signaling_thread_ && // Only do in background thread.
+ key_type == rtc::KT_RSA && // Only necessary for RSA.
+ !request_info_[key_type].free_identity_.get() &&
+ request_info_[key_type].request_observers_.size() <=
+ request_info_[key_type].gen_in_progress_counts_) {
+ GenerateIdentity(key_type, nullptr);
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/dtlsidentitystore.h b/talk/app/webrtc/dtlsidentitystore.h
new file mode 100644
index 0000000000..a0eef98e1b
--- /dev/null
+++ b/talk/app/webrtc/dtlsidentitystore.h
@@ -0,0 +1,164 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_DTLSIDENTITYSTORE_H_
+#define TALK_APP_WEBRTC_DTLSIDENTITYSTORE_H_
+
+#include <queue>
+#include <string>
+
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+// Passed to SSLIdentity::Generate.
+extern const char kIdentityName[];
+
+class SSLIdentity;
+class Thread;
+
+// Used to receive callbacks of DTLS identity requests.
+class DtlsIdentityRequestObserver : public rtc::RefCountInterface {
+ public:
+ virtual void OnFailure(int error) = 0;
+ // TODO(hbos): Unify the OnSuccess method once Chrome code is updated.
+ virtual void OnSuccess(const std::string& der_cert,
+ const std::string& der_private_key) = 0;
+ // |identity| is a scoped_ptr because rtc::SSLIdentity is not copyable and the
+ // client has to get the ownership of the object to make use of it.
+ virtual void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) = 0;
+
+ protected:
+ virtual ~DtlsIdentityRequestObserver() {}
+};
+
+// This interface defines an in-memory DTLS identity store, which generates DTLS
+// identities.
+// API calls must be made on the signaling thread and the callbacks are also
+// called on the signaling thread.
+class DtlsIdentityStoreInterface {
+ public:
+ virtual ~DtlsIdentityStoreInterface() { }
+
+ // The |observer| will be called when the requested identity is ready, or when
+ // identity generation fails.
+ // TODO(torbjorng,hbos): The following RequestIdentity is about to be removed,
+ // see below todo.
+ virtual void RequestIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
+ // Add default parameterization.
+ RequestIdentity(rtc::KeyParams(key_type), observer);
+ }
+ // TODO(torbjorng,hbos): Parameterized key types! The following
+ // RequestIdentity should replace the old one that takes rtc::KeyType. When
+ // the new one is implemented by Chromium and WebRTC the old one should be
+ // removed. crbug.com/544902, webrtc:5092.
+ virtual void RequestIdentity(
+ rtc::KeyParams key_params,
+ const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
+ // Drop parameterization.
+ RequestIdentity(key_params.type(), observer);
+ }
+};
+
+// The WebRTC default implementation of DtlsIdentityStoreInterface.
+// Identity generation is performed on the worker thread.
+class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
+ public rtc::MessageHandler {
+ public:
+  // This will preemptively start generating an RSA identity in the
+ // background if the worker thread is not the same as the signaling thread.
+ DtlsIdentityStoreImpl(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread);
+ ~DtlsIdentityStoreImpl() override;
+
+ // DtlsIdentityStoreInterface override;
+ void RequestIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) override;
+
+ // rtc::MessageHandler override;
+ void OnMessage(rtc::Message* msg) override;
+
+ // Returns true if there is a free RSA identity, used for unit tests.
+ bool HasFreeIdentityForTesting(rtc::KeyType key_type) const;
+
+ private:
+ void GenerateIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer);
+ void OnIdentityGenerated(rtc::KeyType key_type,
+ rtc::scoped_ptr<rtc::SSLIdentity> identity);
+
+ class WorkerTask;
+ typedef rtc::ScopedMessageData<DtlsIdentityStoreImpl::WorkerTask>
+ WorkerTaskMessageData;
+
+ // A key type-identity pair.
+ struct IdentityResult {
+ IdentityResult(rtc::KeyType key_type,
+ rtc::scoped_ptr<rtc::SSLIdentity> identity)
+ : key_type_(key_type), identity_(identity.Pass()) {}
+
+ rtc::KeyType key_type_;
+ rtc::scoped_ptr<rtc::SSLIdentity> identity_;
+ };
+
+ typedef rtc::ScopedMessageData<IdentityResult> IdentityResultMessageData;
+
+ sigslot::signal0<> SignalDestroyed;
+
+ rtc::Thread* const signaling_thread_;
+ // TODO(hbos): RSA generation is slow and would be VERY slow if we switch over
+ // to 2048, DtlsIdentityStore should use a new thread and not the "general
+ // purpose" worker thread.
+ rtc::Thread* const worker_thread_;
+
+ struct RequestInfo {
+ RequestInfo()
+ : request_observers_(), gen_in_progress_counts_(0), free_identity_() {}
+
+ std::queue<rtc::scoped_refptr<DtlsIdentityRequestObserver>>
+ request_observers_;
+ size_t gen_in_progress_counts_;
+ rtc::scoped_ptr<rtc::SSLIdentity> free_identity_;
+ };
+
+ // One RequestInfo per KeyType. Only touch on the |signaling_thread_|.
+ RequestInfo request_info_[rtc::KT_LAST];
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_DTLSIDENTITYSTORE_H_
diff --git a/talk/app/webrtc/dtlsidentitystore_unittest.cc b/talk/app/webrtc/dtlsidentitystore_unittest.cc
new file mode 100644
index 0000000000..e9242216f9
--- /dev/null
+++ b/talk/app/webrtc/dtlsidentitystore_unittest.cc
@@ -0,0 +1,152 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+
+#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/ssladapter.h"
+
+using webrtc::DtlsIdentityStoreImpl;
+
+static const int kTimeoutMs = 10000;
+
+class MockDtlsIdentityRequestObserver :
+ public webrtc::DtlsIdentityRequestObserver {
+ public:
+ MockDtlsIdentityRequestObserver()
+ : call_back_called_(false), last_request_success_(false) {}
+ void OnFailure(int error) override {
+ EXPECT_FALSE(call_back_called_);
+ call_back_called_ = true;
+ last_request_success_ = false;
+ }
+ void OnSuccess(const std::string& der_cert,
+ const std::string& der_private_key) override {
+ LOG(LS_WARNING) << "The string version of OnSuccess is called unexpectedly";
+ EXPECT_TRUE(false);
+ }
+ void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override {
+ EXPECT_FALSE(call_back_called_);
+ call_back_called_ = true;
+ last_request_success_ = true;
+ }
+
+ void Reset() {
+ call_back_called_ = false;
+ last_request_success_ = false;
+ }
+
+ bool LastRequestSucceeded() const {
+ return call_back_called_ && last_request_success_;
+ }
+
+ bool call_back_called() const {
+ return call_back_called_;
+ }
+
+ private:
+ bool call_back_called_;
+ bool last_request_success_;
+};
+
+class DtlsIdentityStoreTest : public testing::Test {
+ protected:
+ DtlsIdentityStoreTest()
+ : worker_thread_(new rtc::Thread()),
+ store_(new DtlsIdentityStoreImpl(rtc::Thread::Current(),
+ worker_thread_.get())),
+ observer_(
+ new rtc::RefCountedObject<MockDtlsIdentityRequestObserver>()) {
+ RTC_CHECK(worker_thread_->Start());
+ }
+ ~DtlsIdentityStoreTest() {}
+
+ static void SetUpTestCase() {
+ rtc::InitializeSSL();
+ }
+ static void TearDownTestCase() {
+ rtc::CleanupSSL();
+ }
+
+ rtc::scoped_ptr<rtc::Thread> worker_thread_;
+ rtc::scoped_ptr<DtlsIdentityStoreImpl> store_;
+ rtc::scoped_refptr<MockDtlsIdentityRequestObserver> observer_;
+};
+
+TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessRSA) {
+ EXPECT_TRUE_WAIT(store_->HasFreeIdentityForTesting(rtc::KT_RSA), kTimeoutMs);
+
+ store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+ EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+
+ EXPECT_TRUE_WAIT(store_->HasFreeIdentityForTesting(rtc::KT_RSA), kTimeoutMs);
+
+ observer_->Reset();
+
+ // Verifies that the callback is async when a free identity is ready.
+ store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+ EXPECT_FALSE(observer_->call_back_called());
+ EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+}
+
+TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessECDSA) {
+ // Since store currently does not preemptively generate free ECDSA identities
+ // we do not invoke HasFreeIdentityForTesting between requests.
+
+ store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+ EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+
+ observer_->Reset();
+
+ // Verifies that the callback is async when a free identity is ready.
+ store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+ EXPECT_FALSE(observer_->call_back_called());
+ EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+}
+
+TEST_F(DtlsIdentityStoreTest, DeleteStoreEarlyNoCrashRSA) {
+ EXPECT_FALSE(store_->HasFreeIdentityForTesting(rtc::KT_RSA));
+
+ store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+ store_.reset();
+
+ worker_thread_->Stop();
+ EXPECT_FALSE(observer_->call_back_called());
+}
+
+TEST_F(DtlsIdentityStoreTest, DeleteStoreEarlyNoCrashECDSA) {
+ EXPECT_FALSE(store_->HasFreeIdentityForTesting(rtc::KT_ECDSA));
+
+ store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+ store_.reset();
+
+ worker_thread_->Stop();
+ EXPECT_FALSE(observer_->call_back_called());
+}
+
diff --git a/talk/app/webrtc/dtmfsender.cc b/talk/app/webrtc/dtmfsender.cc
new file mode 100644
index 0000000000..3b311df320
--- /dev/null
+++ b/talk/app/webrtc/dtmfsender.cc
@@ -0,0 +1,257 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/dtmfsender.h"
+
+#include <ctype.h>
+
+#include <string>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+enum {
+ MSG_DO_INSERT_DTMF = 0,
+};
+
+// RFC4733
+// +-------+--------+------+---------+
+// | Event | Code | Type | Volume? |
+// +-------+--------+------+---------+
+// | 0--9 | 0--9 | tone | yes |
+// | * | 10 | tone | yes |
+// | # | 11 | tone | yes |
+// | A--D | 12--15 | tone | yes |
+// +-------+--------+------+---------+
+// The "," is a special event defined by the WebRTC spec. It means to delay for
+// 2 seconds before processing the next tone. We use -1 as its code.
+static const int kDtmfCodeTwoSecondDelay = -1;
+static const int kDtmfTwoSecondInMs = 2000;
+static const char kDtmfValidTones[] = ",0123456789*#ABCDabcd";
+static const char kDtmfTonesTable[] = ",0123456789*#ABCD";
+// The duration cannot be more than 6000ms or less than 70ms. The gap between
+// tones must be at least 50 ms.
+static const int kDtmfDefaultDurationMs = 100;
+static const int kDtmfMinDurationMs = 70;
+static const int kDtmfMaxDurationMs = 6000;
+static const int kDtmfDefaultGapMs = 50;
+static const int kDtmfMinGapMs = 50;
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code) {
+ // Convert a-d to A-D.
+ char event = toupper(tone);
+ const char* p = strchr(kDtmfTonesTable, event);
+ if (!p) {
+ return false;
+ }
+ *code = p - kDtmfTonesTable - 1;
+ return true;
+}
+
+rtc::scoped_refptr<DtmfSender> DtmfSender::Create(
+ AudioTrackInterface* track,
+ rtc::Thread* signaling_thread,
+ DtmfProviderInterface* provider) {
+ if (!track || !signaling_thread) {
+ return NULL;
+ }
+ rtc::scoped_refptr<DtmfSender> dtmf_sender(
+ new rtc::RefCountedObject<DtmfSender>(track, signaling_thread,
+ provider));
+ return dtmf_sender;
+}
+
+DtmfSender::DtmfSender(AudioTrackInterface* track,
+ rtc::Thread* signaling_thread,
+ DtmfProviderInterface* provider)
+ : track_(track),
+ observer_(NULL),
+ signaling_thread_(signaling_thread),
+ provider_(provider),
+ duration_(kDtmfDefaultDurationMs),
+ inter_tone_gap_(kDtmfDefaultGapMs) {
+ ASSERT(track_ != NULL);
+ ASSERT(signaling_thread_ != NULL);
+ if (provider_) {
+ ASSERT(provider_->GetOnDestroyedSignal() != NULL);
+ provider_->GetOnDestroyedSignal()->connect(
+ this, &DtmfSender::OnProviderDestroyed);
+ }
+}
+
+DtmfSender::~DtmfSender() {
+ if (provider_) {
+ ASSERT(provider_->GetOnDestroyedSignal() != NULL);
+ provider_->GetOnDestroyedSignal()->disconnect(this);
+ }
+ StopSending();
+}
+
+void DtmfSender::RegisterObserver(DtmfSenderObserverInterface* observer) {
+ observer_ = observer;
+}
+
+void DtmfSender::UnregisterObserver() {
+ observer_ = NULL;
+}
+
+bool DtmfSender::CanInsertDtmf() {
+ ASSERT(signaling_thread_->IsCurrent());
+ if (!provider_) {
+ return false;
+ }
+ return provider_->CanInsertDtmf(track_->id());
+}
+
+bool DtmfSender::InsertDtmf(const std::string& tones, int duration,
+ int inter_tone_gap) {
+ ASSERT(signaling_thread_->IsCurrent());
+
+ if (duration > kDtmfMaxDurationMs ||
+ duration < kDtmfMinDurationMs ||
+ inter_tone_gap < kDtmfMinGapMs) {
+ LOG(LS_ERROR) << "InsertDtmf is called with invalid duration or tones gap. "
+ << "The duration cannot be more than " << kDtmfMaxDurationMs
+ << "ms or less than " << kDtmfMinDurationMs << "ms. "
+ << "The gap between tones must be at least " << kDtmfMinGapMs << "ms.";
+ return false;
+ }
+
+ if (!CanInsertDtmf()) {
+ LOG(LS_ERROR)
+ << "InsertDtmf is called on DtmfSender that can't send DTMF.";
+ return false;
+ }
+
+ tones_ = tones;
+ duration_ = duration;
+ inter_tone_gap_ = inter_tone_gap;
+ // Clear the previous queue.
+ signaling_thread_->Clear(this, MSG_DO_INSERT_DTMF);
+ // Kick off a new DTMF task queue.
+ signaling_thread_->Post(this, MSG_DO_INSERT_DTMF);
+ return true;
+}
+
+const AudioTrackInterface* DtmfSender::track() const {
+ return track_;
+}
+
+std::string DtmfSender::tones() const {
+ return tones_;
+}
+
+int DtmfSender::duration() const {
+ return duration_;
+}
+
+int DtmfSender::inter_tone_gap() const {
+ return inter_tone_gap_;
+}
+
+void DtmfSender::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case MSG_DO_INSERT_DTMF: {
+ DoInsertDtmf();
+ break;
+ }
+ default: {
+ ASSERT(false);
+ break;
+ }
+ }
+}
+
+void DtmfSender::DoInsertDtmf() {
+ ASSERT(signaling_thread_->IsCurrent());
+
+ // Get the first DTMF tone from the tone buffer. Unrecognized characters will
+ // be ignored and skipped.
+ size_t first_tone_pos = tones_.find_first_of(kDtmfValidTones);
+ int code = 0;
+ if (first_tone_pos == std::string::npos) {
+ tones_.clear();
+    // Fire an "OnToneChange" event with an empty string and stop.
+ if (observer_) {
+ observer_->OnToneChange(std::string());
+ }
+ return;
+ } else {
+ char tone = tones_[first_tone_pos];
+ if (!GetDtmfCode(tone, &code)) {
+      // The find_first_of(kDtmfValidTones) should have guaranteed that
+      // |tone| is a valid DTMF tone.
+ ASSERT(false);
+ }
+ }
+
+ int tone_gap = inter_tone_gap_;
+ if (code == kDtmfCodeTwoSecondDelay) {
+ // Special case defined by WebRTC - The character',' indicates a delay of 2
+ // seconds before processing the next character in the tones parameter.
+ tone_gap = kDtmfTwoSecondInMs;
+ } else {
+ if (!provider_) {
+ LOG(LS_ERROR) << "The DtmfProvider has been destroyed.";
+ return;
+ }
+ // The provider starts playout of the given tone on the
+ // associated RTP media stream, using the appropriate codec.
+ if (!provider_->InsertDtmf(track_->id(), code, duration_)) {
+ LOG(LS_ERROR) << "The DtmfProvider can no longer send DTMF.";
+ return;
+ }
+ // Wait for the number of milliseconds specified by |duration_|.
+ tone_gap += duration_;
+ }
+
+  // Fire an "OnToneChange" event with the tone that's just processed.
+ if (observer_) {
+ observer_->OnToneChange(tones_.substr(first_tone_pos, 1));
+ }
+
+ // Erase the unrecognized characters plus the tone that's just processed.
+ tones_.erase(0, first_tone_pos + 1);
+
+ // Continue with the next tone.
+ signaling_thread_->PostDelayed(tone_gap, this, MSG_DO_INSERT_DTMF);
+}
+
+void DtmfSender::OnProviderDestroyed() {
+ LOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue.";
+ StopSending();
+ provider_ = NULL;
+}
+
+void DtmfSender::StopSending() {
+ signaling_thread_->Clear(this);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/dtmfsender.h b/talk/app/webrtc/dtmfsender.h
new file mode 100644
index 0000000000..6d23610c7d
--- /dev/null
+++ b/talk/app/webrtc/dtmfsender.h
@@ -0,0 +1,139 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_DTMFSENDER_H_
+#define TALK_APP_WEBRTC_DTMFSENDER_H_
+
+#include <string>
+
+#include "talk/app/webrtc/dtmfsenderinterface.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/proxy.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/refcount.h"
+
+// DtmfSender is the native implementation of the RTCDTMFSender defined by
+// the WebRTC W3C Editor's Draft.
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html
+
+namespace rtc {
+class Thread;
+}
+
+namespace webrtc {
+
+// This interface is called by DtmfSender to talk to the actual audio channel
+// to send DTMF.
+class DtmfProviderInterface {
+ public:
+ // Returns true if the audio track with given id (|track_id|) is capable
+ // of sending DTMF. Otherwise returns false.
+ virtual bool CanInsertDtmf(const std::string& track_id) = 0;
+ // Sends DTMF |code| via the audio track with given id (|track_id|).
+ // The |duration| indicates the length of the DTMF tone in ms.
+ // Returns true on success and false on failure.
+ virtual bool InsertDtmf(const std::string& track_id,
+ int code, int duration) = 0;
+ // Returns a |sigslot::signal0<>| signal. The signal should fire before
+ // the provider is destroyed.
+ virtual sigslot::signal0<>* GetOnDestroyedSignal() = 0;
+
+ protected:
+ virtual ~DtmfProviderInterface() {}
+};
+
+class DtmfSender
+ : public DtmfSenderInterface,
+ public sigslot::has_slots<>,
+ public rtc::MessageHandler {
+ public:
+ static rtc::scoped_refptr<DtmfSender> Create(
+ AudioTrackInterface* track,
+ rtc::Thread* signaling_thread,
+ DtmfProviderInterface* provider);
+
+ // Implements DtmfSenderInterface.
+ void RegisterObserver(DtmfSenderObserverInterface* observer) override;
+ void UnregisterObserver() override;
+ bool CanInsertDtmf() override;
+ bool InsertDtmf(const std::string& tones,
+ int duration,
+ int inter_tone_gap) override;
+ const AudioTrackInterface* track() const override;
+ std::string tones() const override;
+ int duration() const override;
+ int inter_tone_gap() const override;
+
+ protected:
+ DtmfSender(AudioTrackInterface* track,
+ rtc::Thread* signaling_thread,
+ DtmfProviderInterface* provider);
+ virtual ~DtmfSender();
+
+ private:
+ DtmfSender();
+
+ // Implements MessageHandler.
+ virtual void OnMessage(rtc::Message* msg);
+
+ // The DTMF sending task.
+ void DoInsertDtmf();
+
+ void OnProviderDestroyed();
+
+ void StopSending();
+
+ rtc::scoped_refptr<AudioTrackInterface> track_;
+ DtmfSenderObserverInterface* observer_;
+ rtc::Thread* signaling_thread_;
+ DtmfProviderInterface* provider_;
+ std::string tones_;
+ int duration_;
+ int inter_tone_gap_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(DtmfSender);
+};
+
+// Define proxy for DtmfSenderInterface.
+BEGIN_PROXY_MAP(DtmfSender)
+ PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
+ PROXY_METHOD0(void, UnregisterObserver)
+ PROXY_METHOD0(bool, CanInsertDtmf)
+ PROXY_METHOD3(bool, InsertDtmf, const std::string&, int, int)
+ PROXY_CONSTMETHOD0(const AudioTrackInterface*, track)
+ PROXY_CONSTMETHOD0(std::string, tones)
+ PROXY_CONSTMETHOD0(int, duration)
+ PROXY_CONSTMETHOD0(int, inter_tone_gap)
+END_PROXY()
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code);
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_DTMFSENDER_H_
diff --git a/talk/app/webrtc/dtmfsender_unittest.cc b/talk/app/webrtc/dtmfsender_unittest.cc
new file mode 100644
index 0000000000..f686aa2ccc
--- /dev/null
+++ b/talk/app/webrtc/dtmfsender_unittest.cc
@@ -0,0 +1,359 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/dtmfsender.h"
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
+
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrack;
+using webrtc::DtmfProviderInterface;
+using webrtc::DtmfSender;
+using webrtc::DtmfSenderObserverInterface;
+
+static const char kTestAudioLabel[] = "test_audio_track";
+static const int kMaxWaitMs = 3000;
+
+class FakeDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+ FakeDtmfObserver() : completed_(false) {}
+
+ // Implements DtmfSenderObserverInterface.
+ void OnToneChange(const std::string& tone) override {
+ LOG(LS_VERBOSE) << "FakeDtmfObserver::OnToneChange '" << tone << "'.";
+ tones_.push_back(tone);
+ if (tone.empty()) {
+ completed_ = true;
+ }
+ }
+
+ // getters
+ const std::vector<std::string>& tones() const {
+ return tones_;
+ }
+ bool completed() const {
+ return completed_;
+ }
+
+ private:
+ std::vector<std::string> tones_;
+ bool completed_;
+};
+
+class FakeDtmfProvider : public DtmfProviderInterface {
+ public:
+ struct DtmfInfo {
+ DtmfInfo(int code, int duration, int gap)
+ : code(code),
+ duration(duration),
+ gap(gap) {}
+ int code;
+ int duration;
+ int gap;
+ };
+
+ FakeDtmfProvider() : last_insert_dtmf_call_(0) {}
+
+ ~FakeDtmfProvider() {
+ SignalDestroyed();
+ }
+
+ // Implements DtmfProviderInterface.
+ bool CanInsertDtmf(const std::string& track_label) override {
+ return (can_insert_dtmf_tracks_.count(track_label) != 0);
+ }
+
+ bool InsertDtmf(const std::string& track_label,
+ int code,
+ int duration) override {
+ int gap = 0;
+ // TODO(ronghuawu): Make the timer (basically the rtc::TimeNanos)
+ // mockable and use a fake timer in the unit tests.
+ if (last_insert_dtmf_call_ > 0) {
+ gap = static_cast<int>(rtc::Time() - last_insert_dtmf_call_);
+ }
+ last_insert_dtmf_call_ = rtc::Time();
+
+ LOG(LS_VERBOSE) << "FakeDtmfProvider::InsertDtmf code=" << code
+ << " duration=" << duration
+ << " gap=" << gap << ".";
+ dtmf_info_queue_.push_back(DtmfInfo(code, duration, gap));
+ return true;
+ }
+
+ virtual sigslot::signal0<>* GetOnDestroyedSignal() {
+ return &SignalDestroyed;
+ }
+
+ // getter and setter
+ const std::vector<DtmfInfo>& dtmf_info_queue() const {
+ return dtmf_info_queue_;
+ }
+
+ // helper functions
+ void AddCanInsertDtmfTrack(const std::string& label) {
+ can_insert_dtmf_tracks_.insert(label);
+ }
+ void RemoveCanInsertDtmfTrack(const std::string& label) {
+ can_insert_dtmf_tracks_.erase(label);
+ }
+
+ private:
+ std::set<std::string> can_insert_dtmf_tracks_;
+ std::vector<DtmfInfo> dtmf_info_queue_;
+ int64_t last_insert_dtmf_call_;
+ sigslot::signal0<> SignalDestroyed;
+};
+
+class DtmfSenderTest : public testing::Test {
+ protected:
+ DtmfSenderTest()
+ : track_(AudioTrack::Create(kTestAudioLabel, NULL)),
+ observer_(new rtc::RefCountedObject<FakeDtmfObserver>()),
+ provider_(new FakeDtmfProvider()) {
+ provider_->AddCanInsertDtmfTrack(kTestAudioLabel);
+ dtmf_ = DtmfSender::Create(track_, rtc::Thread::Current(),
+ provider_.get());
+ dtmf_->RegisterObserver(observer_.get());
+ }
+
+ ~DtmfSenderTest() {
+ if (dtmf_.get()) {
+ dtmf_->UnregisterObserver();
+ }
+ }
+
+ // Constructs a list of DtmfInfo from |tones|, |duration| and
+ // |inter_tone_gap|.
+ void GetDtmfInfoFromString(const std::string& tones, int duration,
+ int inter_tone_gap,
+ std::vector<FakeDtmfProvider::DtmfInfo>* dtmfs) {
+ // Init extra_delay as -inter_tone_gap - duration to ensure the first
+ // DtmfInfo's gap field will be 0.
+ int extra_delay = -1 * (inter_tone_gap + duration);
+
+ std::string::const_iterator it = tones.begin();
+ for (; it != tones.end(); ++it) {
+ char tone = *it;
+ int code = 0;
+ webrtc::GetDtmfCode(tone, &code);
+ if (tone == ',') {
+ extra_delay = 2000; // 2 seconds
+ } else {
+ dtmfs->push_back(FakeDtmfProvider::DtmfInfo(code, duration,
+ duration + inter_tone_gap + extra_delay));
+ extra_delay = 0;
+ }
+ }
+ }
+
+ void VerifyExpectedState(AudioTrackInterface* track,
+ const std::string& tones,
+ int duration, int inter_tone_gap) {
+ EXPECT_EQ(track, dtmf_->track());
+ EXPECT_EQ(tones, dtmf_->tones());
+ EXPECT_EQ(duration, dtmf_->duration());
+ EXPECT_EQ(inter_tone_gap, dtmf_->inter_tone_gap());
+ }
+
+ // Verify the provider got all the expected calls.
+ void VerifyOnProvider(const std::string& tones, int duration,
+ int inter_tone_gap) {
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString(tones, duration, inter_tone_gap, &dtmf_queue_ref);
+ VerifyOnProvider(dtmf_queue_ref);
+ }
+
+ void VerifyOnProvider(
+ const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue_ref) {
+ const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue =
+ provider_->dtmf_info_queue();
+ ASSERT_EQ(dtmf_queue_ref.size(), dtmf_queue.size());
+ std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it_ref =
+ dtmf_queue_ref.begin();
+ std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it =
+ dtmf_queue.begin();
+ while (it_ref != dtmf_queue_ref.end() && it != dtmf_queue.end()) {
+ EXPECT_EQ(it_ref->code, it->code);
+ EXPECT_EQ(it_ref->duration, it->duration);
+ // Allow ~100ms error.
+ EXPECT_GE(it_ref->gap, it->gap - 100);
+ EXPECT_LE(it_ref->gap, it->gap + 100);
+ ++it_ref;
+ ++it;
+ }
+ }
+
+ // Verify the observer got all the expected callbacks.
+ // One callback is expected per tone in |tones_ref| plus a final empty
+ // string marking completion. Only the first character of each observed
+ // callback string is compared against the reference tone.
+ void VerifyOnObserver(const std::string& tones_ref) {
+ const std::vector<std::string>& tones = observer_->tones();
+ // The observer will get an empty string at the end.
+ EXPECT_EQ(tones_ref.size() + 1, tones.size());
+ EXPECT_TRUE(tones.back().empty());
+ std::string::const_iterator it_ref = tones_ref.begin();
+ std::vector<std::string>::const_iterator it = tones.begin();
+ while (it_ref != tones_ref.end() && it != tones.end()) {
+ EXPECT_EQ(*it_ref, it->at(0));
+ ++it_ref;
+ ++it;
+ }
+ }
+
+ rtc::scoped_refptr<AudioTrackInterface> track_;
+ rtc::scoped_ptr<FakeDtmfObserver> observer_;
+ rtc::scoped_ptr<FakeDtmfProvider> provider_;
+ rtc::scoped_refptr<DtmfSender> dtmf_;
+};
+
+// CanInsertDtmf() should mirror whether the provider still has a
+// DTMF-capable track registered for the test audio label.
+TEST_F(DtmfSenderTest, CanInsertDtmf) {
+ EXPECT_TRUE(dtmf_->CanInsertDtmf());
+ provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
+ EXPECT_FALSE(dtmf_->CanInsertDtmf());
+}
+
+// InsertDtmf() with a mix of valid and invalid characters should play
+// only the recognized tones ("1a*") and silently skip the rest.
+TEST_F(DtmfSenderTest, InsertDtmf) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+ // The unrecognized characters should be ignored.
+ std::string known_tones = "1a*";
+ VerifyOnProvider(known_tones, duration, inter_tone_gap);
+ VerifyOnObserver(known_tones);
+}
+
+// A second InsertDtmf() call while the first is still playing should
+// cancel the remaining tones of the first buffer and play the new one;
+// the combined result here is "1" (from the first call) then "ab".
+TEST_F(DtmfSenderTest, InsertDtmfTwice) {
+ std::string tones1 = "12";
+ std::string tones2 = "ab";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+ VerifyExpectedState(track_, tones1, duration, inter_tone_gap);
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+ VerifyExpectedState(track_, "2", duration, inter_tone_gap);
+ // Insert with another tone buffer.
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+ VerifyExpectedState(track_, tones2, duration, inter_tone_gap);
+ // Wait until it's completed.
+ EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+ GetDtmfInfoFromString("ab", duration, inter_tone_gap, &dtmf_queue_ref);
+ VerifyOnProvider(dtmf_queue_ref);
+ VerifyOnObserver("1ab");
+}
+
+// Deleting the provider mid-playback should stop the queue: no tone
+// callbacks beyond the one already delivered.
+TEST_F(DtmfSenderTest, InsertDtmfWhileProviderIsDeleted) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+ // Delete provider.
+ provider_.reset();
+ // The queue should be discontinued so no more tone callbacks.
+ WAIT(false, 200);
+ EXPECT_EQ(1U, observer_->tones().size());
+}
+
+// Dropping the last reference to the sender mid-playback should likewise
+// stop the queue: no tone callbacks beyond the one already delivered.
+TEST_F(DtmfSenderTest, InsertDtmfWhileSenderIsDeleted) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+ // Delete the sender.
+ dtmf_ = NULL;
+ // The queue should be discontinued so no more tone callbacks.
+ WAIT(false, 200);
+ EXPECT_EQ(1U, observer_->tones().size());
+}
+
+// Inserting an empty tone string cancels the pending task from the
+// previous InsertDtmf(); only the tone already sent ("1") is reported.
+TEST_F(DtmfSenderTest, InsertEmptyTonesToCancelPreviousTask) {
+ std::string tones1 = "12";
+ std::string tones2 = "";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+ // Insert with another tone buffer.
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+ // Wait until it's completed.
+ EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+ VerifyOnProvider(dtmf_queue_ref);
+ VerifyOnObserver("1");
+}
+
+// Flaky when run in parallel.
+// See https://code.google.com/p/webrtc/issues/detail?id=4219.
+// Per the DtmfSenderInterface contract, ',' is not a tone but a 2-second
+// delay inserted before processing the next character.
+TEST_F(DtmfSenderTest, DISABLED_InsertDtmfWithCommaAsDelay) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+ VerifyOnProvider(tones, duration, inter_tone_gap);
+ VerifyOnObserver(tones);
+}
+
+// InsertDtmf() should fail when the provider no longer has a
+// DTMF-capable track for this label.
+TEST_F(DtmfSenderTest, TryInsertDtmfWhenItDoesNotWork) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
+
+// Parameter validation: duration must be within [70, 6000] ms and the
+// inter-tone gap at least 50 ms (see DtmfSenderInterface::InsertDtmf).
+// Out-of-range values (6001, 69, 49) are rejected; valid ones succeed.
+TEST_F(DtmfSenderTest, InsertDtmfWithInvalidDurationOrGap) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, 6001, inter_tone_gap));
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, 69, inter_tone_gap));
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, 49));
+
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
diff --git a/talk/app/webrtc/dtmfsenderinterface.h b/talk/app/webrtc/dtmfsenderinterface.h
new file mode 100644
index 0000000000..7fbf57af23
--- /dev/null
+++ b/talk/app/webrtc/dtmfsenderinterface.h
@@ -0,0 +1,105 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_
+#define TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/refcount.h"
+
+// This file contains interfaces for DtmfSender.
+
+namespace webrtc {
+
+// DtmfSender callback interface. Application should implement this interface
+// to get notifications from the DtmfSender.
+class DtmfSenderObserverInterface {
+ public:
+ // Triggered when DTMF |tone| is sent.
+ // If |tone| is empty that means the DtmfSender has sent out all the given
+ // tones.
+ virtual void OnToneChange(const std::string& tone) = 0;
+
+ protected:
+ // Non-public, non-virtual destructor: observers are not owned by the
+ // sender and must not be deleted through this interface.
+ virtual ~DtmfSenderObserverInterface() {}
+};
+
+// The interface of native implementation of the RTCDTMFSender defined by the
+// WebRTC W3C Editor's Draft.
+class DtmfSenderInterface : public rtc::RefCountInterface {
+ public:
+ // Registers/unregisters the observer that receives OnToneChange
+ // callbacks for each tone played and for completion (empty string).
+ virtual void RegisterObserver(DtmfSenderObserverInterface* observer) = 0;
+ virtual void UnregisterObserver() = 0;
+
+ // Returns true if this DtmfSender is capable of sending DTMF.
+ // Otherwise returns false.
+ virtual bool CanInsertDtmf() = 0;
+
+ // Queues a task that sends the DTMF |tones|. The |tones| parameter is treated
+ // as a series of characters. The characters 0 through 9, A through D, #, and
+ // * generate the associated DTMF tones. The characters a to d are equivalent
+ // to A to D. The character ',' indicates a delay of 2 seconds before
+ // processing the next character in the tones parameter.
+ // Unrecognized characters are ignored.
+ // The |duration| parameter indicates the duration in ms to use for each
+ // character passed in the |tones| parameter.
+ // The duration cannot be more than 6000 or less than 70.
+ // The |inter_tone_gap| parameter indicates the gap between tones in ms.
+ // The |inter_tone_gap| must be at least 50 ms but should be as short as
+ // possible.
+ // If InsertDtmf is called on the same object while an existing task for this
+ // object to generate DTMF is still running, the previous task is canceled.
+ // Returns true on success and false on failure.
+ virtual bool InsertDtmf(const std::string& tones, int duration,
+ int inter_tone_gap) = 0;
+
+ // Returns the track given as argument to the constructor.
+ virtual const AudioTrackInterface* track() const = 0;
+
+ // Returns the tones remaining to be played out.
+ virtual std::string tones() const = 0;
+
+ // Returns the current tone duration value in ms.
+ // This value will be the value last set via the InsertDtmf() method, or the
+ // default value of 100 ms if InsertDtmf() was never called.
+ virtual int duration() const = 0;
+
+ // Returns the current value of the between-tone gap in ms.
+ // This value will be the value last set via the InsertDtmf() method, or the
+ // default value of 50 ms if InsertDtmf() was never called.
+ virtual int inter_tone_gap() const = 0;
+
+ protected:
+ // Protected: lifetime is managed through rtc::RefCountInterface.
+ virtual ~DtmfSenderInterface() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_
diff --git a/talk/app/webrtc/fakemediacontroller.h b/talk/app/webrtc/fakemediacontroller.h
new file mode 100644
index 0000000000..5bf3e5fcf8
--- /dev/null
+++ b/talk/app/webrtc/fakemediacontroller.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_FAKEMEDIACONTROLLER_H_
+#define TALK_APP_WEBRTC_FAKEMEDIACONTROLLER_H_
+
+#include "talk/app/webrtc/mediacontroller.h"
+#include "webrtc/base/checks.h"
+
+namespace cricket {
+
+// Test-only MediaControllerInterface that simply hands back the
+// ChannelManager and Call supplied at construction. It does NOT take
+// ownership of either pointer; both must be non-null (enforced by
+// RTC_DCHECK) and are expected to outlive this object.
+class FakeMediaController : public webrtc::MediaControllerInterface {
+ public:
+ explicit FakeMediaController(cricket::ChannelManager* channel_manager,
+ webrtc::Call* call)
+ : channel_manager_(channel_manager), call_(call) {
+ RTC_DCHECK(nullptr != channel_manager_);
+ RTC_DCHECK(nullptr != call_);
+ }
+ ~FakeMediaController() override {}
+ // Note: "call_w" is the accessor name declared by
+ // MediaControllerInterface, not a typo.
+ webrtc::Call* call_w() override { return call_; }
+ cricket::ChannelManager* channel_manager() const override {
+ return channel_manager_;
+ }
+
+ private:
+ // Raw, non-owning pointers supplied by the test.
+ cricket::ChannelManager* channel_manager_;
+ webrtc::Call* call_;
+};
+} // namespace cricket
+#endif // TALK_APP_WEBRTC_FAKEMEDIACONTROLLER_H_
diff --git a/talk/app/webrtc/fakemetricsobserver.cc b/talk/app/webrtc/fakemetricsobserver.cc
new file mode 100644
index 0000000000..4a100a079e
--- /dev/null
+++ b/talk/app/webrtc/fakemetricsobserver.cc
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/fakemetricsobserver.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+// Starts with all counters cleared and all histogram samples zeroed.
+FakeMetricsObserver::FakeMetricsObserver() {
+ Reset();
+}
+
+// Clears all recorded enum counters and histogram samples. Like every
+// other method here, it must be called on the thread that created the
+// observer (enforced by |thread_checker_|).
+void FakeMetricsObserver::Reset() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ counters_.clear();
+ memset(histogram_samples_, 0, sizeof(histogram_samples_));
+}
+
+// Increments the count for |counter| under counter type |type|, growing
+// the per-type vector lazily on first use. |counter_max| is part of the
+// MetricsObserverInterface signature but is deliberately unused by this
+// fake (the map-based storage needs no upper bound).
+void FakeMetricsObserver::IncrementEnumCounter(
+ PeerConnectionEnumCounterType type,
+ int counter,
+ int counter_max) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (counters_.size() <= static_cast<size_t>(type)) {
+ counters_.resize(type + 1);
+ }
+ auto& counters = counters_[type];
+ ++counters[counter];
+}
+
+// Records a single histogram sample for |type|. Only one sample per
+// metric is supported: the RTC_DCHECK_EQ fires if a sample was already
+// recorded for this type since the last Reset().
+void FakeMetricsObserver::AddHistogramSample(PeerConnectionMetricsName type,
+ int value) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK_EQ(histogram_samples_[type], 0);
+ histogram_samples_[type] = value;
+}
+
+// Returns the recorded count for |counter| of counter type |type|, or 0
+// if that particular counter was never incremented. Note the RTC_CHECK:
+// calling this for a |type| that was never incremented at all (vector
+// never grown to cover it) is a fatal error.
+int FakeMetricsObserver::GetEnumCounter(PeerConnectionEnumCounterType type,
+ int counter) const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_CHECK(counters_.size() > static_cast<size_t>(type));
+ const auto& it = counters_[type].find(counter);
+ if (it == counters_[type].end()) {
+ return 0;
+ }
+ return it->second;
+}
+
+// Returns the sample recorded for |type| via AddHistogramSample(), or 0
+// if none was recorded since the last Reset().
+int FakeMetricsObserver::GetHistogramSample(
+ PeerConnectionMetricsName type) const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return histogram_samples_[type];
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/fakemetricsobserver.h b/talk/app/webrtc/fakemetricsobserver.h
new file mode 100644
index 0000000000..e3a22841d8
--- /dev/null
+++ b/talk/app/webrtc/fakemetricsobserver.h
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
+#define TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
+
+#include <map>
+#include <string>
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+// In-memory MetricsObserverInterface implementation for tests: records
+// enum counters and histogram samples so tests can inspect them via the
+// accessors below. All methods are single-threaded (thread_checker_).
+class FakeMetricsObserver : public MetricsObserverInterface {
+ public:
+ FakeMetricsObserver();
+ // Clears all recorded counters and samples.
+ void Reset();
+
+ void IncrementEnumCounter(PeerConnectionEnumCounterType,
+ int counter,
+ int counter_max) override;
+ void AddHistogramSample(PeerConnectionMetricsName type,
+ int value) override;
+
+ // Accessors to be used by the tests.
+ int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const;
+ int GetHistogramSample(PeerConnectionMetricsName type) const;
+
+ protected:
+ // Non-public destructor: instances are managed through the
+ // (ref-counted) observer interface — presumably via scoped_refptr in
+ // tests; confirm against MetricsObserverInterface.
+ ~FakeMetricsObserver() {}
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ // The vector contains maps for each counter type. In the map, it's a mapping
+ // from individual counter to its count, such that it's memory efficient when
+ // comes to sparse enum types, like the SSL ciphers in the IANA registry.
+ std::vector<std::map<int, int>> counters_;
+ // One sample slot per metric; AddHistogramSample() allows at most one
+ // sample per metric between Resets.
+ int histogram_samples_[kPeerConnectionMetricsName_Max];
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
diff --git a/talk/app/webrtc/fakeportallocatorfactory.h b/talk/app/webrtc/fakeportallocatorfactory.h
new file mode 100644
index 0000000000..f326b62043
--- /dev/null
+++ b/talk/app/webrtc/fakeportallocatorfactory.h
@@ -0,0 +1,76 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file defines a fake port allocator factory used for testing.
+// This implementation creates instances of cricket::FakePortAllocator.
+
+#ifndef TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
+#define TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
+
+namespace webrtc {
+
+// Test factory that creates cricket::FakePortAllocator instances and
+// records the STUN/TURN configurations passed to CreatePortAllocator()
+// so tests can inspect them via stun_configs()/turn_configs().
+class FakePortAllocatorFactory : public PortAllocatorFactoryInterface {
+ public:
+ // Returns a new ref-counted instance (callers adopt the reference).
+ static FakePortAllocatorFactory* Create() {
+ rtc::RefCountedObject<FakePortAllocatorFactory>* allocator =
+ new rtc::RefCountedObject<FakePortAllocatorFactory>();
+ return allocator;
+ }
+
+ // Stores copies of the given configurations, then returns a new
+ // FakePortAllocator bound to the current thread. Ownership of the
+ // returned allocator passes to the caller.
+ virtual cricket::PortAllocator* CreatePortAllocator(
+ const std::vector<StunConfiguration>& stun_configurations,
+ const std::vector<TurnConfiguration>& turn_configurations) {
+ stun_configs_ = stun_configurations;
+ turn_configs_ = turn_configurations;
+ return new cricket::FakePortAllocator(rtc::Thread::Current(), NULL);
+ }
+
+ // Accessors for the configurations recorded by the last
+ // CreatePortAllocator() call.
+ const std::vector<StunConfiguration>& stun_configs() const {
+ return stun_configs_;
+ }
+
+ const std::vector<TurnConfiguration>& turn_configs() const {
+ return turn_configs_;
+ }
+
+ // Intentionally a no-op in this fake.
+ void SetNetworkIgnoreMask(int network_ignore_mask) {}
+
+ protected:
+ FakePortAllocatorFactory() {}
+ ~FakePortAllocatorFactory() {}
+
+ private:
+ std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_configs_;
+ std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_configs_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/java/README b/talk/app/webrtc/java/README
new file mode 100644
index 0000000000..454046cb12
--- /dev/null
+++ b/talk/app/webrtc/java/README
@@ -0,0 +1,23 @@
+This directory holds a Java implementation of the webrtc::PeerConnection API, as
+well as the JNI glue C++ code that lets the Java implementation reuse the C++
+implementation of the same API.
+
+To build the Java API and related tests, build with
+OS=linux or OS=android and include
+build_with_libjingle=1 build_with_chromium=0
+in $GYP_DEFINES.
+
+To use the Java API, start by looking at the public interface of
+org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
+
+To understand the implementation of the API, see the native code in jni/.
+
+An example command-line to build & run the unittest:
+cd path/to/trunk
+GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 java_home=path/to/JDK" gclient runhooks && \
+ ninja -C out/Debug libjingle_peerconnection_java_unittest && \
+ ./out/Debug/libjingle_peerconnection_java_unittest
+(where path/to/JDK should contain include/jni.h)
+
+During development it can be helpful to run the JVM with the -Xcheck:jni flag.
+
diff --git a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000000..097d1cd906
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,119 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+import android.util.Size;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+// Camera enumerator backed by the android.hardware.camera2 API.
+// Only usable on API level 21 (LOLLIPOP) and above; see isSupported().
+public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
+ private final static String TAG = "Camera2Enumerator";
+ private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+ private final CameraManager cameraManager;
+ // Each entry contains the supported formats for a given camera index. The formats are enumerated
+ // lazily in getSupportedFormats(), and cached for future reference.
+ private final Map<Integer, List<CaptureFormat>> cachedSupportedFormats =
+ new HashMap<Integer, List<CaptureFormat>>();
+
+ // Returns true if the camera2 API is available on this device.
+ public static boolean isSupported() {
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
+ }
+
+ public Camera2Enumerator(Context context) {
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ // Returns the supported YUV_420_888 capture formats for |cameraId|,
+ // computing them on first use and serving cached results afterwards.
+ // Returns an empty list if the camera characteristics cannot be read.
+ @Override
+ public List<CaptureFormat> getSupportedFormats(int cameraId) {
+ synchronized (cachedSupportedFormats) {
+ if (cachedSupportedFormats.containsKey(cameraId)) {
+ return cachedSupportedFormats.get(cameraId);
+ }
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(Integer.toString(cameraId));
+ } catch (Exception ex) {
+ Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+ return new ArrayList<CaptureFormat>();
+ }
+
+ // Calculate default max fps from auto-exposure ranges in case getOutputMinFrameDuration() is
+ // not supported.
+ final Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ int defaultMaxFps = 0;
+ for (Range<Integer> fpsRange : fpsRanges) {
+ defaultMaxFps = Math.max(defaultMaxFps, fpsRange.getUpper());
+ }
+
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ final Size[] sizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888);
+ if (sizes == null) {
+ throw new RuntimeException("ImageFormat.YUV_420_888 not supported.");
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ for (Size size : sizes) {
+ long minFrameDurationNs = 0;
+ try {
+ minFrameDurationNs = streamMap.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size);
+ } catch (Exception e) {
+ // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+ }
+ // Frame rates are passed as fps * 1000 — presumably the fixed-point
+ // unit expected by CaptureFormat; confirm against its declaration.
+ final int maxFps = (minFrameDurationNs == 0)
+ ? defaultMaxFps
+ : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs);
+ formatList.add(new CaptureFormat(size.getWidth(), size.getHeight(), 0, maxFps * 1000));
+ }
+ cachedSupportedFormats.put(cameraId, formatList);
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+ }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000000..3e37f6afdc
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,236 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+import static java.lang.Math.ceil;
+import android.hardware.Camera;
+import android.graphics.ImageFormat;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import org.webrtc.Logging;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+ private final static String TAG = "CameraEnumerationAndroid";
+ // Synchronized on |CameraEnumerationAndroid.this|.
+ private static Enumerator enumerator = new CameraEnumerator();
+
+ public interface Enumerator {
+ /**
+ * Returns a list of supported CaptureFormats for the camera with index |cameraId|.
+ */
+ List<CaptureFormat> getSupportedFormats(int cameraId);
+ }
+
+ public static synchronized void setEnumerator(Enumerator enumerator) {
+ CameraEnumerationAndroid.enumerator = enumerator;
+ }
+
+ public static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ return enumerator.getSupportedFormats(cameraId);
+ }
+
+ public static class CaptureFormat {
+ public final int width;
+ public final int height;
+ public final int maxFramerate;
+ public final int minFramerate;
+ // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support
+ // other image formats then this needs to be updated and
+ // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
+ // all imageFormats.
+ public final int imageFormat = ImageFormat.YV12;
+
+ public CaptureFormat(int width, int height, int minFramerate,
+ int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.minFramerate = minFramerate;
+ this.maxFramerate = maxFramerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up
+ // multiple of 16 of the width and width and height are always even.
+ // Android guarantees this:
+ // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.YV12) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-YV12 image formats.");
+ }
+ int yStride = roundUp(width, 16);
+ int uvStride = roundUp(yStride / 2, 16);
+ int ySize = yStride * height;
+ int uvSize = uvStride * height / 2;
+ return ySize + uvSize * 2;
+ }
+
+ // Rounds up |x| to the closest value that is a multiple of |alignment|.
+ private static int roundUp(int x, int alignment) {
+ return (int)ceil(x / (double)alignment) * alignment;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (!(that instanceof CaptureFormat)) {
+ return false;
+ }
+ final CaptureFormat c = (CaptureFormat) that;
+ return width == c.width && height == c.height && maxFramerate == c.maxFramerate
+ && minFramerate == c.minFramerate;
+ }
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ public static String[] getDeviceNames() {
+ String[] names = new String[Camera.getNumberOfCameras()];
+ for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ names[i] = getDeviceName(i);
+ }
+ return names;
+ }
+
+ // Returns number of cameras on device.
+ public static int getDeviceCount() {
+ return Camera.getNumberOfCameras();
+ }
+
+ // Returns the name of the camera with camera index. Returns null if the
+ // camera can not be used.
+ public static String getDeviceName(int index) {
+ Camera.CameraInfo info = new Camera.CameraInfo();
+ try {
+ Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo failed on index " + index,e);
+ return null;
+ }
+
+ String facing =
+ (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing
+ + ", Orientation " + info.orientation;
+ }
+
+ // Returns the name of the front facing camera. Returns null if the
+ // camera can not be used or does not exist.
+ public static String getNameOfFrontFacingDevice() {
+ return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_FRONT);
+ }
+
+ // Returns the name of the back facing camera. Returns null if the
+ // camera can not be used or does not exist.
+ public static String getNameOfBackFacingDevice() {
+ return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_BACK);
+ }
+
+ public static String getSupportedFormatsAsJson(int id) throws JSONException {
+ List<CaptureFormat> formats = getSupportedFormats(id);
+ JSONArray json_formats = new JSONArray();
+ for (CaptureFormat format : formats) {
+ JSONObject json_format = new JSONObject();
+ json_format.put("width", format.width);
+ json_format.put("height", format.height);
+ json_format.put("framerate", (format.maxFramerate + 999) / 1000);
+ json_formats.put(json_format);
+ }
+ Logging.d(TAG, "Supported formats for camera " + id + ": "
+ + json_formats.toString(2));
+ return json_formats.toString();
+ }
+
+ // Helper class for finding the closest supported format for the two functions below.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ public static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) {
+ List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange.isEmpty()) {
+ Logging.w(TAG, "No supported preview fps range");
+ return new int[]{0, 0};
+ }
+ return Collections.min(listFpsRange,
+ new ClosestComparator<int[]>() {
+ @Override int diff(int[] range) {
+ return abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
+ + abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ }
+ });
+ }
+
+ public static Camera.Size getClosestSupportedSize(
+ List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ return Collections.min(supportedSizes,
+ new ClosestComparator<Camera.Size>() {
+ @Override int diff(Camera.Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ private static String getNameOfDevice(int facing) {
+ final Camera.CameraInfo info = new Camera.CameraInfo();
+ for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ try {
+ Camera.getCameraInfo(i, info);
+ if (info.facing == facing) {
+ return getDeviceName(i);
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
+ }
+ }
+ return null;
+ }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000000..2f35dc3493
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,102 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.hardware.Camera;
+import android.os.SystemClock;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
+ private final static String TAG = "CameraEnumerator";
+ // Each entry contains the supported formats for corresponding camera index. The formats for all
+ // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+ // reference.
+ private List<List<CaptureFormat>> cachedSupportedFormats;
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(int cameraId) {
+ synchronized (this) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final Camera.Parameters parameters;
+ Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
new file mode 100644
index 0000000000..2ee36882e8
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -0,0 +1,271 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.SurfaceHolder;
+
+import org.webrtc.Logging;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
+ */
+public final class EglBase {
+ private static final String TAG = "EglBase";
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+ // This is similar to how GlSurfaceView does:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ private static final int EGL_OPENGL_ES2_BIT = 4;
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+ // Android-specific extension.
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ private ConfigType configType;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGLConfig constructor type. Influences eglChooseConfig arguments.
+ public static enum ConfigType {
+ // No special parameters.
+ PLAIN,
+ // Configures with EGL_SURFACE_TYPE = EGL_PBUFFER_BIT.
+ PIXEL_BUFFER,
+ // Configures with EGL_RECORDABLE_ANDROID = 1.
+ // Discourages EGL from using pixel formats that cannot efficiently be
+ // converted to something usable by the video encoder.
+ RECORDABLE
+ }
+
+ // Create root context without any EGLSurface or parent EGLContext. This can be used for branching
+ // new contexts that share data.
+ public EglBase() {
+ this(EGL10.EGL_NO_CONTEXT, ConfigType.PLAIN);
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ public EglBase(EGLContext sharedContext, ConfigType configType) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ this.configType = configType;
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configType);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ // Create EGLSurface from the Android SurfaceHolder.
+ public void createSurface(SurfaceHolder surfaceHolder) {
+ createSurfaceInternal(surfaceHolder);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (configType == ConfigType.PIXEL_BUFFER) {
+ Logging.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
+ }
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (configType != ConfigType.PIXEL_BUFFER) {
+ throw new RuntimeException(
+ "This EGL context is not configured to use a pixel buffer: " + configType);
+ }
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ public EGLContext getContext() {
+ return eglContext;
+ }
+
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ public void detachCurrent() {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL10 display");
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new RuntimeException("Unable to initialize EGL10");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
+ // Always RGB888, GLES2.
+ int[] configAttributes = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE, 0, // Allocate dummy fields for specific options.
+ EGL10.EGL_NONE
+ };
+
+ // Fill in dummy fields based on configType.
+ switch (configType) {
+ case PLAIN:
+ break;
+ case PIXEL_BUFFER:
+ configAttributes[configAttributes.length - 3] = EGL10.EGL_SURFACE_TYPE;
+ configAttributes[configAttributes.length - 2] = EGL10.EGL_PBUFFER_BIT;
+ break;
+ case RECORDABLE:
+ configAttributes[configAttributes.length - 3] = EGL_RECORDABLE_ANDROID;
+ configAttributes[configAttributes.length - 2] = 1;
+ break;
+ default:
+ throw new IllegalArgumentException();
+ }
+
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(
+ eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new RuntimeException("Unable to find RGB888 " + configType + " EGL config");
+ }
+ return configs[0];
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLContext createEglContext(
+ EGLContext sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ EGLContext eglContext =
+ egl.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes);
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000000..2cb8af754d
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,272 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import org.webrtc.GlShader;
+import org.webrtc.GlUtil;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+/**
+ * Helper class to draw a quad that covers the entire viewport. Rotation, mirror, and cropping is
+ * specified using a 4x4 texture coordinate transform matrix. The frame input can either be an OES
+ * texture or YUV textures in I420 format. The GL state must be preserved between draw calls, this
+ * is intentional to maximize performance. The function release() must be called manually to free
+ * the resources held by this object.
+ */
+public class GlRectDrawer {
+ // Simple vertex shader, used for both YUV and OES.
+ private static final String VERTEX_SHADER_STRING =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String YUV_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D y_tex;\n"
+ + "uniform sampler2D u_tex;\n"
+ + "uniform sampler2D v_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ // CSC according to http://www.fourcc.org/fccyvrgb.php
+ + " float y = texture2D(y_tex, interp_tc).r;\n"
+ + " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ + " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ + " gl_FragColor = vec4(y + 1.403 * v, "
+ + " y - 0.344 * u - 0.714 * v, "
+ + " y + 1.77 * u, 1);\n"
+ + "}\n";
+
+ private static final String RGB_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D rgb_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+ + "}\n";
+
+ private static final String OES_FRAGMENT_SHADER_STRING =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oes_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
+ // top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ // The keys are one of the fragments shaders above.
+ private final Map<String, GlShader> shaders = new IdentityHashMap<String, GlShader>();
+ private GlShader currentShader;
+ private float[] currentTexMatrix;
+ private int texMatrixLocation;
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader that
+ // handles stride and compare performance with intermediate copy.
+ private ByteBuffer copyBuffer;
+
+ /**
+ * Upload |planes| into |outputYuvTextures|, taking stride into consideration. |outputYuvTextures|
+ * must have been generated in advance.
+ */
+ public void uploadYuvData(
+ int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ final int planeWidth = (i == 0) ? width : width / 2;
+ final int planeHeight = (i == 0) ? height : height / 2;
+ if (strides[i] > planeWidth) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidth * planeHeight);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+ final int planeWidth = (i == 0) ? width : width / 2;
+ final int planeHeight = (i == 0) ? height : height / 2;
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidth) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ VideoRenderer.nativeCopyPlane(
+ planes[i], planeWidth, planeHeight, strides[i], copyBuffer, planeWidth);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
+ GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ }
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ public void drawOes(int oesTextureId, float[] texMatrix) {
+ prepareShader(OES_FRAGMENT_SHADER_STRING);
+ // updateTexImage() may be called from another thread in another EGL context, so we need to
+ // bind/unbind the texture in each draw call so that GLES understads it's a new texture.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ drawRectangle(texMatrix);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ public void drawRgb(int textureId, float[] texMatrix) {
+ prepareShader(RGB_FRAGMENT_SHADER_STRING);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ drawRectangle(texMatrix);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ public void drawYuv(int[] yuvTextures, float[] texMatrix) {
+ prepareShader(YUV_FRAGMENT_SHADER_STRING);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ drawRectangle(texMatrix);
+ // Unbind the textures as a precaution..
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void drawRectangle(float[] texMatrix) {
+ // Try avoid uploading the texture if possible.
+ if (!Arrays.equals(currentTexMatrix, texMatrix)) {
+ currentTexMatrix = texMatrix.clone();
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, texMatrix, 0);
+ }
+ // Draw quad.
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ }
+
+ private void prepareShader(String fragmentShader) {
+ // Lazy allocation.
+ if (!shaders.containsKey(fragmentShader)) {
+ final GlShader shader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ shaders.put(fragmentShader, shader);
+ shader.useProgram();
+ // Initialize fragment shader uniform values.
+ if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.getUniformLocation("rgb_tex"), 0);
+ } else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.getUniformLocation("oes_tex"), 0);
+ } else {
+ throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
+ }
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+ shader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
+ }
+
+ // Update GLES state if shader is not already current.
+ final GlShader shader = shaders.get(fragmentShader);
+ if (currentShader != shader) {
+ currentShader = shader;
+ shader.useProgram();
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ currentTexMatrix = null;
+ texMatrixLocation = shader.getUniformLocation("texMatrix");
+ }
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ public void release() {
+ for (GlShader shader : shaders.values()) {
+ shader.release();
+ }
+ shaders.clear();
+ copyBuffer = null;
+ }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlShader.java b/talk/app/webrtc/java/android/org/webrtc/GlShader.java
new file mode 100644
index 0000000000..966f0f5794
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/GlShader.java
@@ -0,0 +1,144 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import org.webrtc.Logging;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+  private static final String TAG = "GlShader";
+
+  /**
+   * Compiles |source| as a shader of |shaderType| (e.g. GL_VERTEX_SHADER or GL_FRAGMENT_SHADER)
+   * and returns the GLES shader handle. Throws RuntimeException on compile failure.
+   */
+  private static int compileShader(int shaderType, String source) {
+    int[] result = new int[] {
+      GLES20.GL_FALSE
+    };
+    int shader = GLES20.glCreateShader(shaderType);
+    GLES20.glShaderSource(shader, source);
+    GLES20.glCompileShader(shader);
+    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
+    if (result[0] != GLES20.GL_TRUE) {
+      Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
+          GLES20.glGetShaderInfoLog(shader));
+      // NOTE(review): the failed shader handle is not deleted before throwing — confirm whether
+      // glDeleteShader should be called here to avoid leaking the handle.
+      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+    }
+    GlUtil.checkNoGLES2Error("compileShader");
+    return shader;
+  }
+
+  // GLES handles; -1 marks a handle that has been released (see release()).
+  private int vertexShader;
+  private int fragmentShader;
+  private int program;
+
+  /**
+   * Compiles both shader stages and links them into a GLES program. An EGLContext must be bound on
+   * the current thread when calling this. Throws RuntimeException on compile or link failure.
+   */
+  public GlShader(String vertexSource, String fragmentSource) {
+    vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+    fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+    program = GLES20.glCreateProgram();
+    if (program == 0) {
+      throw new RuntimeException("Could not create program");
+    }
+    GLES20.glAttachShader(program, vertexShader);
+    GLES20.glAttachShader(program, fragmentShader);
+    GLES20.glLinkProgram(program);
+    int[] linkStatus = new int[] {
+      GLES20.GL_FALSE
+    };
+    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+    if (linkStatus[0] != GLES20.GL_TRUE) {
+      Logging.e(TAG, "Could not link program: " +
+          GLES20.glGetProgramInfoLog(program));
+      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+    }
+    GlUtil.checkNoGLES2Error("Creating GlShader");
+  }
+
+  /** Returns the location of vertex attribute |label|; throws if released or not found. */
+  public int getAttribLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetAttribLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate '" + label + "' in program");
+    }
+    return location;
+  }
+
+  /**
+   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+   * |buffer| with |dimension| number of components per vertex.
+   */
+  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = getAttribLocation(label);
+    GLES20.glEnableVertexAttribArray(location);
+    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
+    GlUtil.checkNoGLES2Error("setVertexAttribArray");
+  }
+
+  /** Returns the location of uniform |label|; throws if released or not found. */
+  public int getUniformLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetUniformLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+    }
+    return location;
+  }
+
+  /** Makes this program current on the calling thread's GLES context; throws if released. */
+  public void useProgram() {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    GLES20.glUseProgram(program);
+    GlUtil.checkNoGLES2Error("glUseProgram");
+  }
+
+  /** Deletes the shaders and program. Handles are set to -1, so release() is idempotent. */
+  public void release() {
+    Logging.d(TAG, "Deleting shader.");
+    // Flag shaders for deletion (does not delete until no longer attached to a program).
+    if (vertexShader != -1) {
+      GLES20.glDeleteShader(vertexShader);
+      vertexShader = -1;
+    }
+    if (fragmentShader != -1) {
+      GLES20.glDeleteShader(fragmentShader);
+      fragmentShader = -1;
+    }
+    // Delete program, automatically detaching any shaders from it.
+    if (program != -1) {
+      GLES20.glDeleteProgram(program);
+      program = -1;
+    }
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlTextureFrameBuffer.java b/talk/app/webrtc/java/android/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000000..fd52c37eb4
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,142 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+  private final int frameBufferId;
+  private final int textureId;
+  // One of GL_LUMINANCE, GL_RGB, or GL_RGBA; validated in the constructor.
+  private final int pixelFormat;
+  // Current texture dimensions; 0x0 until setSize() has been called.
+  private int width;
+  private int height;
+
+  /**
+   * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+   * when calling this function. The framebuffer is not complete until setSize() is called.
+   */
+  public GlTextureFrameBuffer(int pixelFormat) {
+    switch (pixelFormat) {
+      case GLES20.GL_LUMINANCE:
+      case GLES20.GL_RGB:
+      case GLES20.GL_RGBA:
+        this.pixelFormat = pixelFormat;
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+    }
+
+    textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    this.width = 0;
+    this.height = 0;
+
+    // Create framebuffer object and bind it.
+    final int frameBuffers[] = new int[1];
+    GLES20.glGenFramebuffers(1, frameBuffers, 0);
+    frameBufferId = frameBuffers[0];
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("Generate framebuffer");
+
+    // Attach the texture to the framebuffer as color attachment.
+    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+        GLES20.GL_TEXTURE_2D, textureId, 0);
+    GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+  }
+
+  /**
+   * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+   * EGLContext must be bound on the current thread when calling this function. Must be called at
+   * least once before using the framebuffer. May be called multiple times to change size.
+   */
+  public void setSize(int width, int height) {
+    if (width == 0 || height == 0) {
+      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+    }
+    if (width == this.width && height == this.height) {
+      return;
+    }
+    this.width = width;
+    this.height = height;
+
+    // Bind our framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+    // Allocate texture. Passing null as the last argument allocates storage without uploading
+    // any pixel data.
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+        GLES20.GL_UNSIGNED_BYTE, null);
+
+    // Check that the framebuffer is in a good state.
+    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+      throw new IllegalStateException("Framebuffer not complete, status: " + status);
+    }
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+  }
+
+  /** Current texture width in pixels; 0 until setSize() has been called. */
+  public int getWidth() {
+    return width;
+  }
+
+  /** Current texture height in pixels; 0 until setSize() has been called. */
+  public int getHeight() {
+    return height;
+  }
+
+  public int getFrameBufferId() {
+    return frameBufferId;
+  }
+
+  public int getTextureId() {
+    return textureId;
+  }
+
+  /**
+   * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+   * this function. This object should not be used after this call.
+   */
+  public void release() {
+    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+    width = 0;
+    height = 0;
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlUtil.java b/talk/app/webrtc/java/android/org/webrtc/GlUtil.java
new file mode 100644
index 0000000000..8b4357969d
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/GlUtil.java
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+  // Static utility class; not instantiable.
+  private GlUtil() {}
+
+  // Assert that no OpenGL ES 2.0 error has been raised.
+  public static void checkNoGLES2Error(String msg) {
+    int error = GLES20.glGetError();
+    if (error != GLES20.GL_NO_ERROR) {
+      throw new RuntimeException(msg + ": GLES20 error: " + error);
+    }
+  }
+
+  /**
+   * Copies |coords| into a newly allocated direct FloatBuffer (required by GLES upload functions)
+   * in native byte order, with the position rewound to 0.
+   */
+  public static FloatBuffer createFloatBuffer(float[] coords) {
+    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+    bb.order(ByteOrder.nativeOrder());
+    FloatBuffer fb = bb.asFloatBuffer();
+    fb.put(coords);
+    fb.position(0);
+    return fb;
+  }
+
+  /**
+   * Generate texture with standard parameters: linear min/mag filtering and clamp-to-edge
+   * wrapping. The new texture is left bound to |target|.
+   */
+  public static int generateTexture(int target) {
+    final int textureArray[] = new int[1];
+    GLES20.glGenTextures(1, textureArray, 0);
+    final int textureId = textureArray[0];
+    GLES20.glBindTexture(target, textureId);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+    checkNoGLES2Error("generateTexture");
+    return textureId;
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitor.java b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitor.java
new file mode 100644
index 0000000000..581a223c19
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitor.java
@@ -0,0 +1,228 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+
+import android.content.Context;
+import android.util.Log;
+
+import java.util.ArrayList;
+
+/**
+ * Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * Triggers updates to the underlying network state from OS networking events.
+ *
+ * WARNING: This class is not thread-safe.
+ */
+public class NetworkMonitor {
+  /**
+   * Alerted when the connection type of the network changes.
+   * The alert is fired on the UI thread.
+   */
+  public interface NetworkObserver {
+    public void onConnectionTypeChanged(ConnectionType connectionType);
+  }
+
+  private static final String TAG = "NetworkMonitor";
+  // Singleton, created lazily by init(); see also resetInstanceForTests().
+  private static NetworkMonitor instance;
+
+  private final Context applicationContext;
+
+  // Native observers of the connection type changes. Each entry is a native pointer
+  // passed in via startMonitoring().
+  private final ArrayList<Long> nativeNetworkObservers;
+  // Java observers of the connection type changes.
+  private final ArrayList<NetworkObserver> networkObservers;
+
+  // Object that detects the connection type changes.
+  private NetworkMonitorAutoDetect autoDetector;
+
+  private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
+
+  private NetworkMonitor(Context context) {
+    assertIsTrue(context != null);
+    // Prefer the application context so the monitor does not retain a shorter-lived context.
+    applicationContext =
+        context.getApplicationContext() == null ? context : context.getApplicationContext();
+
+    nativeNetworkObservers = new ArrayList<Long>();
+    networkObservers = new ArrayList<NetworkObserver>();
+  }
+
+  /**
+   * Initializes the singleton once.
+   * Called from the native code.
+   */
+  public static NetworkMonitor init(Context context) {
+    if (!isInitialized()) {
+      instance = new NetworkMonitor(context);
+    }
+    return instance;
+  }
+
+  public static boolean isInitialized() {
+    return instance != null;
+  }
+
+  /**
+   * Returns the singleton instance.
+   */
+  public static NetworkMonitor getInstance() {
+    return instance;
+  }
+
+  /**
+   * Enables auto detection of the current network state based on notifications from the system.
+   * Note that passing true here requires the embedding app have the platform ACCESS_NETWORK_STATE
+   * permission.
+   *
+   * @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
+   *    network connectivity.
+   */
+  public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
+    getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
+  }
+
+  // Throws AssertionError if |condition| is false; used for constructor precondition checks.
+  private static void assertIsTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected to be true");
+    }
+  }
+
+  // Called by the native code. Registers |nativeObserver| and enables auto detection.
+  private void startMonitoring(long nativeObserver) {
+    Log.d(TAG, "Start monitoring from native observer " + nativeObserver);
+    nativeNetworkObservers.add(nativeObserver);
+    setAutoDetectConnectivityStateInternal(true);
+  }
+
+  // Called by the native code. Unregisters |nativeObserver|.
+  // NOTE(review): this disables auto detection unconditionally, even when other native
+  // observers are still registered — confirm this is the intended behavior.
+  private void stopMonitoring(long nativeObserver) {
+    Log.d(TAG, "Stop monitoring from native observer " + nativeObserver);
+    setAutoDetectConnectivityStateInternal(false);
+    nativeNetworkObservers.remove(nativeObserver);
+  }
+
+  private ConnectionType getCurrentConnectionType() {
+    return currentConnectionType;
+  }
+
+  // Returns the NetID of the default network, or INVALID_NET_ID when auto detection is off.
+  private int getCurrentDefaultNetId() {
+    return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
+  }
+
+  private void destroyAutoDetector() {
+    if (autoDetector != null) {
+      autoDetector.destroy();
+      autoDetector = null;
+    }
+  }
+
+  // Creates or destroys the auto detector. On creation, immediately queries the current
+  // network state and propagates the resulting connection type to all observers.
+  private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
+    if (!shouldAutoDetect) {
+      destroyAutoDetector();
+      return;
+    }
+    if (autoDetector == null) {
+      autoDetector = new NetworkMonitorAutoDetect(
+          new NetworkMonitorAutoDetect.Observer() {
+            @Override
+            public void onConnectionTypeChanged(ConnectionType newConnectionType) {
+              updateCurrentConnectionType(newConnectionType);
+            }
+          },
+          applicationContext);
+      final NetworkMonitorAutoDetect.NetworkState networkState =
+          autoDetector.getCurrentNetworkState();
+      updateCurrentConnectionType(autoDetector.getCurrentConnectionType(networkState));
+    }
+  }
+
+  private void updateCurrentConnectionType(ConnectionType newConnectionType) {
+    currentConnectionType = newConnectionType;
+    notifyObserversOfConnectionTypeChange(newConnectionType);
+  }
+
+  /**
+   * Alerts all observers of a connection change.
+   */
+  private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
+    // Native observers are notified through JNI; Java observers via their callback.
+    for (long nativeObserver : nativeNetworkObservers) {
+      nativeNotifyConnectionTypeChanged(nativeObserver);
+    }
+    for (NetworkObserver observer : networkObservers) {
+      observer.onConnectionTypeChanged(newConnectionType);
+    }
+  }
+
+  /**
+   * Adds an observer for any connection type changes.
+   */
+  public static void addNetworkObserver(NetworkObserver observer) {
+    getInstance().addNetworkObserverInternal(observer);
+  }
+
+  private void addNetworkObserverInternal(NetworkObserver observer) {
+    networkObservers.add(observer);
+  }
+
+  /**
+   * Removes an observer for any connection type changes.
+   */
+  public static void removeNetworkObserver(NetworkObserver observer) {
+    getInstance().removeNetworkObserverInternal(observer);
+  }
+
+  private void removeNetworkObserverInternal(NetworkObserver observer) {
+    networkObservers.remove(observer);
+  }
+
+  /**
+   * Checks if there currently is connectivity.
+   */
+  public static boolean isOnline() {
+    ConnectionType connectionType = getInstance().getCurrentConnectionType();
+    return connectionType != ConnectionType.CONNECTION_UNKNOWN
+        && connectionType != ConnectionType.CONNECTION_NONE;
+  }
+
+  private native long nativeCreateNetworkMonitor();
+
+  private native void nativeNotifyConnectionTypeChanged(long nativePtr);
+
+  // For testing only.
+  static void resetInstanceForTests(Context context) {
+    instance = new NetworkMonitor(context);
+  }
+
+  // For testing only.
+  public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
+    return getInstance().autoDetector;
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
new file mode 100644
index 0000000000..e3a7850db4
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
@@ -0,0 +1,424 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
+
+import android.Manifest.permission;
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+import android.util.Log;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity.
+ * Note that use of this class requires that the app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver {
+  /** Coarse classification of the current default network connection. */
+  static enum ConnectionType {
+    CONNECTION_UNKNOWN,
+    CONNECTION_ETHERNET,
+    CONNECTION_WIFI,
+    CONNECTION_4G,
+    CONNECTION_3G,
+    CONNECTION_2G,
+    CONNECTION_BLUETOOTH,
+    CONNECTION_NONE
+  }
+
+  /** Immutable snapshot of connectivity: connected flag plus network type and subtype. */
+  static class NetworkState {
+    private final boolean connected;
+    // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+    // further divided into 2G, 3G, or 4G from the subtype.
+    private final int type;
+    // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+    // Will be useful to find the maximum bandwidth.
+    private final int subtype;
+
+    public NetworkState(boolean connected, int type, int subtype) {
+      this.connected = connected;
+      this.type = type;
+      this.subtype = subtype;
+    }
+
+    public boolean isConnected() {
+      return connected;
+    }
+
+    public int getNetworkType() {
+      return type;
+    }
+
+    public int getNetworkSubType() {
+      return subtype;
+    }
+  }
+
+  /** Queries the ConnectivityManager for information about the current connection. */
+  static class ConnectivityManagerDelegate {
+    private final ConnectivityManager connectivityManager;
+
+    ConnectivityManagerDelegate(Context context) {
+      connectivityManager =
+          (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
+    }
+
+    // For testing.
+    ConnectivityManagerDelegate() {
+      // All the methods below should be overridden.
+      connectivityManager = null;
+    }
+
+    /**
+     * Returns connection type and status information about the current
+     * default network.
+     */
+    NetworkState getNetworkState() {
+      return getNetworkState(connectivityManager.getActiveNetworkInfo());
+    }
+
+    /**
+     * Returns connection type and status information about |network|.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    NetworkState getNetworkState(Network network) {
+      return getNetworkState(connectivityManager.getNetworkInfo(network));
+    }
+
+    /**
+     * Returns connection type and status information gleaned from networkInfo.
+     * A null or disconnected networkInfo maps to NetworkState(false, -1, -1).
+     */
+    NetworkState getNetworkState(NetworkInfo networkInfo) {
+      if (networkInfo == null || !networkInfo.isConnected()) {
+        return new NetworkState(false, -1, -1);
+      }
+      return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
+    }
+
+    /**
+     * Returns all connected networks.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    Network[] getAllNetworks() {
+      return connectivityManager.getAllNetworks();
+    }
+
+    /**
+     * Returns the NetID of the current default network. Returns
+     * INVALID_NET_ID if no current default network connected.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    int getDefaultNetId() {
+      // Android Lollipop had no API to get the default network; only an
+      // API to return the NetworkInfo for the default network. To
+      // determine the default network one can find the network with
+      // type matching that of the default network.
+      final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+      if (defaultNetworkInfo == null) {
+        return INVALID_NET_ID;
+      }
+      final Network[] networks = getAllNetworks();
+      int defaultNetId = INVALID_NET_ID;
+      for (Network network : networks) {
+        // Skip networks that cannot provide Internet access (e.g. IMS, FOTA).
+        if (!hasInternetCapability(network)) {
+          continue;
+        }
+        final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+        if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+          // There should not be multiple connected networks of the
+          // same type. At least as of Android Marshmallow this is
+          // not supported. If this becomes supported this assertion
+          // may trigger. At that point we could consider using
+          // ConnectivityManager.getDefaultNetwork() though this
+          // may give confusing results with VPNs and is only
+          // available with Android Marshmallow.
+          assert defaultNetId == INVALID_NET_ID;
+          defaultNetId = networkToNetId(network);
+        }
+      }
+      return defaultNetId;
+    }
+
+    /**
+     * Returns true if {@code network} can provide Internet access. Can be used to
+     * ignore specialized networks (e.g. IMS, FOTA).
+     */
+    @SuppressLint("NewApi")
+    boolean hasInternetCapability(Network network) {
+      final NetworkCapabilities capabilities =
+          connectivityManager.getNetworkCapabilities(network);
+      return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
+    }
+  }
+
+  /** Queries the WifiManager for SSID of the current Wifi connection. */
+  static class WifiManagerDelegate {
+    private final Context context;
+    // Null when the app lacks ACCESS_WIFI_STATE permission.
+    private final WifiManager wifiManager;
+    private final boolean hasWifiPermission;
+
+    WifiManagerDelegate(Context context) {
+      this.context = context;
+
+      // Only request the WifiManager when the app holds ACCESS_WIFI_STATE.
+      hasWifiPermission = context.getPackageManager().checkPermission(
+          permission.ACCESS_WIFI_STATE, context.getPackageName())
+          == PackageManager.PERMISSION_GRANTED;
+      wifiManager = hasWifiPermission
+          ? (WifiManager) context.getSystemService(Context.WIFI_SERVICE) : null;
+    }
+
+    // For testing.
+    WifiManagerDelegate() {
+      // All the methods below should be overridden.
+      context = null;
+      wifiManager = null;
+      hasWifiPermission = false;
+    }
+
+    /**
+     * Returns the SSID of the current Wifi connection, read from the sticky
+     * NETWORK_STATE_CHANGED broadcast; empty string when unavailable.
+     */
+    String getWifiSSID() {
+      // Passing a null receiver returns the last sticky broadcast without registering.
+      final Intent intent = context.registerReceiver(null,
+          new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+      if (intent != null) {
+        final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+        if (wifiInfo != null) {
+          final String ssid = wifiInfo.getSSID();
+          if (ssid != null) {
+            return ssid;
+          }
+        }
+      }
+      return "";
+    }
+
+    boolean getHasWifiPermission() {
+      return hasWifiPermission;
+    }
+  }
+
+  // Sentinel NetID returned when the default network cannot be determined.
+  static final int INVALID_NET_ID = -1;
+  private static final String TAG = "NetworkMonitorAutoDetect";
+  private static final int UNKNOWN_LINK_SPEED = -1;
+  private final IntentFilter intentFilter;
+
+  // Observer for the connection type change.
+  private final Observer observer;
+
+  private final Context context;
+  // connectivityManagerDelegates and wifiManagerDelegate are only non-final for testing.
+  private ConnectivityManagerDelegate connectivityManagerDelegate;
+  private WifiManagerDelegate wifiManagerDelegate;
+  // True while this BroadcastReceiver is registered with the context.
+  private boolean isRegistered;
+  private ConnectionType connectionType;
+  private String wifiSSID;
+
+  /**
+   * Observer interface by which observer is notified of network changes.
+   */
+  public static interface Observer {
+    /**
+     * Called when default network changes.
+     */
+    public void onConnectionTypeChanged(ConnectionType newConnectionType);
+  }
+
+  /**
+   * Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
+   * Captures the initial network state and registers for CONNECTIVITY_ACTION broadcasts.
+   */
+  public NetworkMonitorAutoDetect(Observer observer, Context context) {
+    this.observer = observer;
+    this.context = context;
+    connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
+    wifiManagerDelegate = new WifiManagerDelegate(context);
+
+    // Snapshot the current state before broadcasts start arriving.
+    final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+    connectionType = getCurrentConnectionType(networkState);
+    wifiSSID = getCurrentWifiSSID(networkState);
+    intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+    registerReceiver();
+  }
+
+  /**
+   * Allows overriding the ConnectivityManagerDelegate for tests.
+   */
+  void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+    connectivityManagerDelegate = delegate;
+  }
+
+  /**
+   * Allows overriding the WifiManagerDelegate for tests.
+   */
+  void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+    wifiManagerDelegate = delegate;
+  }
+
+  /**
+   * Returns whether the object has registered to receive network connectivity intents.
+   * Visible for testing.
+   */
+  boolean isReceiverRegisteredForTesting() {
+    return isRegistered;
+  }
+
+  /** Unregisters the broadcast receiver; call when this detector is no longer needed. */
+  public void destroy() {
+    unregisterReceiver();
+  }
+
+  /**
+   * Registers this object as a BroadcastReceiver for connectivity intents, at most once.
+   */
+  private void registerReceiver() {
+    if (isRegistered) {
+      return;
+    }
+    isRegistered = true;
+    context.registerReceiver(this, intentFilter);
+  }
+
+  /**
+   * Unregisters this BroadcastReceiver if it is currently registered; no-op otherwise.
+   */
+  private void unregisterReceiver() {
+    if (!isRegistered) {
+      return;
+    }
+    isRegistered = false;
+    context.unregisterReceiver(this);
+  }
+
+  /** Returns the connectivity state of the current default network, via the delegate. */
+  public NetworkState getCurrentNetworkState() {
+    return connectivityManagerDelegate.getNetworkState();
+  }
+
+  /**
+   * Returns NetID of device's current default connected network used for
+   * communication.
+   * Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
+   * when not implemented.
+   */
+  public int getDefaultNetId() {
+    // NetIDs are only retrievable on Lollipop (API 21) and later.
+    return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
+        ? connectivityManagerDelegate.getDefaultNetId()
+        : INVALID_NET_ID;
+  }
+
+  /** Maps a NetworkState onto the coarse ConnectionType reported to the observer. */
+  public ConnectionType getCurrentConnectionType(NetworkState networkState) {
+    if (!networkState.isConnected()) {
+      return ConnectionType.CONNECTION_NONE;
+    }
+
+    switch (networkState.getNetworkType()) {
+      case ConnectivityManager.TYPE_ETHERNET:
+        return ConnectionType.CONNECTION_ETHERNET;
+      case ConnectivityManager.TYPE_WIFI:
+        return ConnectionType.CONNECTION_WIFI;
+      case ConnectivityManager.TYPE_WIMAX:
+        return ConnectionType.CONNECTION_4G;
+      case ConnectivityManager.TYPE_BLUETOOTH:
+        return ConnectionType.CONNECTION_BLUETOOTH;
+      case ConnectivityManager.TYPE_MOBILE:
+        // Use information from TelephonyManager to classify the connection.
+        return getMobileConnectionType(networkState.getNetworkSubType());
+      default:
+        return ConnectionType.CONNECTION_UNKNOWN;
+    }
+  }
+
+  // Buckets a TelephonyManager.NETWORK_TYPE_* subtype into a mobile generation.
+  private static ConnectionType getMobileConnectionType(int networkSubType) {
+    switch (networkSubType) {
+      case TelephonyManager.NETWORK_TYPE_GPRS:
+      case TelephonyManager.NETWORK_TYPE_EDGE:
+      case TelephonyManager.NETWORK_TYPE_CDMA:
+      case TelephonyManager.NETWORK_TYPE_1xRTT:
+      case TelephonyManager.NETWORK_TYPE_IDEN:
+        return ConnectionType.CONNECTION_2G;
+      case TelephonyManager.NETWORK_TYPE_UMTS:
+      case TelephonyManager.NETWORK_TYPE_EVDO_0:
+      case TelephonyManager.NETWORK_TYPE_EVDO_A:
+      case TelephonyManager.NETWORK_TYPE_HSDPA:
+      case TelephonyManager.NETWORK_TYPE_HSUPA:
+      case TelephonyManager.NETWORK_TYPE_HSPA:
+      case TelephonyManager.NETWORK_TYPE_EVDO_B:
+      case TelephonyManager.NETWORK_TYPE_EHRPD:
+      case TelephonyManager.NETWORK_TYPE_HSPAP:
+        return ConnectionType.CONNECTION_3G;
+      case TelephonyManager.NETWORK_TYPE_LTE:
+        return ConnectionType.CONNECTION_4G;
+      default:
+        return ConnectionType.CONNECTION_UNKNOWN;
+    }
+  }
+
+  // Returns the current SSID when connected over wifi, or the empty string otherwise.
+  private String getCurrentWifiSSID(NetworkState networkState) {
+    if (getCurrentConnectionType(networkState) == ConnectionType.CONNECTION_WIFI) {
+      return wifiManagerDelegate.getWifiSSID();
+    }
+    return "";
+  }
+
+  // BroadcastReceiver
+  @Override
+  public void onReceive(Context context, Intent intent) {
+    // Snapshot current state; only CONNECTIVITY_ACTION broadcasts trigger a notification.
+    final NetworkState networkState = getCurrentNetworkState();
+    if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+      connectionTypeChanged(networkState);
+    }
+  }
+
+ private void connectionTypeChanged(NetworkState networkState) {
+ ConnectionType newConnectionType = getCurrentConnectionType(networkState);
+ String newWifiSSID = getCurrentWifiSSID(networkState);
+ if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
+
+ connectionType = newConnectionType;
+ wifiSSID = newWifiSSID;
+ Log.d(TAG, "Network connectivity changed, type is: " + connectionType);
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+
+  /**
+   * Extracts NetID of network. Only available on Lollipop and newer releases.
+   */
+  @SuppressLint("NewApi")
+  private static int networkToNetId(Network network) {
+    // NOTE(pauljensen): This depends on Android framework implementation details.
+    // Fortunately this functionality is unlikely to ever change.
+    // TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
+    // Relies on Network.toString() rendering the decimal netId; throws
+    // NumberFormatException if a future framework ever changes that format.
+    return Integer.parseInt(network.toString());
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/OWNERS b/talk/app/webrtc/java/android/org/webrtc/OWNERS
new file mode 100644
index 0000000000..4d31ffb663
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000000..94d180da5a
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
@@ -0,0 +1,190 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.Matrix;
+
+/**
+ * Static helper functions for renderer implementations: matrix helpers for texture/layout
+ * transformations and display-size calculation for the supported scaling types.
+ */
+public class RendererCommon {
+  /** Interface for reporting rendering events. */
+  public static interface RendererEvents {
+    /**
+     * Callback fired once first frame is rendered.
+     */
+    public void onFirstFrameRendered();
+
+    /**
+     * Callback fired when rendered frame resolution or rotation has changed.
+     */
+    public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+  }
+
+  // Types of video scaling:
+  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+  //    maintaining the aspect ratio (black borders may be displayed).
+  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+  //    maintaining the aspect ratio. Some portion of the video frame may be
+  //    clipped.
+  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+  // possible of the view while maintaining aspect ratio, under the constraint that at least
+  // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
+  public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+
+  // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
+  // This limits excessive cropping when adjusting display size.
+  // Declared final: this is a constant and must never be reassigned.
+  private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+  /** Returns a new 4x4 column-major identity matrix. */
+  public static final float[] identityMatrix() {
+    return new float[] {
+        1, 0, 0, 0,
+        0, 1, 0, 0,
+        0, 0, 1, 0,
+        0, 0, 0, 1};
+  }
+
+  // Matrix with transform y' = 1 - y.
+  public static final float[] verticalFlipMatrix() {
+    return new float[] {
+        1,  0, 0, 0,
+        0, -1, 0, 0,
+        0,  0, 1, 0,
+        0,  1, 0, 1};
+  }
+
+  // Matrix with transform x' = 1 - x.
+  public static final float[] horizontalFlipMatrix() {
+    return new float[] {
+        -1, 0, 0, 0,
+         0, 1, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1};
+  }
+
+  /**
+   * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
+   * clockwise when rendered.
+   */
+  public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
+    final float[] rotationMatrix = new float[16];
+    Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
+    adjustOrigin(rotationMatrix);
+    return multiplyMatrices(textureMatrix, rotationMatrix);
+  }
+
+  /**
+   * Returns new matrix with the result of a * b.
+   */
+  public static float[] multiplyMatrices(float[] a, float[] b) {
+    final float[] resultMatrix = new float[16];
+    Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
+    return resultMatrix;
+  }
+
+  /**
+   * Returns layout transformation matrix that applies an optional mirror effect and compensates
+   * for video vs display aspect ratio.
+   */
+  public static float[] getLayoutMatrix(
+      boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+    float scaleX = 1;
+    float scaleY = 1;
+    // Scale X or Y dimension so that video and display size have same aspect ratio.
+    if (displayAspectRatio > videoAspectRatio) {
+      scaleY = videoAspectRatio / displayAspectRatio;
+    } else {
+      scaleX = displayAspectRatio / videoAspectRatio;
+    }
+    // Apply optional horizontal flip.
+    if (mirror) {
+      scaleX *= -1;
+    }
+    final float[] matrix = new float[16];
+    Matrix.setIdentityM(matrix, 0);
+    Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+    adjustOrigin(matrix);
+    return matrix;
+  }
+
+  /**
+   * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+   */
+  public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
+      int maxDisplayWidth, int maxDisplayHeight) {
+    return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+        maxDisplayWidth, maxDisplayHeight);
+  }
+
+  /**
+   * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+   * that are in the range 0 to 1.
+   */
+  private static void adjustOrigin(float[] matrix) {
+    // Note that OpenGL is using column-major order.
+    // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+    matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+    matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+    // Post translate with 0.5 to move coordinates to range [0, 1].
+    matrix[12] += 0.5f;
+    matrix[13] += 0.5f;
+  }
+
+  /**
+   * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+   * that must remain visible.
+   */
+  private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+    switch (scalingType) {
+      case SCALE_ASPECT_FIT:
+        return 1.0f;
+      case SCALE_ASPECT_FILL:
+        return 0.0f;
+      case SCALE_ASPECT_BALANCED:
+        return BALANCED_VISIBLE_FRACTION;
+      default:
+        throw new IllegalArgumentException();
+    }
+  }
+
+  /**
+   * Calculate display size based on minimum fraction of the video that must remain visible,
+   * video aspect ratio, and maximum display size.
+   */
+  private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
+      int maxDisplayWidth, int maxDisplayHeight) {
+    // If there is no constraint on the amount of cropping, fill the allowed display area.
+    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+      return new Point(maxDisplayWidth, maxDisplayHeight);
+    }
+    // Each dimension is constrained on max display size and how much we are allowed to crop.
+    final int width = Math.min(maxDisplayWidth,
+        (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+    final int height = Math.min(maxDisplayHeight,
+        (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+    return new Point(width, height);
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000000..b9c158f848
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,229 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
+ * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
+ * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
+ * called in order to receive a new frame. Call disconnect() to stop receiving new frames and
+ * release all resources.
+ * Note that there is a C++ counter part of this class that optionally can be used. It is used for
+ * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
+ * when the webrtc::VideoFrame is no longer used.
+ */
+final class SurfaceTextureHelper {
+  private static final String TAG = "SurfaceTextureHelper";
+  /**
+   * Callback interface for being notified that a new texture frame is available. The calls will be
+   * made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
+   * lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
+   * SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
+   * on the calling thread.
+   */
+  public interface OnTextureFrameAvailableListener {
+    abstract void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs);
+  }
+
+  /** Creates a SurfaceTextureHelper with a dedicated private callback thread. */
+  public static SurfaceTextureHelper create(EGLContext sharedContext) {
+    return create(sharedContext, null);
+  }
+
+  /**
+   * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. If
+   * |handler| is non-null, the callback will be executed on that handler's thread. If |handler| is
+   * null, a dedicated private thread is created for the callbacks.
+   */
+  public static SurfaceTextureHelper create(final EGLContext sharedContext, final Handler handler) {
+    final Handler finalHandler;
+    if (handler != null) {
+      finalHandler = handler;
+    } else {
+      final HandlerThread thread = new HandlerThread(TAG);
+      thread.start();
+      finalHandler = new Handler(thread.getLooper());
+    }
+    // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+    // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+    // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
+    // is constructed on the |handler| thread.
+    return ThreadUtils.invokeUninterruptibly(finalHandler, new Callable<SurfaceTextureHelper>() {
+      @Override public SurfaceTextureHelper call() {
+        // isOwningThread is true only when we created the thread ourselves, so that release()
+        // knows whether to quit the looper.
+        return new SurfaceTextureHelper(sharedContext, finalHandler, (handler == null));
+      }
+    });
+  }
+
+  private final Handler handler;
+  // True if this helper created (and therefore owns) the |handler| thread.
+  private final boolean isOwningThread;
+  private final EglBase eglBase;
+  private final SurfaceTexture surfaceTexture;
+  private final int oesTextureId;
+  private OnTextureFrameAvailableListener listener;
+  // The possible states of this class. All are mutated only on the |handler| thread
+  // (the SurfaceTexture is constructed on that thread, so onFrameAvailable runs there too).
+  private boolean hasPendingTexture = false;
+  private boolean isTextureInUse = false;
+  private boolean isQuitting = false;
+
+  private SurfaceTextureHelper(EGLContext sharedContext, Handler handler, boolean isOwningThread) {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+    }
+    this.handler = handler;
+    this.isOwningThread = isOwningThread;
+
+    // A pbuffer surface is needed only to make the context current; nothing is drawn to it.
+    eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+    eglBase.createDummyPbufferSurface();
+    eglBase.makeCurrent();
+
+    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    surfaceTexture = new SurfaceTexture(oesTextureId);
+  }
+
+  /**
+   * Start to stream textures to the given |listener|.
+   * A Listener can only be set once.
+   */
+  public void setListener(OnTextureFrameAvailableListener listener) {
+    if (this.listener != null) {
+      throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+    }
+    this.listener = listener;
+    surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+      @Override
+      public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        // Runs on the |handler| thread (SurfaceTexture ctor thread) - see create().
+        hasPendingTexture = true;
+        tryDeliverTextureFrame();
+      }
+    });
+  }
+
+  /**
+   * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+   * producer such as a camera or decoder.
+   */
+  public SurfaceTexture getSurfaceTexture() {
+    return surfaceTexture;
+  }
+
+  /**
+   * Call this function to signal that you are done with the frame received in
+   * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
+   * this function in order to receive a new frame.
+   */
+  public void returnTextureFrame() {
+    // Posted so the state booleans are only touched on the handler thread. May be called from
+    // any thread.
+    handler.post(new Runnable() {
+      @Override public void run() {
+        isTextureInUse = false;
+        if (isQuitting) {
+          // disconnect() already ran but deferred cleanup until the frame was returned.
+          release();
+        } else {
+          tryDeliverTextureFrame();
+        }
+      }
+    });
+  }
+
+  /**
+   * Call disconnect() to stop receiving frames. Resources are released when the texture frame has
+   * been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
+   * onTextureFrameAvailable() after this function returns.
+   */
+  public void disconnect() {
+    // Fast path: already on the handler thread, quit synchronously.
+    if (handler.getLooper().getThread() == Thread.currentThread()) {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
+      }
+      return;
+    }
+    // Otherwise set |isQuitting| on the handler thread and block until that has happened,
+    // which guarantees no further onTextureFrameAvailable() callbacks after we return.
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.postAtFrontOfQueue(new Runnable() {
+      @Override public void run() {
+        isQuitting = true;
+        barrier.countDown();
+        if (!isTextureInUse) {
+          release();
+        }
+      }
+    });
+    ThreadUtils.awaitUninterruptibly(barrier);
+  }
+
+  // Delivers the pending frame to |listener| unless quitting, no frame is pending, or the
+  // single in-flight frame has not been returned yet. Handler thread only.
+  private void tryDeliverTextureFrame() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isQuitting || !hasPendingTexture || isTextureInUse) {
+      return;
+    }
+    isTextureInUse = true;
+    hasPendingTexture = false;
+
+    eglBase.makeCurrent();
+    surfaceTexture.updateTexImage();
+
+    final float[] transformMatrix = new float[16];
+    surfaceTexture.getTransformMatrix(transformMatrix);
+    // getTimestamp() is only well-defined on ICS and later; fall back to elapsedRealtime.
+    final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
+        ? surfaceTexture.getTimestamp()
+        : TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+    listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+  }
+
+  // Frees GL/EGL resources and, if we own the callback thread, quits its looper.
+  // Only legal on the handler thread, after isQuitting is set and the frame returned.
+  private void release() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isTextureInUse || !isQuitting) {
+      throw new IllegalStateException("Unexpected release.");
+    }
+    eglBase.makeCurrent();
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+    surfaceTexture.release();
+    eglBase.release();
+    if (isOwningThread) {
+      handler.getLooper().quit();
+    }
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000000..d7c9e2af0a
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,541 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Point;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import org.webrtc.Logging;
+
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially four different threads:
+ * Interaction from the main app in init, release, setMirror, and setScalingtype.
+ * Interaction from C++ webrtc::VideoRendererInterface in renderFrame and canApplyRotation.
+ * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ * Interaction with the layout framework in onMeasure and onSizeChanged.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+ implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+  private static final String TAG = "SurfaceViewRenderer";
+
+  // Dedicated render thread.
+  private HandlerThread renderThread;
+  // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+  // on |handlerLock|. Null when not initialized (or after release()).
+  private final Object handlerLock = new Object();
+  private Handler renderThreadHandler;
+
+  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
+  // accessed from the render thread.
+  private EglBase eglBase;
+  private GlRectDrawer drawer;
+  // Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
+  private int[] yuvTextures = null;
+
+  // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+  private VideoRenderer.I420Frame pendingFrame;
+  private final Object frameLock = new Object();
+
+  // These variables are synchronized on |layoutLock|.
+  private final Object layoutLock = new Object();
+  // These three different dimension values are used to keep track of the state in these functions:
+  // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
+  // requestLayout() is triggered internally by frame size changes, but can also be triggered
+  // externally by layout update requests.
+  // Most recent measurement specification from onMeasure().
+  private int widthSpec;
+  private int heightSpec;
+  // Current size on screen in pixels. Updated in onLayout(), and should be consistent with
+  // |widthSpec|/|heightSpec| after that.
+  private int layoutWidth;
+  private int layoutHeight;
+  // Current surface size of the underlying Surface. Updated in surfaceChanged(), and should be
+  // consistent with |layoutWidth|/|layoutHeight| after that.
+  // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
+  // layout and surface size.
+  private int surfaceWidth;
+  private int surfaceHeight;
+  // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
+  private boolean isSurfaceCreated;
+  // Last rendered frame dimensions, or 0 if no frame has been rendered yet.
+  private int frameWidth;
+  private int frameHeight;
+  private int frameRotation;
+  // |scalingType| determines how the video will fill the allowed layout area in onMeasure().
+  private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+  // If true, mirrors the video stream horizontally.
+  private boolean mirror;
+  // Callback for reporting renderer events.
+  private RendererCommon.RendererEvents rendererEvents;
+
+  // These variables are synchronized on |statisticsLock|.
+  private final Object statisticsLock = new Object();
+  // Total number of video frames received in renderFrame() call.
+  private int framesReceived;
+  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+  // yet.
+  private int framesDropped;
+  // Number of rendered video frames.
+  private int framesRendered;
+  // Time in ns when the first video frame was rendered.
+  private long firstFrameTimeNs;
+  // Time in ns spent in renderFrameOnRenderThread() function.
+  private long renderTimeNs;
+
+  // Runnable for posting frames to render thread.
+  private final Runnable renderFrameRunnable = new Runnable() {
+    @Override public void run() {
+      renderFrameOnRenderThread();
+    }
+  };
+
+  /**
+   * Standard View constructor. In order to render something, you must first call init().
+   */
+  public SurfaceViewRenderer(Context context) {
+    super(context);
+    // Subscribe to surfaceCreated/surfaceChanged/surfaceDestroyed callbacks.
+    getHolder().addCallback(this);
+  }
+
+  /**
+   * Standard View constructor (for XML inflation). In order to render something, you must first
+   * call init().
+   */
+  public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    // Subscribe to surfaceCreated/surfaceChanged/surfaceDestroyed callbacks.
+    getHolder().addCallback(this);
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
+   * reinitialize the renderer after a previous init()/release() cycle.
+   */
+  public void init(
+      EGLContext sharedContext, RendererCommon.RendererEvents rendererEvents) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        throw new IllegalStateException("Already initialized");
+      }
+      Logging.d(TAG, "Initializing");
+      this.rendererEvents = rendererEvents;
+      renderThread = new HandlerThread(TAG);
+      renderThread.start();
+      drawer = new GlRectDrawer();
+      eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+      // Assigned last inside the lock: a non-null |renderThreadHandler| is the signal to
+      // renderFrame() that initialization is complete.
+      renderThreadHandler = new Handler(renderThread.getLooper());
+    }
+    tryCreateEglSurface();
+  }
+
+  /**
+   * Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
+   */
+  public void tryCreateEglSurface() {
+    // |renderThreadHandler| is only created after |eglBase| is created in init(), so the
+    // following code will only execute if eglBase != null.
+    runOnRenderThread(new Runnable() {
+      @Override public void run() {
+        synchronized (layoutLock) {
+          if (isSurfaceCreated) {
+            eglBase.createSurface(getHolder());
+            eglBase.makeCurrent();
+            // Necessary for YUV frames with odd width.
+            GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+          }
+        }
+      }
+    });
+  }
+
+  /**
+   * Block until any pending frame is returned and all GL resources released, even if an interrupt
+   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+   * don't call this function, the GL resources might leak.
+   */
+  public void release() {
+    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        Logging.d(TAG, "Already released");
+        return;
+      }
+      // Release EGL and GL resources on render thread.
+      // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
+      // when the EGL context is lost. It might be dangerous to delete them manually in
+      // Activity.onDestroy().
+      renderThreadHandler.postAtFrontOfQueue(new Runnable() {
+        @Override public void run() {
+          drawer.release();
+          drawer = null;
+          if (yuvTextures != null) {
+            GLES20.glDeleteTextures(3, yuvTextures, 0);
+            yuvTextures = null;
+          }
+          if (eglBase.hasSurface()) {
+            // Clear last rendered image to black.
+            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+            eglBase.swapBuffers();
+          }
+          eglBase.release();
+          eglBase = null;
+          eglCleanupBarrier.countDown();
+        }
+      });
+      // Don't accept any more frames or messages to the render thread. Must be nulled inside the
+      // same lock so renderFrame() cannot post after cleanup is queued.
+      renderThreadHandler = null;
+    }
+    // Make sure the EGL/GL cleanup posted above is executed.
+    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+    renderThread.quit();
+    // Return any frame still queued for rendering back to the source.
+    synchronized (frameLock) {
+      if (pendingFrame != null) {
+        VideoRenderer.renderFrameDone(pendingFrame);
+        pendingFrame = null;
+      }
+    }
+    // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
+    ThreadUtils.joinUninterruptibly(renderThread);
+    renderThread = null;
+    // Reset statistics and event reporting.
+    synchronized (layoutLock) {
+      frameWidth = 0;
+      frameHeight = 0;
+      frameRotation = 0;
+      rendererEvents = null;
+    }
+    synchronized (statisticsLock) {
+      framesReceived = 0;
+      framesDropped = 0;
+      framesRendered = 0;
+      firstFrameTimeNs = 0;
+      renderTimeNs = 0;
+    }
+  }
+
+  /**
+   * Set if the video stream should be mirrored or not. Takes effect on the next rendered frame.
+   */
+  public void setMirror(final boolean mirror) {
+    synchronized (layoutLock) {
+      this.mirror = mirror;
+    }
+  }
+
+  /**
+   * Set how the video will fill the allowed layout area. Takes effect on the next layout pass.
+   */
+  public void setScalingType(RendererCommon.ScalingType scalingType) {
+    synchronized (layoutLock) {
+      this.scalingType = scalingType;
+    }
+  }
+
+  // VideoRenderer.Callbacks interface.
+  // Queues |frame| for asynchronous rendering, or drops it if the renderer is released or a
+  // previous frame is still pending. Dropped/undeliverable frames are returned via
+  // renderFrameDone() so the frame pool is not starved.
+  @Override
+  public void renderFrame(VideoRenderer.I420Frame frame) {
+    synchronized (statisticsLock) {
+      ++framesReceived;
+    }
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        Logging.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
+      } else {
+        synchronized (frameLock) {
+          if (pendingFrame == null) {
+            updateFrameDimensionsAndReportEvents(frame);
+            pendingFrame = frame;
+            renderThreadHandler.post(renderFrameRunnable);
+            return;
+          }
+        }
+      }
+    }
+    // Drop frame. Reached only when the renderer was released or |pendingFrame| was occupied.
+    synchronized (statisticsLock) {
+      ++framesDropped;
+    }
+    VideoRenderer.renderFrameDone(frame);
+  }
+
+  // Returns desired layout size given current measure specification and video aspect ratio.
+  private Point getDesiredLayoutSize() {
+    synchronized (layoutLock) {
+      final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
+      final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
+      final Point size =
+          RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
+      // An EXACTLY measure spec overrides the aspect-ratio based size in that dimension.
+      if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
+        size.x = maxWidth;
+      }
+      if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
+        size.y = maxHeight;
+      }
+      return size;
+    }
+  }
+
+  // View layout interface.
+  @Override
+  protected void onMeasure(int widthSpec, int heightSpec) {
+    synchronized (layoutLock) {
+      // Store the specs; getDesiredLayoutSize() and checkConsistentLayout() read them later.
+      this.widthSpec = widthSpec;
+      this.heightSpec = heightSpec;
+      final Point size = getDesiredLayoutSize();
+      setMeasuredDimension(size.x, size.y);
+    }
+  }
+
+  @Override
+  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+    synchronized (layoutLock) {
+      // Store the final layout dimensions; read by the render thread.
+      layoutWidth = right - left;
+      layoutHeight = bottom - top;
+    }
+    // Might have a pending frame waiting for a layout of correct size.
+    runOnRenderThread(renderFrameRunnable);
+  }
+
+  // SurfaceHolder.Callback interface.
+  @Override
+  public void surfaceCreated(final SurfaceHolder holder) {
+    Logging.d(TAG, "Surface created");
+    synchronized (layoutLock) {
+      isSurfaceCreated = true;
+    }
+    // Now that the underlying Android surface exists, try to attach an EGL surface to it.
+    tryCreateEglSurface();
+  }
+
+  @Override
+  public void surfaceDestroyed(SurfaceHolder holder) {
+    Logging.d(TAG, "Surface destroyed");
+    synchronized (layoutLock) {
+      isSurfaceCreated = false;
+      surfaceWidth = 0;
+      surfaceHeight = 0;
+    }
+    // Release the EGL surface on the render thread, where all GL work is done.
+    runOnRenderThread(new Runnable() {
+      @Override public void run() {
+        eglBase.releaseSurface();
+      }
+    });
+  }
+
+  @Override
+  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+    Logging.d(TAG, "Surface changed: " + width + "x" + height);
+    synchronized (layoutLock) {
+      // Read by the render thread in checkConsistentLayout() and renderFrameOnRenderThread().
+      surfaceWidth = width;
+      surfaceHeight = height;
+    }
+    // Might have a pending frame waiting for a surface of correct size.
+    runOnRenderThread(renderFrameRunnable);
+  }
+
+  /**
+   * Private helper function to post tasks safely. Does nothing if the renderer has been
+   * released and |renderThreadHandler| is null.
+   */
+  private void runOnRenderThread(Runnable runnable) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.post(runnable);
+      }
+    }
+  }
+
+  /**
+   * Requests new layout if necessary. Returns true if layout and surface size are consistent.
+   */
+  private boolean checkConsistentLayout() {
+    synchronized (layoutLock) {
+      final Point desiredLayoutSize = getDesiredLayoutSize();
+      if (desiredLayoutSize.x != layoutWidth || desiredLayoutSize.y != layoutHeight) {
+        Logging.d(TAG, "Requesting new layout with size: "
+            + desiredLayoutSize.x + "x" + desiredLayoutSize.y);
+        // Request layout update on UI thread. requestLayout() must not be called from the
+        // render thread directly.
+        post(new Runnable() {
+          @Override public void run() {
+            requestLayout();
+          }
+        });
+        return false;
+      }
+      // Wait for requestLayout() to propagate through this sequence before returning true:
+      // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
+      return surfaceWidth == layoutWidth && surfaceHeight == layoutHeight;
+    }
+  }
+
+  /**
+   * Renders and releases |pendingFrame|. Must only run on the render thread, which owns
+   * |eglBase|, |drawer| and |yuvTextures|.
+   */
+  private void renderFrameOnRenderThread() {
+    if (eglBase == null || !eglBase.hasSurface()) {
+      Logging.d(TAG, "No surface to draw on");
+      return;
+    }
+    if (!checkConsistentLayout()) {
+      // Output intermediate black frames while the layout is updated.
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      eglBase.swapBuffers();
+      return;
+    }
+    // After a surface size change, the EGLSurface might still have a buffer of the old size in the
+    // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
+    // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
+    synchronized (layoutLock) {
+      if (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        eglBase.swapBuffers();
+      }
+    }
+    // Fetch and render |pendingFrame|. Clearing it under |frameLock| lets renderFrame() accept
+    // a new frame while this one is being drawn.
+    final VideoRenderer.I420Frame frame;
+    synchronized (frameLock) {
+      if (pendingFrame == null) {
+        return;
+      }
+      frame = pendingFrame;
+      pendingFrame = null;
+    }
+
+    final long startTimeNs = System.nanoTime();
+    final float[] samplingMatrix;
+    if (frame.yuvFrame) {
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = RendererCommon.verticalFlipMatrix();
+    } else {
+      // TODO(magjed): Move updateTexImage() to the video source instead.
+      SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
+      surfaceTexture.updateTexImage();
+      samplingMatrix = new float[16];
+      surfaceTexture.getTransformMatrix(samplingMatrix);
+    }
+
+    // Combine rotation, mirroring and scaling into a single texture matrix.
+    final float[] texMatrix;
+    synchronized (layoutLock) {
+      final float[] rotatedSamplingMatrix =
+          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+      final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
+          mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
+      texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+    }
+
+    GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
+    if (frame.yuvFrame) {
+      // Make sure YUV textures are allocated. Allocated lazily on first YUV frame and reused.
+      if (yuvTextures == null) {
+        yuvTextures = new int[3];
+        for (int i = 0; i < 3; i++) {
+          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+        }
+      }
+      drawer.uploadYuvData(
+          yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
+      drawer.drawYuv(yuvTextures, texMatrix);
+    } else {
+      drawer.drawOes(frame.textureId, texMatrix);
+    }
+
+    eglBase.swapBuffers();
+    // Return the frame to its owner now that it has been drawn.
+    VideoRenderer.renderFrameDone(frame);
+    synchronized (statisticsLock) {
+      if (framesRendered == 0) {
+        firstFrameTimeNs = startTimeNs;
+      }
+      ++framesRendered;
+      renderTimeNs += (System.nanoTime() - startTimeNs);
+      // Log aggregate statistics periodically.
+      if (framesRendered % 300 == 0) {
+        logStatistics();
+      }
+    }
+  }
+
+ // Return current frame aspect ratio, taking rotation into account.
+ private float frameAspectRatio() {
+ synchronized (layoutLock) {
+ if (frameWidth == 0 || frameHeight == 0) {
+ return 0.0f;
+ }
+ return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
+ : (float) frameHeight / frameWidth;
+ }
+ }
+
+  // Update frame dimensions and report any changes to |rendererEvents|.
+  private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
+    synchronized (layoutLock) {
+      if (frameWidth != frame.width || frameHeight != frame.height
+          || frameRotation != frame.rotationDegree) {
+        if (rendererEvents != null) {
+          final String id = getResources().getResourceEntryName(getId());
+          // frameWidth/frameHeight are 0 only before the first frame has been seen, so this
+          // branch fires exactly once per init/release cycle.
+          if (frameWidth == 0 || frameHeight == 0) {
+            Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
+            rendererEvents.onFirstFrameRendered();
+          }
+          Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to "
+              + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+          rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
+        }
+        frameWidth = frame.width;
+        frameHeight = frame.height;
+        frameRotation = frame.rotationDegree;
+      }
+    }
+  }
+
+  // Log received/dropped/rendered frame counts and timing statistics.
+  private void logStatistics() {
+    synchronized (statisticsLock) {
+      Logging.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
+          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      if (framesReceived > 0 && framesRendered > 0) {
+        final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
+        Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+            " ms. FPS: " + (float) framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, "Average render time: "
+            + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+      }
+    }
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
new file mode 100644
index 0000000000..0d8968aba9
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
@@ -0,0 +1,143 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+
+final class ThreadUtils {
+  /**
+   * Utility class to be used for checking that a method is called on the correct thread.
+   */
+  public static class ThreadChecker {
+    private Thread thread = Thread.currentThread();
+
+    public void checkIsOnValidThread() {
+      // After detachThread(), the first caller re-binds the checker to its own thread.
+      if (thread == null) {
+        thread = Thread.currentThread();
+      }
+      if (Thread.currentThread() != thread) {
+        throw new IllegalStateException("Wrong thread");
+      }
+    }
+
+    public void detachThread() {
+      thread = null;
+    }
+  }
+
+  /**
+   * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
+   * to complete without getting interrupted.
+   */
+  public interface BlockingOperation {
+    void run() throws InterruptedException;
+  }
+
+  /**
+   * Utility method to make sure a blocking operation is executed to completion without getting
+   * interrupted. This should be used in cases where the operation is waiting for some critical
+   * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during
+   * the blocking operation, this function will re-run the operation until completion, and only then
+   * re-interrupt the thread.
+   */
+  public static void executeUninterruptibly(BlockingOperation operation) {
+    boolean wasInterrupted = false;
+    while (true) {
+      try {
+        operation.run();
+        break;
+      } catch (InterruptedException e) {
+        // Someone is asking us to return early at our convenience. We can't cancel this operation,
+        // but we should preserve the information and pass it along.
+        wasInterrupted = true;
+      }
+    }
+    // Pass interruption information along.
+    if (wasInterrupted) {
+      Thread.currentThread().interrupt();
+    }
+  }
+
+  /** Joins |thread| to completion, deferring any interrupts until the join is done. */
+  public static void joinUninterruptibly(final Thread thread) {
+    executeUninterruptibly(new BlockingOperation() {
+      @Override
+      public void run() throws InterruptedException {
+        thread.join();
+      }
+    });
+  }
+
+  /** Awaits |latch|, deferring any interrupts until the latch has been counted down. */
+  public static void awaitUninterruptibly(final CountDownLatch latch) {
+    executeUninterruptibly(new BlockingOperation() {
+      @Override
+      public void run() throws InterruptedException {
+        latch.await();
+      }
+    });
+  }
+
+  /**
+   * Post |callable| to |handler| and wait for the result.
+   */
+  public static <V> V invokeUninterruptibly(final Handler handler, final Callable<V> callable) {
+    class Result {
+      public V value;
+    }
+    final Result result = new Result();
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.post(new Runnable() {
+      @Override public void run() {
+        try {
+          result.value = callable.call();
+        } catch (Exception e) {
+          // Include the original exception as the cause so its stack trace is not lost.
+          throw new RuntimeException("Callable threw exception: " + e, e);
+        } finally {
+          // Count down in a finally block so the calling thread is always released. If the
+          // countDown() were skipped on an exception, the caller would block forever in
+          // awaitUninterruptibly() below.
+          barrier.countDown();
+        }
+      }
+    });
+    awaitUninterruptibly(barrier);
+    return result.value;
+  }
+
+  /**
+   * Post |runner| to |handler| and wait for the result.
+   */
+  public static void invokeUninterruptibly(final Handler handler, final Runnable runner) {
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.post(new Runnable() {
+      @Override public void run() {
+        try {
+          runner.run();
+        } finally {
+          // Always release the waiting thread, even if |runner| throws.
+          barrier.countDown();
+        }
+      }
+    });
+    awaitUninterruptibly(barrier);
+  }
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
new file mode 100644
index 0000000000..4caefc513d
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -0,0 +1,896 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.Camera.PreviewCallback;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import org.json.JSONException;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGL10;
+
+// Android specific implementation of VideoCapturer.
+// An instance of this class can be created by an application using
+// VideoCapturerAndroid.create();
+// This class extends VideoCapturer with a method to easily switch between the
+// front and back camera. It also provides methods for enumerating valid device
+// names.
+//
+// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
+// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
+// camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
+// the camera has been stopped.
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
+ SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final static String TAG = "VideoCapturerAndroid";
+ private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+
+ private Camera camera; // Only non-null while capturing.
+ private HandlerThread cameraThread;
+ private final Handler cameraThreadHandler;
+ private Context applicationContext;
+ // Synchronization lock for |id|.
+ private final Object cameraIdLock = new Object();
+ private int id;
+ private Camera.CameraInfo info;
+ private final FramePool videoBuffers;
+ private final CameraStatistics cameraStatistics = new CameraStatistics();
+ // Remember the requested format in case we want to switch cameras.
+ private int requestedWidth;
+ private int requestedHeight;
+ private int requestedFramerate;
+ // The capture format will be the closest supported format to the requested format.
+ private CaptureFormat captureFormat;
+ private final Object pendingCameraSwitchLock = new Object();
+ private volatile boolean pendingCameraSwitch;
+ private CapturerObserver frameObserver = null;
+ private final CameraEventsHandler eventsHandler;
+ private boolean firstFrameReported;
+ private final boolean isCapturingToTexture;
+ private final SurfaceTextureHelper surfaceHelper;
+ // The camera API can output one old frame after the camera has been switched or the resolution
+ // has been changed. This flag is used for dropping the first frame after camera restart.
+ private boolean dropNextFrame = false;
+
+  // Camera error callback. Invoked by the Android camera framework; translates the error code
+  // into a message and forwards it to |eventsHandler|.
+  private final Camera.ErrorCallback cameraErrorCallback =
+      new Camera.ErrorCallback() {
+    @Override
+    public void onError(int error, Camera camera) {
+      String errorMessage;
+      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
+        errorMessage = "Camera server died!";
+      } else {
+        errorMessage = "Camera error: " + error;
+      }
+      Logging.e(TAG, errorMessage);
+      // Only report to the application if it registered a handler.
+      if (eventsHandler != null) {
+        eventsHandler.onCameraError(errorMessage);
+      }
+    }
+  };
+
+  // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+  private final Runnable cameraObserver = new Runnable() {
+    @Override
+    public void run() {
+      int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
+      // Rounded fps over the observation period (the "+ period/2" rounds to nearest).
+      int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
+          / CAMERA_OBSERVER_PERIOD_MS;
+
+      Logging.d(TAG, "Camera fps: " + cameraFps +
+          ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps());
+      if (cameraFramesCount == 0) {
+        // No frames in a full period: treat the camera as frozen and stop observing.
+        Logging.e(TAG, "Camera freezed.");
+        if (eventsHandler != null) {
+          eventsHandler.onCameraError("Camera failure.");
+        }
+      } else {
+        // Re-arm the observer for the next period.
+        cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+      }
+    }
+  };
+
+  // Bookkeeping for frame counting and outstanding (not yet returned) frame timestamps.
+  // All methods must be called from a single thread; the ThreadChecker binds to the first
+  // caller after construction since the constructor detaches it.
+  private static class CameraStatistics {
+    private int frameCount = 0;
+    private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+    // Timestamps (ns) of frames handed out and not yet returned.
+    private final Set<Long> timeStampsNs = new HashSet<Long>();
+
+    CameraStatistics() {
+      threadChecker.detachThread();
+    }
+
+    public void addPendingFrame(long timestamp) {
+      threadChecker.checkIsOnValidThread();
+      ++frameCount;
+      timeStampsNs.add(timestamp);
+    }
+
+    public void frameReturned(long timestamp) {
+      threadChecker.checkIsOnValidThread();
+      if (!timeStampsNs.contains(timestamp)) {
+        throw new IllegalStateException(
+            "CameraStatistics.frameReturned called with unknown timestamp " + timestamp);
+      }
+      timeStampsNs.remove(timestamp);
+    }
+
+    // Returns frames counted since the last call and resets the counter.
+    public int getAndResetFrameCount() {
+      threadChecker.checkIsOnValidThread();
+      int count = frameCount;
+      frameCount = 0;
+      return count;
+    }
+
+    // Return number of pending frames that have not been returned.
+    public int pendingFramesCount() {
+      threadChecker.checkIsOnValidThread();
+      return timeStampsNs.size();
+    }
+
+    // Returns the pending timestamps converted to milliseconds, for logging.
+    public String pendingFramesTimeStamps() {
+      threadChecker.checkIsOnValidThread();
+      List<Long> timeStampsMs = new ArrayList<Long>();
+      for (long ts : timeStampsNs) {
+        timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts));
+      }
+      return timeStampsMs.toString();
+    }
+  }
+
+  // Application callback interface for camera lifecycle events. Unless noted otherwise,
+  // callbacks are invoked from the camera thread.
+  public static interface CameraEventsHandler {
+    // Camera error handler - invoked when camera stops receiving frames
+    // or any camera exception happens on camera thread.
+    void onCameraError(String errorDescription);
+
+    // Callback invoked when camera is opening.
+    void onCameraOpening(int cameraId);
+
+    // Callback invoked when first camera frame is available after camera is opened.
+    void onFirstFrameAvailable();
+
+    // Callback invoked when camera closed.
+    void onCameraClosed();
+  }
+
+  // Camera switch handler - one of these functions are invoked with the result of switchCamera().
+  // The callback may be called on an arbitrary thread.
+  public interface CameraSwitchHandler {
+    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+    void onCameraSwitchDone(boolean isFrontCamera);
+    // Invoked on failure, e.g. camera is stopped or only one camera available.
+    void onCameraSwitchError(String errorDescription);
+  }
+
+  // Convenience overload without a shared EGL context (capture to byte buffers).
+  public static VideoCapturerAndroid create(String name,
+      CameraEventsHandler eventsHandler) {
+    return VideoCapturerAndroid.create(name, eventsHandler, null);
+  }
+
+  // Factory method. Returns null if no camera matching |name| can be found. A non-null
+  // |sharedEglContext| enables capturing to a SurfaceTexture instead of byte buffers.
+  public static VideoCapturerAndroid create(String name,
+      CameraEventsHandler eventsHandler, EGLContext sharedEglContext) {
+    final int cameraId = lookupDeviceName(name);
+    if (cameraId == -1) {
+      return null;
+    }
+
+    final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
+        sharedEglContext);
+    // Attach the native capturer before handing the object to the caller.
+    capturer.setNativeCapturer(nativeCreateVideoCapturer(capturer));
+    return capturer;
+  }
+
+  // Dump the camera thread's current stack trace to the log, for debugging.
+  public void printStackTrace() {
+    final Thread thread = cameraThread;
+    if (thread == null) {
+      return;
+    }
+    final StackTraceElement[] frames = thread.getStackTrace();
+    if (frames.length == 0) {
+      return;
+    }
+    Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
+    for (int i = 0; i < frames.length; i++) {
+      Logging.d(TAG, frames[i].toString());
+    }
+  }
+
+  // Switch camera to the next valid camera id. This can only be called while
+  // the camera is running. The result is reported through |handler|, possibly from the
+  // camera thread.
+  public void switchCamera(final CameraSwitchHandler handler) {
+    if (Camera.getNumberOfCameras() < 2) {
+      if (handler != null) {
+        handler.onCameraSwitchError("No camera to switch to.");
+      }
+      return;
+    }
+    synchronized (pendingCameraSwitchLock) {
+      if (pendingCameraSwitch) {
+        // Do not handle multiple camera switch request to avoid blocking
+        // camera thread by handling too many switch request from a queue.
+        Logging.w(TAG, "Ignoring camera switch request.");
+        if (handler != null) {
+          handler.onCameraSwitchError("Pending camera switch already in progress.");
+        }
+        return;
+      }
+      pendingCameraSwitch = true;
+    }
+    // The actual switch happens asynchronously on the camera thread.
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        if (camera == null) {
+          if (handler != null) {
+            handler.onCameraSwitchError("Camera is stopped.");
+          }
+          return;
+        }
+        switchCameraOnCameraThread();
+        synchronized (pendingCameraSwitchLock) {
+          pendingCameraSwitch = false;
+        }
+        if (handler != null) {
+          handler.onCameraSwitchDone(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+        }
+      }
+    });
+  }
+
+  // Requests a new output format from the video capturer. Captured frames
+  // by the camera will be scaled/or dropped by the video capturer.
+  // Runs asynchronously on the camera thread.
+  // TODO(magjed/perkj): Document what this function does. Change name?
+  public void onOutputFormatRequest(final int width, final int height, final int framerate) {
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        onOutputFormatRequestOnCameraThread(width, height, framerate);
+      }
+    });
+  }
+
+  // Reconfigure the camera to capture in a new format. This should only be called while the camera
+  // is running. Runs asynchronously on the camera thread.
+  public void changeCaptureFormat(final int width, final int height, final int framerate) {
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        startPreviewOnCameraThread(width, height, framerate);
+      }
+    });
+  }
+
+  // Helper function to retrieve the current camera id synchronously. Note that the camera id might
+  // change at any point by switchCamera() calls. Thread safe - |id| is guarded by |cameraIdLock|.
+  private int getCurrentCameraId() {
+    synchronized (cameraIdLock) {
+      return id;
+    }
+  }
+
+  // Returns the supported capture formats for the currently selected camera.
+  public List<CaptureFormat> getSupportedFormats() {
+    return CameraEnumerationAndroid.getSupportedFormats(getCurrentCameraId());
+  }
+
+  // Returns true if this VideoCapturer is setup to capture video frames to a SurfaceTexture.
+  // The value is fixed at construction time (true iff a shared EGL context was supplied).
+  public boolean isCapturingToTexture() {
+    return isCapturingToTexture;
+  }
+
+  // Called from native code. Returns the supported formats of the current camera serialized
+  // as JSON. Throws JSONException if serialization fails.
+  private String getSupportedFormatsAsJson() throws JSONException {
+    return CameraEnumerationAndroid.getSupportedFormatsAsJson(getCurrentCameraId());
+  }
+
+  // Called from native VideoCapturer_nativeCreateVideoCapturer. Delegates to the full
+  // constructor with no events handler and no shared EGL context (byte-buffer capture).
+  private VideoCapturerAndroid(int cameraId) {
+    this(cameraId, null, null);
+  }
+
+  // Main constructor. A non-null |sharedContext| enables texture capture; otherwise frames are
+  // delivered as byte buffers through the PreviewCallback.
+  private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
+      EGLContext sharedContext) {
+    Logging.d(TAG, "VideoCapturerAndroid");
+    this.id = cameraId;
+    this.eventsHandler = eventsHandler;
+    // Dedicated thread where the camera is opened and all camera callbacks run.
+    cameraThread = new HandlerThread(TAG);
+    cameraThread.start();
+    cameraThreadHandler = new Handler(cameraThread.getLooper());
+    videoBuffers = new FramePool(cameraThread);
+    isCapturingToTexture = (sharedContext != null);
+    surfaceHelper = SurfaceTextureHelper.create(
+        isCapturingToTexture ? sharedContext : EGL10.EGL_NO_CONTEXT, cameraThreadHandler);
+    if (isCapturingToTexture) {
+      surfaceHelper.setListener(this);
+    }
+  }
+
+  // Throws IllegalStateException unless the caller is running on |cameraThread|.
+  private void checkIsOnCameraThread() {
+    if (Thread.currentThread() == cameraThread) {
+      return;
+    }
+    throw new IllegalStateException("Wrong thread");
+  }
+
+  // Returns the camera index for camera with name |deviceName|, or -1 if no such camera can be
+  // found. If |deviceName| is empty, the first available device is used.
+  private static int lookupDeviceName(String deviceName) {
+    Logging.d(TAG, "lookupDeviceName: " + deviceName);
+    if (deviceName == null || Camera.getNumberOfCameras() == 0) {
+      return -1;
+    }
+    if (deviceName.isEmpty()) {
+      // Empty name selects the default (first) camera.
+      return 0;
+    }
+    // Scan all cameras for a matching device name.
+    int foundIndex = -1;
+    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+      if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
+        foundIndex = i;
+        break;
+      }
+    }
+    return foundIndex;
+  }
+
+  // Called by native code to quit the camera thread. This needs to be done manually, otherwise the
+  // thread and handler will not be garbage collected.
+  private void release() {
+    Logging.d(TAG, "release");
+    if (isReleased()) {
+      throw new IllegalStateException("Already released");
+    }
+    // Run sanity checks on the camera thread and block until they have completed. Release must
+    // only happen after capture has been stopped and all frames returned.
+    ThreadUtils.invokeUninterruptibly(cameraThreadHandler, new Runnable() {
+      @Override
+      public void run() {
+        if (camera != null) {
+          throw new IllegalStateException("Release called while camera is running");
+        }
+        if (cameraStatistics.pendingFramesCount() != 0) {
+          throw new IllegalStateException("Release called with pending frames left");
+        }
+      }
+    });
+    surfaceHelper.disconnect();
+    // Quit and join the camera thread; |cameraThread| == null marks the capturer as released.
+    cameraThread.quit();
+    ThreadUtils.joinUninterruptibly(cameraThread);
+    cameraThread = null;
+  }
+
+  // Used for testing purposes to check if release() has been called.
+  public boolean isReleased() {
+    // release() nulls out |cameraThread|, so that serves as the released flag.
+    return cameraThread == null;
+  }
+
+  // Called by native code. Starts capture asynchronously on the camera thread; the result is
+  // reported through |frameObserver|.onCapturerStarted().
+  //
+  // Note that this actually opens the camera, and Camera callbacks run on the
+  // thread that calls open(), so this is done on the CameraThread.
+  void startCapture(
+      final int width, final int height, final int framerate,
+      final Context applicationContext, final CapturerObserver frameObserver) {
+    Logging.d(TAG, "startCapture requested: " + width + "x" + height
+        + "@" + framerate);
+    // Fail fast on the calling thread for missing required arguments.
+    if (applicationContext == null) {
+      throw new RuntimeException("applicationContext not set.");
+    }
+    if (frameObserver == null) {
+      throw new RuntimeException("frameObserver not set.");
+    }
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        startCaptureOnCameraThread(width, height, framerate, frameObserver,
+            applicationContext);
+      }
+    });
+  }
+
+  // Opens the camera and starts the preview. Must run on the camera thread. On failure, the
+  // camera is cleaned up and failure is reported through |frameObserver| and |eventsHandler|.
+  private void startCaptureOnCameraThread(
+      int width, int height, int framerate, CapturerObserver frameObserver,
+      Context applicationContext) {
+    Throwable error = null;
+    checkIsOnCameraThread();
+    if (camera != null) {
+      throw new RuntimeException("Camera has already been started.");
+    }
+    this.applicationContext = applicationContext;
+    this.frameObserver = frameObserver;
+    try {
+      synchronized (cameraIdLock) {
+        Logging.d(TAG, "Opening camera " + id);
+        firstFrameReported = false;
+        if (eventsHandler != null) {
+          eventsHandler.onCameraOpening(id);
+        }
+        camera = Camera.open(id);
+        info = new Camera.CameraInfo();
+        Camera.getCameraInfo(id, info);
+      }
+      try {
+        camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
+      } catch (IOException e) {
+        // Log the exception that was actually caught. (Previously this logged the local
+        // |error| variable, which is still null here, losing the failure details.)
+        Logging.e(TAG, "setPreviewTexture failed", e);
+        throw new RuntimeException(e);
+      }
+
+      Logging.d(TAG, "Camera orientation: " + info.orientation +
+          " .Device orientation: " + getDeviceOrientation());
+      camera.setErrorCallback(cameraErrorCallback);
+      startPreviewOnCameraThread(width, height, framerate);
+      frameObserver.onCapturerStarted(true);
+
+      // Start camera observer.
+      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+      return;
+    } catch (RuntimeException e) {
+      error = e;
+    }
+    // Failure path: tear down whatever was started and report the error.
+    Logging.e(TAG, "startCapture failed", error);
+    stopCaptureOnCameraThread();
+    frameObserver.onCapturerStarted(false);
+    if (eventsHandler != null) {
+      eventsHandler.onCameraError("Camera can not be started.");
+    }
+    return;
+  }
+
+  // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
+  private void startPreviewOnCameraThread(int width, int height, int framerate) {
+    checkIsOnCameraThread();
+    Logging.d(
+        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
+    if (camera == null) {
+      Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
+      return;
+    }
+
+    // Remember the requested format so switchCamera() can restart with the same request.
+    requestedWidth = width;
+    requestedHeight = height;
+    requestedFramerate = framerate;
+
+    // Find closest supported format for |width| x |height| @ |framerate|.
+    final Camera.Parameters parameters = camera.getParameters();
+    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
+    final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+        parameters.getSupportedPreviewSizes(), width, height);
+    final CaptureFormat captureFormat = new CaptureFormat(
+        previewSize.width, previewSize.height,
+        range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+        range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+
+    // Check if we are already using this capture format, then we don't need to do anything.
+    if (captureFormat.equals(this.captureFormat)) {
+      return;
+    }
+
+    // Update camera parameters.
+    Logging.d(TAG, "isVideoStabilizationSupported: " +
+        parameters.isVideoStabilizationSupported());
+    if (parameters.isVideoStabilizationSupported()) {
+      parameters.setVideoStabilization(true);
+    }
+    // Note: setRecordingHint(true) actually decrease frame rate on N5.
+    // parameters.setRecordingHint(true);
+    if (captureFormat.maxFramerate > 0) {
+      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
+    }
+    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+    parameters.setPreviewFormat(captureFormat.imageFormat);
+    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
+    // as a workaround for an aspect ratio problem on Nexus 7.
+    final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
+        parameters.getSupportedPictureSizes(), width, height);
+    parameters.setPictureSize(pictureSize.width, pictureSize.height);
+
+    // Temporarily stop preview if it's already running.
+    if (this.captureFormat != null) {
+      camera.stopPreview();
+      // The first frame after a restart may still be in the old format - drop it.
+      dropNextFrame = true;
+      // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
+      // queue, but sometimes we receive a frame with the old resolution after this call anyway.
+      camera.setPreviewCallbackWithBuffer(null);
+    }
+
+    // (Re)start preview.
+    Logging.d(TAG, "Start capturing: " + captureFormat);
+    this.captureFormat = captureFormat;
+
+    List<String> focusModes = parameters.getSupportedFocusModes();
+    if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+      parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+    }
+
+    camera.setParameters(parameters);
+    // Byte-buffer capture needs preview buffers and the PreviewCallback; texture capture
+    // receives frames through the SurfaceTextureHelper listener instead.
+    if (!isCapturingToTexture) {
+      videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
+      camera.setPreviewCallbackWithBuffer(this);
+    }
+    camera.startPreview();
+  }
+
+ // Called by native code. Returns true when camera is known to be stopped.
+ void stopCapture() throws InterruptedException {
+ Logging.d(TAG, "stopCapture");
+ final CountDownLatch barrier = new CountDownLatch(1);
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ stopCaptureOnCameraThread();
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ Logging.d(TAG, "stopCapture done");
+ }
+
  // Stops preview, releases the camera and notifies |eventsHandler|. Safe to call on an
  // already-stopped camera (logs and returns). Must run on the camera thread.
  private void stopCaptureOnCameraThread() {
    checkIsOnCameraThread();
    Logging.d(TAG, "stopCaptureOnCameraThread");
    if (camera == null) {
      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
      return;
    }

    // Cancel the periodic camera observer task and reset the frame counter.
    cameraThreadHandler.removeCallbacks(cameraObserver);
    cameraStatistics.getAndResetFrameCount();
    Logging.d(TAG, "Stop preview.");
    camera.stopPreview();
    camera.setPreviewCallbackWithBuffer(null);
    if (!isCapturingToTexture()) {
      // Stop recycling byte buffers to the camera; frames still in flight stay alive in the pool.
      videoBuffers.stopReturnBuffersToCamera();
      Logging.d(TAG, "stopReturnBuffersToCamera called."
          + (cameraStatistics.pendingFramesCount() == 0?
          " All buffers have been returned."
          : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
    }
    captureFormat = null;

    Logging.d(TAG, "Release camera.");
    camera.release();
    camera = null;
    if (eventsHandler != null) {
      eventsHandler.onCameraClosed();
    }
  }
+
  // Stops the current camera, advances to the next camera id (wrapping around) and restarts
  // capture with the previously requested size/framerate. Must run on the camera thread.
  private void switchCameraOnCameraThread() {
    checkIsOnCameraThread();
    Logging.d(TAG, "switchCameraOnCameraThread");
    stopCaptureOnCameraThread();
    synchronized (cameraIdLock) {
      // Cycle through the available cameras.
      id = (id + 1) % Camera.getNumberOfCameras();
    }
    // The first frame after the switch may come from the old camera; drop it.
    dropNextFrame = true;
    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
        applicationContext);
    Logging.d(TAG, "switchCameraOnCameraThread done");
  }
+
+ private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
+ checkIsOnCameraThread();
+ if (camera == null) {
+ Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
+ return;
+ }
+ Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
+ "@" + framerate);
+ frameObserver.onOutputFormatRequest(width, height, framerate);
+ }
+
+ public void returnBuffer(final long timeStamp) {
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ cameraStatistics.frameReturned(timeStamp);
+ if (isCapturingToTexture) {
+ surfaceHelper.returnTextureFrame();
+ } else {
+ videoBuffers.returnBuffer(timeStamp);
+ }
+ }
+ });
+ }
+
+ private int getDeviceOrientation() {
+ int orientation = 0;
+
+ WindowManager wm = (WindowManager) applicationContext.getSystemService(
+ Context.WINDOW_SERVICE);
+ switch(wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ orientation = 90;
+ break;
+ case Surface.ROTATION_180:
+ orientation = 180;
+ break;
+ case Surface.ROTATION_270:
+ orientation = 270;
+ break;
+ case Surface.ROTATION_0:
+ default:
+ orientation = 0;
+ break;
+ }
+ return orientation;
+ }
+
+ private int getFrameOrientation() {
+ int rotation = getDeviceOrientation();
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
+
  // Called on the camera thread, so it must not be synchronized.
  // Delivers a byte-buffer preview frame to |frameObserver| after reserving its buffer in the
  // frame pool; stale frames from a previous configuration are dropped.
  @Override
  public void onPreviewFrame(byte[] data, Camera callbackCamera) {
    checkIsOnCameraThread();
    if (camera == null) {
      // Capture has been stopped; ignore late callbacks.
      return;
    }
    if (camera != callbackCamera) {
      throw new RuntimeException("Unexpected camera in callback!");
    }

    final long captureTimeNs =
        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

    // Report the first captured frame exactly once.
    if (eventsHandler != null && !firstFrameReported) {
      eventsHandler.onFirstFrameAvailable();
      firstFrameReported = true;
    }

    // Mark the frame owning |data| as used.
    // Note that since data is directBuffer,
    // data.length >= videoBuffers.frameSize.
    if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
      cameraStatistics.addPendingFrame(captureTimeNs);
      frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
          captureFormat.height, getFrameOrientation(), captureTimeNs);
    } else {
      Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
    }
  }
+
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ checkIsOnCameraThread();
+ if (camera == null) {
+ // Camera is stopped, we need to return the buffer immediately.
+ surfaceHelper.returnTextureFrame();
+ return;
+ }
+ if (!dropNextFrame) {
+ surfaceHelper.returnTextureFrame();
+ dropNextFrame = true;
+ return;
+ }
+
+ int rotation = getFrameOrientation();
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
+ }
+ transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, rotation);
+
+ final int rotatedWidth = (rotation % 180 == 0) ? captureFormat.width : captureFormat.height;
+ final int rotatedHeight = (rotation % 180 == 0) ? captureFormat.height : captureFormat.width;
+ cameraStatistics.addPendingFrame(timestampNs);
+ frameObserver.onTextureFrameCaptured(rotatedWidth, rotatedHeight, oesTextureId,
+ transformMatrix, timestampNs);
+ }
+
  // Class used for allocating and bookkeeping video frames. All buffers are
  // direct allocated so that they can be directly used from native code. This class is
  // not thread-safe, and enforces single thread use.
  //
  // Buffer life cycle: allocateDirect -> |queuedBuffers| (registered as camera callback
  // buffers) -> removed in reserveByteBuffer() when delivered -> |pendingBuffers| while the
  // consumer holds the frame -> re-queued to the camera in returnBuffer().
  private static class FramePool {
    // Thread that all calls should be made on.
    private final Thread thread;
    // Arbitrary queue depth. Higher number means more memory allocated & held,
    // lower number means more sensitivity to processing time in the client (and
    // potentially stalling the capturer if it runs out of buffers to write to).
    private static final int numCaptureBuffers = 3;
    // This container tracks the buffers added as camera callback buffers. It is needed for finding
    // the corresponding ByteBuffer given a byte[].
    private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
    // This container tracks the frames that have been sent but not returned. It is needed for
    // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
    private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
    // Size in bytes of the buffers for the currently active capture format; 0 until
    // queueCameraBuffers() is called.
    private int frameSize = 0;
    // Camera currently receiving the callback buffers; null after stopReturnBuffersToCamera().
    private Camera camera;

    public FramePool(Thread thread) {
      this.thread = thread;
    }

    // Throws if called from any thread other than the one passed to the constructor.
    private void checkIsOnValidThread() {
      if (Thread.currentThread() != thread) {
        throw new IllegalStateException("Wrong thread");
      }
    }

    // Discards previous queued buffers and adds new callback buffers to camera.
    public void queueCameraBuffers(int frameSize, Camera camera) {
      checkIsOnValidThread();
      this.camera = camera;
      this.frameSize = frameSize;

      queuedBuffers.clear();
      for (int i = 0; i < numCaptureBuffers; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        // The camera writes into the backing byte[]; the map keys on that same array so the
        // ByteBuffer can be recovered in reserveByteBuffer().
        camera.addCallbackBuffer(buffer.array());
        queuedBuffers.put(buffer.array(), buffer);
      }
      Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
          + " buffers of size " + frameSize + ".");
    }

    // Stops recycling buffers to the camera. Pending (in-flight) frames are kept alive.
    public void stopReturnBuffersToCamera() {
      checkIsOnValidThread();
      this.camera = null;
      queuedBuffers.clear();
      // Frames in |pendingBuffers| need to be kept alive until they are returned.
    }

    // Moves the buffer backing |data| from the queued set to the pending set, keyed by
    // |timeStamp|. Returns false (frame should be dropped) for buffers from a previous
    // configuration or duplicate timestamps.
    public boolean reserveByteBuffer(byte[] data, long timeStamp) {
      checkIsOnValidThread();
      final ByteBuffer buffer = queuedBuffers.remove(data);
      if (buffer == null) {
        // Frames might be posted to |onPreviewFrame| with the previous format while changing
        // capture format in |startPreviewOnCameraThread|. Drop these old frames.
        Logging.w(TAG, "Received callback buffer from previous configuration with length: "
            + (data == null ? "null" : data.length));
        return false;
      }
      if (buffer.capacity() != frameSize) {
        throw new IllegalStateException("Callback buffer has unexpected frame size");
      }
      if (pendingBuffers.containsKey(timeStamp)) {
        Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
        return false;
      }
      pendingBuffers.put(timeStamp, buffer);
      if (queuedBuffers.isEmpty()) {
        Logging.d(TAG, "Camera is running out of capture buffers.");
      }
      return true;
    }

    // Returns the pending buffer keyed by |timeStamp| to the camera's callback queue, unless
    // the camera has been stopped or the buffer belongs to an old (differently sized) format.
    public void returnBuffer(long timeStamp) {
      checkIsOnValidThread();
      final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
      if (returnedFrame == null) {
        throw new RuntimeException("unknown data buffer with time stamp "
            + timeStamp + "returned?!?");
      }

      if (camera != null && returnedFrame.capacity() == frameSize) {
        camera.addCallbackBuffer(returnedFrame.array());
        if (queuedBuffers.isEmpty()) {
          Logging.d(TAG, "Frame returned when camera is running out of capture"
              + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
        }
        queuedBuffers.put(returnedFrame.array(), returnedFrame);
        return;
      }

      if (returnedFrame.capacity() != frameSize) {
        Logging.d(TAG, "returnBuffer with time stamp "
            + TimeUnit.NANOSECONDS.toMillis(timeStamp)
            + " called with old frame size, " + returnedFrame.capacity() + ".");
        // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
        // created in queueCameraBuffers so this must be an old buffer.
        return;
      }

      Logging.d(TAG, "returnBuffer with time stamp "
          + TimeUnit.NANOSECONDS.toMillis(timeStamp)
          + " called after camera has been stopped.");
    }
  }
+
+ // Interface used for providing callbacks to an observer.
+ interface CapturerObserver {
+ // Notify if the camera have been started successfully or not.
+ // Called on a Java thread owned by VideoCapturerAndroid.
+ abstract void onCapturerStarted(boolean success);
+
+ // Delivers a captured frame. Called on a Java thread owned by
+ // VideoCapturerAndroid.
+ abstract void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
+ int rotation, long timeStamp);
+
+ // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
+ // owned by VideoCapturerAndroid.
+ abstract void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, long timestamp);
+
+ // Requests an output format from the video capturer. Captured frames
+ // by the camera will be scaled/or dropped by the video capturer.
+ // Called on a Java thread owned by VideoCapturerAndroid.
+ abstract void onOutputFormatRequest(int width, int height, int framerate);
+ }
+
  // An implementation of CapturerObserver that forwards all calls from
  // Java to the C layer.
  static class NativeObserver implements CapturerObserver {
    // Opaque pointer to the native capturer object; passed back to every native call.
    private final long nativeCapturer;

    public NativeObserver(long nativeCapturer) {
      this.nativeCapturer = nativeCapturer;
    }

    @Override
    public void onCapturerStarted(boolean success) {
      nativeCapturerStarted(nativeCapturer, success);
    }

    @Override
    public void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
        int rotation, long timeStamp) {
      nativeOnByteBufferFrameCaptured(nativeCapturer, data, length, width, height, rotation,
          timeStamp);
    }

    @Override
    public void onTextureFrameCaptured(
        int width, int height, int oesTextureId, float[] transformMatrix, long timestamp) {
      nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
          timestamp);
    }

    @Override
    public void onOutputFormatRequest(int width, int height, int framerate) {
      nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
    }

    private native void nativeCapturerStarted(long nativeCapturer,
        boolean success);
    private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
        byte[] data, int length, int width, int height, int rotation, long timeStamp);
    private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
        int oesTextureId, float[] transformMatrix, long timestamp);
    private native void nativeOnOutputFormatRequest(long nativeCapturer,
        int width, int height, int framerate);
  }
+
+ private static native long nativeCreateVideoCapturer(VideoCapturerAndroid videoCapturer);
+}
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
new file mode 100644
index 0000000000..bacd0cf11f
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -0,0 +1,663 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import org.webrtc.Logging;
+import org.webrtc.VideoRenderer.I420Frame;
+
+/**
+ * Efficiently renders YUV frames using the GPU for CSC.
+ * Clients will want first to call setView() to pass GLSurfaceView
+ * and then for each video stream either create instance of VideoRenderer using
+ * createGui() call or VideoRenderer.Callbacks interface using create() call.
+ * Only one instance of the class can be created.
+ */
+public class VideoRendererGui implements GLSurfaceView.Renderer {
+ // |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
+ // |VideoRendererGui.class|.
+ private static VideoRendererGui instance = null;
+ private static Runnable eglContextReady = null;
+ private static final String TAG = "VideoRendererGui";
+ private GLSurfaceView surface;
+ private static EGLContext eglContext = null;
+ // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
+ // If true then for every newly created yuv image renderer createTexture()
+ // should be called. The variable is accessed on multiple threads and
+ // all accesses are synchronized on yuvImageRenderers' object lock.
+ private boolean onSurfaceCreatedCalled;
+ private int screenWidth;
+ private int screenHeight;
+ // List of yuv renderers.
+ private final ArrayList<YuvImageRenderer> yuvImageRenderers;
+ // |drawer| is synchronized on |yuvImageRenderers|.
+ private GlRectDrawer drawer;
+ // Render and draw threads.
+ private static Thread renderFrameThread;
+ private static Thread drawThread;
+
  // Private: instances are created via the static setView() entry point only.
  private VideoRendererGui(GLSurfaceView surface) {
    this.surface = surface;
    // Create an OpenGL ES 2.0 context.
    surface.setPreserveEGLContextOnPause(true);
    surface.setEGLContextClientVersion(2);
    surface.setRenderer(this);
    // Render only when requestRender() is called (from renderFrame()), not continuously.
    surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    yuvImageRenderers = new ArrayList<YuvImageRenderer>();
  }
+
+ /**
+ * Class used to display stream of YUV420 frames at particular location
+ * on a screen. New video frames are sent to display using renderFrame()
+ * call.
+ */
+ private static class YuvImageRenderer implements VideoRenderer.Callbacks {
+ // |surface| is synchronized on |this|.
+ private GLSurfaceView surface;
+ private int id;
+ // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
+ // currently leaking resources to avoid a rare crash in release() where the EGLContext has
+ // become invalid beforehand.
+ private int[] yuvTextures = { 0, 0, 0 };
+ // Resources for making a deep copy of incoming OES texture frame.
+ private GlTextureFrameBuffer textureCopy;
+
+ // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
+ // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
+ // renderFrame() if the previous frame has not been rendered yet.
+ private I420Frame pendingFrame;
+ private final Object pendingFrameLock = new Object();
+ // Type of video frame used for recent frame rendering.
+ private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
+ private RendererType rendererType;
+ private RendererCommon.ScalingType scalingType;
+ private boolean mirror;
+ private RendererCommon.RendererEvents rendererEvents;
+ // Flag if renderFrame() was ever called.
+ boolean seenFrame;
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous
+ // frame has not been rendered yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Time in ns when the first video frame was rendered.
+ private long startTimeNs = -1;
+ // Time in ns spent in draw() function.
+ private long drawTimeNs;
+ // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
+ // data to rendering planes.
+ private long copyTimeNs;
+ // The allowed view area in percentage of screen size.
+ private final Rect layoutInPercentage;
+ // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
+ // |layoutInPercentage|.
+ private final Rect displayLayout = new Rect();
+ // Cached layout transformation matrix, calculated from current layout parameters.
+ private float[] layoutMatrix;
+ // Flag if layout transformation matrix update is needed.
+ private boolean updateLayoutProperties;
+ // Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
+ // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
+ private final Object updateLayoutLock = new Object();
+ // Texture sampling matrix.
+ private float[] rotatedSamplingMatrix;
+ // Viewport dimensions.
+ private int screenWidth;
+ private int screenHeight;
+ // Video dimension.
+ private int videoWidth;
+ private int videoHeight;
+
+ // This is the degree that the frame should be rotated clockwisely to have
+ // it rendered up right.
+ private int rotationDegree;
+
    // Creates a renderer occupying the given sub-rectangle of the screen.
    // |x|, |y|, |width|, |height| are in percent of the screen; the rectangle is clamped
    // so that it never extends past 100% in either dimension.
    private YuvImageRenderer(
        GLSurfaceView surface, int id,
        int x, int y, int width, int height,
        RendererCommon.ScalingType scalingType, boolean mirror) {
      Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
      this.surface = surface;
      this.id = id;
      this.scalingType = scalingType;
      this.mirror = mirror;
      layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
      updateLayoutProperties = false;
      rotationDegree = 0;
    }
+
    // Forgets that a frame was ever seen, so draw() becomes a no-op until the next
    // renderFrame() call.
    public synchronized void reset() {
      seenFrame = false;
    }
+
    // Detaches from the surface and releases any frame still waiting to be drawn.
    // After this, renderFrame() returns incoming frames immediately.
    private synchronized void release() {
      surface = null;
      synchronized (pendingFrameLock) {
        if (pendingFrame != null) {
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }
    }
+
    // Allocates the GL resources for this renderer. Must be called on the GL thread.
    private void createTextures() {
      Logging.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread:" +
          Thread.currentThread().getId());

      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
      for (int i = 0; i < 3; i++)  {
        yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
      }
      // Generate texture and framebuffer for offscreen texture copy.
      textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
    }
+
    // Recomputes |displayLayout| and |layoutMatrix| from the current layout parameters.
    // No-op unless a setter has flagged |updateLayoutProperties| since the last call.
    private void updateLayoutMatrix() {
      synchronized(updateLayoutLock) {
        if (!updateLayoutProperties) {
          return;
        }
        // Initialize to maximum allowed area. Round to integer coordinates inwards the layout
        // bounding box (ceil left/top and floor right/bottom) to not break constraints.
        displayLayout.set(
            (screenWidth * layoutInPercentage.left + 99) / 100,
            (screenHeight * layoutInPercentage.top + 99) / 100,
            (screenWidth * layoutInPercentage.right) / 100,
            (screenHeight * layoutInPercentage.bottom) / 100);
        Logging.d(TAG, "ID: "  + id + ". AdjustTextureCoords. Allowed display size: "
            + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
            + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
        // Aspect ratio as seen after rotation: 90/270 swaps width and height.
        final float videoAspectRatio = (rotationDegree % 180 == 0)
            ? (float) videoWidth / videoHeight
            : (float) videoHeight / videoWidth;
        // Adjust display size based on |scalingType|.
        final Point displaySize = RendererCommon.getDisplaySize(scalingType,
            videoAspectRatio, displayLayout.width(), displayLayout.height());
        // Center the adjusted size within the allowed rectangle.
        displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
                            (displayLayout.height() - displaySize.y) / 2);
        Logging.d(TAG, "  Adjusted display size: " + displayLayout.width() + " x "
            + displayLayout.height());
        layoutMatrix = RendererCommon.getLayoutMatrix(
            mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
        updateLayoutProperties = false;
        Logging.d(TAG, "  AdjustTextureCoords done");
      }
    }
+
    // Draws this renderer's frame into its viewport. Called on the GL thread.
    // If a new frame is pending it is first uploaded (YUV path) or copied out of the external
    // OES texture (texture path); otherwise the previously uploaded/copied frame is redrawn.
    private void draw(GlRectDrawer drawer) {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
                pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
            // sampling matrix.
            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Update texture image to latest and make a deep copy of
            // the external texture.
            // TODO(magjed): Move updateTexImage() to the video source instead.
            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
            surfaceTexture.updateTexImage();
            final float[] samplingMatrix = new float[16];
            surfaceTexture.getTransformMatrix(samplingMatrix);
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
          }
          copyTimeNs += (System.nanoTime() - now);
          // The frame data has been uploaded/copied; the frame itself can be returned now.
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      // OpenGL defaults to lower left origin - flip vertically.
      GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
                        displayLayout.width(), displayLayout.height());

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        // Log statistics every 300 rendered frames.
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }
+
    // Logs received/dropped/rendered counters plus average draw and copy times.
    private void logStatistics() {
      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
          ". Frames received: " + framesReceived +
          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
      if (framesReceived > 0 && framesRendered > 0) {
        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
            " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
        Logging.d(TAG, "Draw time: " +
            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
      }
    }
+
    // Updates the cached viewport dimensions and flags the layout for recomputation.
    // No-op if the size is unchanged.
    public void setScreenSize(final int screenWidth, final int screenHeight) {
      synchronized(updateLayoutLock) {
        if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
          return;
        }
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
            screenWidth + " x " + screenHeight);
        this.screenWidth = screenWidth;
        this.screenHeight = screenHeight;
        updateLayoutProperties = true;
      }
    }
+
    // Moves/resizes the render rectangle (percent of screen) and updates scaling/mirroring.
    // No-op if nothing changed; otherwise flags the layout for recomputation.
    public void setPosition(int x, int y, int width, int height,
        RendererCommon.ScalingType scalingType, boolean mirror) {
      final Rect layoutInPercentage =
          new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
      synchronized(updateLayoutLock) {
        if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
            && mirror == this.mirror) {
          return;
        }
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
            ") " +  width + " x " + height + ". Scaling: " + scalingType +
            ". Mirror: " + mirror);
        this.layoutInPercentage.set(layoutInPercentage);
        this.scalingType = scalingType;
        this.mirror = mirror;
        updateLayoutProperties = true;
      }
    }
+
    // Records new incoming video dimensions/rotation and notifies |rendererEvents|.
    // No-op if unchanged. The event callback is deliberately invoked outside
    // |updateLayoutLock| to avoid holding the lock during client code.
    private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
      if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
          && rotation == rotationDegree) {
        return;
      }
      if (rendererEvents != null) {
        Logging.d(TAG, "ID: " + id +
            ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
        rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
      }

      synchronized (updateLayoutLock) {
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
            videoWidth + " x " + videoHeight + " rotation " + rotation);

        this.videoWidth = videoWidth;
        this.videoHeight = videoHeight;
        rotationDegree = rotation;
        updateLayoutProperties = true;
        Logging.d(TAG, "  YuvImageRenderer.setSize done.");
      }
    }
+
    // Receives a frame from the video track. |pendingFrame| acts as a queue of size one:
    // if the previous frame has not been drawn yet, the new frame is dropped. Every frame
    // handed in here is eventually released via VideoRenderer.renderFrameDone() - either
    // immediately (released renderer, bad strides, drop) or after draw() consumes it.
    @Override
    public synchronized void renderFrame(I420Frame frame) {
      if (surface == null) {
        // This object has been released.
        VideoRenderer.renderFrameDone(frame);
        return;
      }
      if (renderFrameThread == null) {
        renderFrameThread = Thread.currentThread();
      }
      if (!seenFrame && rendererEvents != null) {
        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
        rendererEvents.onFirstFrameRendered();
      }
      framesReceived++;
      synchronized (pendingFrameLock) {
        // Check input frame parameters.
        if (frame.yuvFrame) {
          if (frame.yuvStrides[0] < frame.width ||
              frame.yuvStrides[1] < frame.width / 2 ||
              frame.yuvStrides[2] < frame.width / 2) {
            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
                frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
            VideoRenderer.renderFrameDone(frame);
            return;
          }
        }

        if (pendingFrame != null) {
          // Skip rendering of this frame if previous frame was not rendered yet.
          framesDropped++;
          VideoRenderer.renderFrameDone(frame);
          seenFrame = true;
          return;
        }
        pendingFrame = frame;
      }
      setSize(frame.width, frame.height, frame.rotationDegree);
      seenFrame = true;

      // Request rendering.
      surface.requestRender();
    }
+ }
+
  /**
   * Passes GLSurfaceView to video renderer. Creates the singleton instance;
   * |eglContextReadyCallback| is stored to be invoked once the EGL context is available.
   */
  public static synchronized void setView(GLSurfaceView surface,
      Runnable eglContextReadyCallback) {
    Logging.d(TAG, "VideoRendererGui.setView");
    instance = new VideoRendererGui(surface);
    eglContextReady = eglContextReadyCallback;
  }
+
  // Returns the shared EGL context, or null if the GL surface has not been created yet.
  public static synchronized EGLContext getEGLContext() {
    return eglContext;
  }
+
  /** Releases GLSurfaceView video renderer. Safe to call when no instance exists. */
  public static synchronized void dispose() {
    if (instance == null){
      return;
    }
    Logging.d(TAG, "VideoRendererGui.dispose");
    synchronized (instance.yuvImageRenderers) {
      // Release each renderer's pending frame and forget all renderers.
      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
        yuvImageRenderer.release();
      }
      instance.yuvImageRenderers.clear();
    }
    // Drop all static state so a subsequent setView() starts fresh.
    renderFrameThread = null;
    drawThread = null;
    instance.surface = null;
    eglContext = null;
    eglContextReady = null;
    instance = null;
  }
+
  /**
   * Creates VideoRenderer with top left corner at (x, y) and resolution
   * (width, height). All parameters are in percentage of screen resolution.
   */
  public static VideoRenderer createGui(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
    YuvImageRenderer javaGuiRenderer = create(
        x, y, width, height, scalingType, mirror);
    return new VideoRenderer(javaGuiRenderer);
  }
+
  // Convenience wrapper around create() returning the renderer as the callbacks interface.
  public static VideoRenderer.Callbacks createGuiRenderer(
      int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) {
    return create(x, y, width, height, scalingType, mirror);
  }
+
  /**
   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
   * resolution (width, height). All parameters are in percentage of
   * screen resolution. Throws if the rectangle is out of bounds or if
   * setView() has not been called yet.
   */
  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) {
    // Check display region parameters.
    if (x < 0 || x > 100 || y < 0 || y > 100 ||
        width < 0 || width > 100 || height < 0 || height > 100 ||
        x + width > 100 || y + height > 100) {
      throw new RuntimeException("Incorrect window parameters.");
    }

    if (instance == null) {
      throw new RuntimeException(
          "Attempt to create yuv renderer before setting GLSurfaceView");
    }
    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
        instance.surface, instance.yuvImageRenderers.size(),
        x, y, width, height, scalingType, mirror);
    synchronized (instance.yuvImageRenderers) {
      if (instance.onSurfaceCreatedCalled) {
        // onSurfaceCreated has already been called for VideoRendererGui -
        // need to create texture for new image and add image to the
        // rendering list.
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        instance.surface.queueEvent(new Runnable() {
          public void run() {
            // GL resources must be created on the GL thread.
            yuvImageRenderer.createTextures();
            yuvImageRenderer.setScreenSize(
                instance.screenWidth, instance.screenHeight);
            countDownLatch.countDown();
          }
        });
        // Wait for task completion.
        try {
          countDownLatch.await();
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
      // Add yuv renderer to rendering list.
      instance.yuvImageRenderers.add(yuvImageRenderer);
    }
    return yuvImageRenderer;
  }
+
+  /**
+   * Moves/resizes an existing renderer's display window. Silently does
+   * nothing if |renderer| is not in the current rendering list. Throws
+   * RuntimeException if setView has not been called yet.
+   */
+  public static synchronized void update(
+      VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror) {
+    Logging.d(TAG, "VideoRendererGui.update");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to update yuv renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
+        }
+      }
+    }
+  }
+
+  /**
+   * Attaches |rendererEvents| to the matching registered renderer so it gets
+   * first-frame / size-change notifications. Silently does nothing if
+   * |renderer| is not in the list; throws RuntimeException before setView.
+   */
+  public static synchronized void setRendererEvents(
+      VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
+    Logging.d(TAG, "VideoRendererGui.setRendererEvents");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to set renderer events before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.rendererEvents = rendererEvents;
+        }
+      }
+    }
+  }
+
+  /**
+   * Removes |renderer| from the rendering list and releases its GL resources.
+   * Logs a warning (but does not throw) if it is not present in the list;
+   * throws RuntimeException if setView has not been called yet.
+   */
+  public static synchronized void remove(VideoRenderer.Callbacks renderer) {
+    Logging.d(TAG, "VideoRendererGui.remove");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to remove renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      final int index = instance.yuvImageRenderers.indexOf(renderer);
+      if (index == -1) {
+        Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
+      } else {
+        instance.yuvImageRenderers.remove(index).release();
+      }
+    }
+  }
+
+  /**
+   * Resets the matching registered renderer by delegating to
+   * YuvImageRenderer.reset() (defined outside this chunk — presumably clears
+   * pending/rendered frame state). No-op if |renderer| is not in the list;
+   * throws RuntimeException before setView.
+   */
+  public static synchronized void reset(VideoRenderer.Callbacks renderer) {
+    Logging.d(TAG, "VideoRendererGui.reset");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to reset renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.reset();
+        }
+      }
+    }
+  }
+
+  // Logs the stack trace of |thread| (tagged with |threadName|) for debugging
+  // hangs. No-op if |thread| is null or currently has no stack frames.
+  private static void printStackTrace(Thread thread, String threadName) {
+    if (thread != null) {
+      StackTraceElement[] stackTraces = thread.getStackTrace();
+      if (stackTraces.length > 0) {
+        // Fixed log message typo: was "stacks trace:".
+        Logging.d(TAG, threadName + " stack trace:");
+        for (StackTraceElement stackTrace : stackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  /**
+   * Logs stack traces of the render-frame and draw threads for debugging.
+   * No-op before setView has been called.
+   */
+  public static synchronized void printStackTraces() {
+    if (instance == null) {
+      return;
+    }
+    printStackTrace(renderFrameThread, "Render frame thread");
+    printStackTrace(drawThread, "Draw thread");
+  }
+
+  // GLSurfaceView.Renderer callback, runs on the GL thread. Captures the EGL
+  // context, creates the shared drawer and per-renderer textures, then fires
+  // the eglContextReady callback supplied to setView.
+  @SuppressLint("NewApi")
+  @Override
+  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+    Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
+    // Store render EGL context.
+    synchronized (VideoRendererGui.class) {
+      eglContext = ((EGL10) EGLContext.getEGL()).eglGetCurrentContext();
+      Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
+    }
+
+    synchronized (yuvImageRenderers) {
+      // Create drawer for YUV/OES frames.
+      drawer = new GlRectDrawer();
+      // Create textures for all images.
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.createTextures();
+      }
+      // From now on create() must post texture creation to the GL thread.
+      onSurfaceCreatedCalled = true;
+    }
+    GlUtil.checkNoGLES2Error("onSurfaceCreated done");
+    GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+    GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
+
+    // Fire EGL context ready event.
+    synchronized (VideoRendererGui.class) {
+      if (eglContextReady != null) {
+        eglContextReady.run();
+      }
+    }
+  }
+
+  // GLSurfaceView.Renderer callback (GL thread): records the new surface size
+  // and propagates it to every registered image renderer.
+  @Override
+  public void onSurfaceChanged(GL10 unused, int width, int height) {
+    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
+        width + " x " + height + " ");
+    screenWidth = width;
+    screenHeight = height;
+    synchronized (yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
+      }
+    }
+  }
+
+  // GLSurfaceView.Renderer callback (GL thread): clears the surface and draws
+  // all registered image renderers. Also remembers the GL thread so
+  // printStackTraces() can log it.
+  @Override
+  public void onDrawFrame(GL10 unused) {
+    if (drawThread == null) {
+      drawThread = Thread.currentThread();
+    }
+    GLES20.glViewport(0, 0, screenWidth, screenHeight);
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    synchronized (yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.draw(drawer);
+      }
+    }
+  }
+
+}
diff --git a/talk/app/webrtc/java/jni/OWNERS b/talk/app/webrtc/java/jni/OWNERS
new file mode 100644
index 0000000000..4d31ffb663
--- /dev/null
+++ b/talk/app/webrtc/java/jni/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/talk/app/webrtc/java/jni/androidmediacodeccommon.h b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
new file mode 100644
index 0000000000..348a716496
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
@@ -0,0 +1,113 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+
+#include <android/log.h>
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc_jni {
+
+// Uncomment this define to enable verbose logging for every encoded/decoded
+// video frame.
+//#define TRACK_BUFFER_TIMING
+
+#define TAG "MediaCodecVideo"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG)
+
+// Constants shared by the MediaCodec encoder/decoder JNI wrappers.
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecVideoEncoder.java
+enum COLOR_FORMATTYPE {
+  COLOR_FormatYUV420Planar = 0x13,
+  COLOR_FormatYUV420SemiPlanar = 0x15,
+  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
+  // but requires some (16, 32?) byte alignment.
+  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
+};
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 1000 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+// Maximum amount of pending frames for VP8 decoder.
+enum { kMaxPendingFramesVp8 = 1 };
+// Maximum amount of pending frames for H.264 decoder.
+enum { kMaxPendingFramesH264 = 30 };
+// Maximum amount of decoded frames for which per-frame logging is enabled.
+enum { kMaxDecodedLogFrames = 5 };
+
+// Returns the current time in milliseconds, derived from
+// webrtc::TickTime::Now().Ticks(). The 1e6 divisor implies ticks are
+// nanoseconds here - TODO(review): confirm tick resolution on all platforms.
+static inline int64_t GetCurrentTimeMs() {
+  return webrtc::TickTime::Now().Ticks() / 1000000LL;
+}
+
+// Marks the current rtc::Thread (if any) as allowed to make blocking calls,
+// e.g. synchronous Invoke()s onto the codec thread. No-op on non-rtc threads.
+static inline void AllowBlockingCalls() {
+  rtc::Thread* current_thread = rtc::Thread::Current();
+  if (current_thread != NULL)
+    current_thread->SetAllowBlockingCalls(true);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State". Resolves the
+// class "org/webrtc/<fragment>" via FindClass and delegates to the
+// JavaEnumFromIndex overload taking a jclass (declared outside this file).
+static inline jobject JavaEnumFromIndex(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
+// Checks for any Java exception, prints stack backtrace and clears
+// currently thrown exception. Returns true iff an exception was pending.
+static inline bool CheckException(JNIEnv* jni) {
+  if (jni->ExceptionCheck()) {
+    ALOGE << "Java JNI exception.";
+    jni->ExceptionDescribe();
+    jni->ExceptionClear();
+    return true;
+  }
+  return false;
+}
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
new file mode 100644
index 0000000000..b664f16e2e
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -0,0 +1,865 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <algorithm>
+#include <vector>
+
+#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
+#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_video/interface/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::TickTime;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Hardware video decoder wrapping the Java org.webrtc.MediaCodecVideoDecoder
+// class via JNI. All MediaCodec interaction happens on |codec_thread_|;
+// public webrtc::VideoDecoder entry points marshal onto it via Invoke().
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+                               public rtc::MessageHandler {
+ public:
+  explicit MediaCodecVideoDecoder(
+      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
+  virtual ~MediaCodecVideoDecoder();
+
+  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
+      override;
+
+  int32_t Decode(
+      const EncodedImage& inputImage, bool missingFrames,
+      const RTPFragmentationHeader* fragmentation,
+      const CodecSpecificInfo* codecSpecificInfo = NULL,
+      int64_t renderTimeMs = -1) override;
+
+  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+      override;
+
+  int32_t Release() override;
+
+  int32_t Reset() override;
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) override;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+  int32_t InitDecodeOnCodecThread();
+  int32_t ReleaseOnCodecThread();
+  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+  // true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
+  int32_t ProcessHWErrorOnCodecThread();
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
+  bool key_frame_required_;
+  bool inited_;
+  bool sw_fallback_required_;
+  bool use_surface_;
+  VideoCodec codec_;
+  webrtc::I420BufferPool decoded_frame_pool_;
+  NativeHandleImpl native_handle_;
+  // Set via RegisterDecodeCompleteCallback (implemented outside this chunk).
+  DecodedImageCallback* callback_;
+  int frames_received_;  // Number of frames received by decoder.
+  int frames_decoded_;  // Number of frames decoded by decoder.
+  int64_t start_time_ms_;  // Start time for statistics.
+  int current_frames_;  // Number of frames in the current statistics interval.
+  int current_bytes_;  // Encoded bytes in the current statistics interval.
+  int current_decoding_time_ms_;  // Overall decoding time in the current second
+  uint32_t max_pending_frames_;  // Maximum number of pending input frames
+  // Per-frame bookkeeping, pushed in DecodeOnCodecThread and consumed when
+  // the matching output is delivered.
+  std::vector<int32_t> timestamps_;
+  std::vector<int64_t> ntp_times_ms_;
+  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
+                                             // decoder input.
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+  jmethodID j_init_decode_method_;
+  jmethodID j_release_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_queue_input_buffer_method_;
+  jmethodID j_dequeue_output_buffer_method_;
+  jmethodID j_return_decoded_byte_buffer_method_;
+  // MediaCodecVideoDecoder fields.
+  jfieldID j_input_buffers_field_;
+  jfieldID j_output_buffers_field_;
+  jfieldID j_color_format_field_;
+  jfieldID j_width_field_;
+  jfieldID j_height_field_;
+  jfieldID j_stride_field_;
+  jfieldID j_slice_height_field_;
+  jfieldID j_surface_texture_field_;
+  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
+  jfieldID j_textureID_field_;
+  jfieldID j_texture_presentation_timestamp_us_field_;
+  // MediaCodecVideoDecoder.DecodedByteBuffer fields.
+  jfieldID j_info_index_field_;
+  jfieldID j_info_offset_field_;
+  jfieldID j_info_size_field_;
+  jfieldID j_info_presentation_timestamp_us_field_;
+
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+  jobject surface_texture_;
+  jobject previous_surface_texture_;
+
+  // Render EGL context - owned by factory, should not be allocated/destroyed
+  // by VideoDecoder.
+  jobject render_egl_context_;
+};
+
+// Creates the Java MediaCodecVideoDecoder peer and caches all method/field
+// IDs used later. |render_egl_context| is owned by the factory; when non-null
+// the decoder outputs to a SurfaceTexture instead of byte buffers.
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+    JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
+    // Initializer list kept in member declaration order: members are always
+    // initialized in declaration order, so a mismatched list (the previous
+    // state) triggers -Wreorder and misleads readers. Also explicitly zero
+    // callback_, which Decode() compares against NULL before registration.
+    codecType_(codecType),
+    key_frame_required_(true),
+    inited_(false),
+    sw_fallback_required_(false),
+    callback_(NULL),
+    codec_thread_(new Thread()),
+    j_media_codec_video_decoder_class_(
+        jni,
+        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+    j_media_codec_video_decoder_(
+        jni,
+        jni->NewObject(*j_media_codec_video_decoder_class_,
+                       GetMethodID(jni,
+                                   *j_media_codec_video_decoder_class_,
+                                   "<init>",
+                                   "()V"))),
+    surface_texture_(NULL),
+    previous_surface_texture_(NULL),
+    render_egl_context_(render_egl_context) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
+
+  j_init_decode_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "initDecode",
+      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+      "IILjavax/microedition/khronos/egl/EGLContext;)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+  j_queue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
+  j_dequeue_output_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+      "(I)Ljava/lang/Object;");
+  j_return_decoded_byte_buffer_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_,
+                  "returnDecodedByteBuffer", "(I)V");
+
+  j_input_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "inputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_output_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "outputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_color_format_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+  j_width_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "width", "I");
+  j_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "height", "I");
+  j_stride_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "stride", "I");
+  j_slice_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+  j_surface_texture_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
+      "Landroid/graphics/SurfaceTexture;");
+
+  jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  j_textureID_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
+  j_texture_presentation_timestamp_us_field_ =
+      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
+                 "presentationTimestampUs", "J");
+
+  jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+  j_info_index_field_ = GetFieldID(
+      jni, j_decoder_decoded_byte_buffer_class, "index", "I");
+  j_info_offset_field_ = GetFieldID(
+      jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
+  j_info_size_field_ = GetFieldID(
+      jni, j_decoder_decoded_byte_buffer_class, "size", "I");
+  j_info_presentation_timestamp_us_field_ = GetFieldID(
+      jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
+
+  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+  use_surface_ = (render_egl_context_ != NULL);
+  ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
+  memset(&codec_, 0, sizeof(codec_));
+  AllowBlockingCalls();
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+  // Call Release() to ensure no more callbacks to us after we are deleted.
+  Release();
+  // Delete global references. The surface-texture refs are deliberately kept
+  // alive across Release() and only dropped here.
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  if (previous_surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(previous_surface_texture_);
+  }
+  if (surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(surface_texture_);
+  }
+}
+
+// webrtc::VideoDecoder entry point. Validates |inst|, caches codec settings
+// and marshals the actual MediaCodec setup onto |codec_thread_|. Returns
+// WEBRTC_VIDEO_CODEC_OK without doing work if SW fallback was flagged.
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+    int32_t numberOfCores) {
+  ALOGD << "InitDecode.";
+  if (inst == NULL) {
+    ALOGE << "NULL VideoCodec instance";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Factory should guard against other codecs being used with us.
+  RTC_CHECK(inst->codecType == codecType_)
+      << "Unsupported codec " << inst->codecType << " for " << codecType_;
+
+  if (sw_fallback_required_) {
+    ALOGE << "InitDecode() - fallback to SW decoder";
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  // Save VideoCodec instance for later (self-assign guard: Decode() re-inits
+  // with &codec_ on resolution change).
+  if (&codec_ != inst) {
+    codec_ = *inst;
+  }
+  // If maxFramerate is not set then assume 30 fps.
+  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;
+
+  // Call Java init.
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+// Runs on |codec_thread_|. Releases any previous codec, starts the Java
+// MediaCodec with the cached settings, resets statistics, grabs global refs
+// to the Java input buffers (and surface texture in surface mode), and kicks
+// off the periodic output-poll message. On any failure flags SW fallback.
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+      << codec_.width << " x " << codec_.height << ". Fps: " <<
+      (int)codec_.maxFramerate;
+
+  // Release previous codec first if it was allocated before.
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Always start with a complete key frame.
+  key_frame_required_ = true;
+  frames_received_ = 0;
+  frames_decoded_ = 0;
+
+  jobject j_video_codec_enum = JavaEnumFromIndex(
+      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
+  bool success = jni->CallBooleanMethod(
+      *j_media_codec_video_decoder_,
+      j_init_decode_method_,
+      j_video_codec_enum,
+      codec_.width,
+      codec_.height,
+      use_surface_ ? render_egl_context_ : nullptr);
+  if (CheckException(jni) || !success) {
+    ALOGE << "Codec initialization error - fallback to SW codec.";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  inited_ = true;
+
+  // Input-queue depth limit depends on codec (see kMaxPendingFrames* consts).
+  switch (codecType_) {
+    case kVideoCodecVP8:
+      max_pending_frames_ = kMaxPendingFramesVp8;
+      break;
+    case kVideoCodecH264:
+      max_pending_frames_ = kMaxPendingFramesH264;
+      break;
+    default:
+      max_pending_frames_ = 0;
+  }
+  start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_decoding_time_ms_ = 0;
+  timestamps_.clear();
+  ntp_times_ms_.clear();
+  frame_rtc_times_ms_.clear();
+
+  jobjectArray input_buffers = (jobjectArray)GetObjectField(
+      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
+  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+  ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
+  input_buffers_.resize(num_input_buffers);
+  for (size_t i = 0; i < num_input_buffers; ++i) {
+    input_buffers_[i] =
+        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+    if (CheckException(jni)) {
+      ALOGE << "NewGlobalRef error - fallback to SW codec.";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+
+  if (use_surface_) {
+    jobject surface_texture = GetObjectField(
+        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
+    // Keep the previous texture alive until the dtor; frames referencing it
+    // may still be in flight.
+    if (previous_surface_texture_ != NULL) {
+      jni->DeleteGlobalRef(previous_surface_texture_);
+    }
+    previous_surface_texture_ = surface_texture_;
+    surface_texture_ = jni->NewGlobalRef(surface_texture);
+  }
+  // Start periodic output polling (handled in OnMessage).
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// webrtc::VideoDecoder entry point: synchronously releases the codec on
+// |codec_thread_|.
+int32_t MediaCodecVideoDecoder::Release() {
+  ALOGD << "DecoderRelease request";
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+// Runs on |codec_thread_|. Drops the input-buffer global refs, releases the
+// Java codec, and cancels the pending poll messages. Idempotent: returns OK
+// immediately when not initialized.
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Frames decoded: " << frames_decoded_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); i++) {
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  }
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  inited_ = false;
+  rtc::MessageQueueManager::Clear(this);
+  if (CheckException(jni)) {
+    ALOGE << "Decoder release exception";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  ALOGD << "DecoderReleaseOnCodecThread done";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// CHECK-fails unless the caller is running on |codec_thread_|.
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
+      << "Running on wrong thread!";
+}
+
+// Runs on |codec_thread_| after a HW codec failure. For H.264 (no SW
+// fallback available) it attempts an in-place restart; otherwise it flags
+// sw_fallback_required_ and asks the caller to fall back to software.
+int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
+  CheckOnCodecThread();
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "ProcessHWError: Release failure";
+  }
+  if (codecType_ == kVideoCodecH264) {
+    // For now there is no SW H.264 which can be used as fallback codec.
+    // So try to restart hw codec for now.
+    ret_val = InitDecodeOnCodecThread();
+    ALOGE << "Reset H.264 codec done. Status: " << ret_val;
+    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
+      // H.264 codec was succesfully reset - return regular error code.
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    } else {
+      // Fail to restart H.264 codec - return error code which should stop the
+      // call.
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  } else {
+    sw_fallback_required_ = true;
+    ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+}
+
+// webrtc::VideoDecoder entry point. Validates state and input, re-inits the
+// codec when the encoded resolution changes, enforces the start-on-key-frame
+// rule, then marshals the actual decode onto |codec_thread_|.
+int32_t MediaCodecVideoDecoder::Decode(
+    const EncodedImage& inputImage,
+    bool missingFrames,
+    const RTPFragmentationHeader* fragmentation,
+    const CodecSpecificInfo* codecSpecificInfo,
+    int64_t renderTimeMs) {
+  if (sw_fallback_required_) {
+    ALOGE << "Decode() - fallback to SW codec";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+  if (callback_ == NULL) {
+    ALOGE << "Decode() - callback_ is NULL";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (inputImage._buffer == NULL && inputImage._length > 0) {
+    ALOGE << "Decode() - inputImage is incorrect";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (!inited_) {
+    ALOGE << "Decode() - decoder is not initialized";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // Check if encoded frame dimension has changed.
+  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+      (inputImage._encodedWidth != codec_.width ||
+      inputImage._encodedHeight != codec_.height)) {
+    codec_.width = inputImage._encodedWidth;
+    codec_.height = inputImage._encodedHeight;
+    int32_t ret = InitDecode(&codec_, 1);
+    if (ret < 0) {
+      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  }
+
+  // Always start with a complete key frame.
+  if (key_frame_required_) {
+    if (inputImage._frameType != webrtc::kVideoFrameKey) {
+      ALOGE << "Decode() - key frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    if (!inputImage._completeFrame) {
+      ALOGE << "Decode() - complete frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    key_frame_required_ = false;
+  }
+  if (inputImage._length == 0) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+// Runs on |codec_thread_|. Backpressures the input when too many frames are
+// pending, copies the encoded data into a MediaCodec input buffer, queues it
+// to the Java codec, and opportunistically drains any ready outputs. Any
+// failure is routed through ProcessHWErrorOnCodecThread().
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+    const EncodedImage& inputImage) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // Try to drain the decoder and wait until output is not too
+  // much behind the input.
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGV("Received: %d. Decoded: %d. Wait for output...",
+          frames_received_, frames_decoded_);
+    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+    if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+      ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+  }
+
+  // Get input buffer.
+  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
+                                                j_dequeue_input_buffer_method_);
+  if (CheckException(jni) || j_input_buffer_index < 0) {
+    ALOGE << "dequeueInputBuffer error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Copy encoded data to Java ByteBuffer.
+  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+  uint8_t* buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  RTC_CHECK(buffer) << "Indirect buffer??";
+  int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+  if (CheckException(jni) || buffer_capacity < inputImage._length) {
+    ALOGE << "Input frame size "<< inputImage._length <<
+        " is bigger than buffer size " << buffer_capacity;
+    return ProcessHWErrorOnCodecThread();
+  }
+  // Synthesize a presentation timestamp from the frame counter and the
+  // configured frame rate (MediaCodec only needs it to be monotonic).
+  jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
+  if (frames_decoded_ < kMaxDecodedLogFrames) {
+    ALOGD << "Decoder frame in # " << frames_received_ << ". Type: "
+        << inputImage._frameType << ". Buffer # " <<
+        j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000)
+        << ". Size: " << inputImage._length;
+  }
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
+  // Save input image timestamps for later output.
+  frames_received_++;
+  current_bytes_ += inputImage._length;
+  timestamps_.push_back(inputImage._timeStamp);
+  ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
+  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+  // Feed input to decoder.
+  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
+                                        j_queue_input_buffer_method_,
+                                        j_input_buffer_index,
+                                        inputImage._length,
+                                        timestamp_us);
+  if (CheckException(jni) || !success) {
+    ALOGE << "queueInputBuffer error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Try to drain the decoder
+  if (!DeliverPendingOutputs(jni, 0)) {
+    ALOGE << "DeliverPendingOutputs error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+ JNIEnv* jni, int dequeue_timeout_us) {
+ if (frames_received_ <= frames_decoded_) {
+ // No need to query for output buffers - decoder is drained.
+ return true;
+ }
+ // Get decoder output.
+ jobject j_decoder_output_buffer = jni->CallObjectMethod(
+ *j_media_codec_video_decoder_,
+ j_dequeue_output_buffer_method_,
+ dequeue_timeout_us);
+ if (CheckException(jni)) {
+ ALOGE << "dequeueOutputBuffer() error";
+ return false;
+ }
+ if (IsNull(jni, j_decoder_output_buffer)) {
+ // No decoded frame ready.
+ return true;
+ }
+
+ // Get decoded video frame properties.
+ int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_color_format_field_);
+ int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+ int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+ int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+ int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_slice_height_field_);
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+ long output_timestamps_ms = 0;
+ if (use_surface_) {
+ // Extract data from Java DecodedTextureBuffer.
+ const int texture_id =
+ GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
+ const int64_t timestamp_us =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_texture_presentation_timestamp_us_field_);
+ output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ native_handle_.SetTextureObject(surface_texture_, texture_id);
+ frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
+ &native_handle_, width, height);
+ } else {
+ // Extract data from Java ByteBuffer and create output yuv420 frame -
+ // for non surface decoding only.
+ const int output_buffer_index =
+ GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
+ const int output_buffer_offset =
+ GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
+ const int output_buffer_size =
+ GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
+ const int64_t timestamp_us = GetLongField(
+ jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
+ output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+
+ if (output_buffer_size < width * height * 3 / 2) {
+ ALOGE << "Insufficient output buffer size: " << output_buffer_size;
+ return false;
+ }
+ jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+ jobject output_buffer =
+ jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+ output_buffer));
+ if (CheckException(jni)) {
+ return false;
+ }
+ payload += output_buffer_offset;
+
+ // Create yuv420 frame.
+ frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
+ if (color_format == COLOR_FormatYUV420Planar) {
+ RTC_CHECK_EQ(0, stride % 2);
+ RTC_CHECK_EQ(0, slice_height % 2);
+ const int uv_stride = stride / 2;
+ const int u_slice_height = slice_height / 2;
+ const uint8_t* y_ptr = payload;
+ const uint8_t* u_ptr = y_ptr + stride * slice_height;
+ const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
+ libyuv::I420Copy(y_ptr, stride,
+ u_ptr, uv_stride,
+ v_ptr, uv_stride,
+ frame_buffer->MutableData(webrtc::kYPlane),
+ frame_buffer->stride(webrtc::kYPlane),
+ frame_buffer->MutableData(webrtc::kUPlane),
+ frame_buffer->stride(webrtc::kUPlane),
+ frame_buffer->MutableData(webrtc::kVPlane),
+ frame_buffer->stride(webrtc::kVPlane),
+ width, height);
+ } else {
+ // All other supported formats are nv12.
+ const uint8_t* y_ptr = payload;
+ const uint8_t* uv_ptr = y_ptr + stride * slice_height;
+ libyuv::NV12ToI420(
+ y_ptr, stride,
+ uv_ptr, stride,
+ frame_buffer->MutableData(webrtc::kYPlane),
+ frame_buffer->stride(webrtc::kYPlane),
+ frame_buffer->MutableData(webrtc::kUPlane),
+ frame_buffer->stride(webrtc::kUPlane),
+ frame_buffer->MutableData(webrtc::kVPlane),
+ frame_buffer->stride(webrtc::kVPlane),
+ width, height);
+ }
+ // Return output byte buffer back to codec.
+ jni->CallVoidMethod(
+ *j_media_codec_video_decoder_,
+ j_return_decoded_byte_buffer_method_,
+ output_buffer_index);
+ if (CheckException(jni)) {
+ ALOGE << "returnDecodedByteBuffer error";
+ return false;
+ }
+ }
+ VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+
+ // Get frame timestamps from a queue.
+ if (timestamps_.size() > 0) {
+ decoded_frame.set_timestamp(timestamps_.front());
+ timestamps_.erase(timestamps_.begin());
+ }
+ if (ntp_times_ms_.size() > 0) {
+ decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
+ ntp_times_ms_.erase(ntp_times_ms_.begin());
+ }
+ int64_t frame_decoding_time_ms = 0;
+ if (frame_rtc_times_ms_.size() > 0) {
+ frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+ }
+ if (frames_decoded_ < kMaxDecodedLogFrames) {
+ ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
+ " x " << height << ". " << stride << " x " << slice_height <<
+ ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
+ ". DecTime: " << (int)frame_decoding_time_ms;
+ }
+
+ // Calculate and print decoding statistics - every 3 seconds.
+ frames_decoded_++;
+ current_frames_++;
+ current_decoding_time_ms_ += frame_decoding_time_ms;
+ int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+ if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+ current_frames_ > 0) {
+ ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+ (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
+ ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+ << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
+ " for last " << statistic_time_ms << " ms.";
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ }
+
+ // Callback - output decoded frame.
+ const int32_t callback_status = callback_->Decoded(decoded_frame);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
+
+ return true;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Reset() {
+ ALOGD << "DecoderReset";
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ return InitDecode(&codec_, 1);
+}
+
+void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (!inited_) {
+ return;
+ }
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ CheckOnCodecThread();
+
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE << "OnMessage: DeliverPendingOutputs error";
+ ProcessHWErrorOnCodecThread();
+ return;
+ }
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
+ render_egl_context_(NULL) {
+ ALOGD << "MediaCodecVideoDecoderFactory ctor";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ supported_codec_types_.clear();
+
+ bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp8_hw_supported = false;
+ }
+ if (is_vp8_hw_supported) {
+ ALOGD << "VP8 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP8);
+ }
+
+ bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_h264_hw_supported = false;
+ }
+ if (is_h264_hw_supported) {
+ ALOGD << "H264 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecH264);
+ }
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
+ ALOGD << "MediaCodecVideoDecoderFactory dtor";
+ if (render_egl_context_) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->DeleteGlobalRef(render_egl_context_);
+ render_egl_context_ = NULL;
+ }
+}
+
+void MediaCodecVideoDecoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject render_egl_context) {
+ ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
+ if (render_egl_context_) {
+ jni->DeleteGlobalRef(render_egl_context_);
+ render_egl_context_ = NULL;
+ }
+ if (!IsNull(jni, render_egl_context)) {
+ render_egl_context_ = jni->NewGlobalRef(render_egl_context);
+ if (CheckException(jni)) {
+ ALOGE << "error calling NewGlobalRef for EGL Context.";
+ render_egl_context_ = NULL;
+ } else {
+ jclass j_egl_context_class =
+ FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
+ if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
+ ALOGE << "Wrong EGL Context.";
+ jni->DeleteGlobalRef(render_egl_context_);
+ render_egl_context_ = NULL;
+ }
+ }
+ }
+ if (render_egl_context_ == NULL) {
+ ALOGW << "NULL VideoDecoder EGL context - HW surface decoding is disabled.";
+ }
+}
+
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+ VideoCodecType type) {
+ if (supported_codec_types_.empty()) {
+ ALOGE << "No HW video decoder for type " << (int)type;
+ return NULL;
+ }
+ for (VideoCodecType codec_type : supported_codec_types_) {
+ if (codec_type == type) {
+ ALOGD << "Create HW video decoder for type " << (int)type;
+ return new MediaCodecVideoDecoder(
+ AttachCurrentThreadIfNeeded(), type, render_egl_context_);
+ }
+ }
+ ALOGE << "Can not find HW video decoder for type " << (int)type;
+ return NULL;
+}
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+ webrtc::VideoDecoder* decoder) {
+ ALOGD << "Destroy video decoder.";
+ delete decoder;
+}
+
+} // namespace webrtc_jni
+
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.h b/talk/app/webrtc/java/jni/androidmediadecoder_jni.h
new file mode 100644
index 0000000000..673f9983f4
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based decoder factory.
+class MediaCodecVideoDecoderFactory
+ : public cricket::WebRtcVideoDecoderFactory {
+ public:
+ MediaCodecVideoDecoderFactory();
+ virtual ~MediaCodecVideoDecoderFactory();
+
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+ // WebRtcVideoDecoderFactory implementation.
+ webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
+ override;
+
+ void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
+
+ private:
+ jobject render_egl_context_; // Render EGL context.
+ std::vector<webrtc::VideoCodecType> supported_codec_types_;
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
new file mode 100644
index 0000000000..ac349e7faf
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -0,0 +1,967 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode. This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+ public rtc::MessageHandler {
+ public:
+ virtual ~MediaCodecVideoEncoder();
+ explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
+
+ // webrtc::VideoEncoder implementation. Everything trampolines to
+ // |codec_thread_| for execution.
+ int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) override;
+ int32_t Encode(const webrtc::VideoFrame& input_image,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) override;
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) override;
+ int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) override;
+
+ void OnDroppedFrame() override;
+
+ int GetTargetFramerate() override;
+
+ private:
+ // CHECK-fail if not running on |codec_thread_|.
+ void CheckOnCodecThread();
+
+ // Release() and InitEncode() in an attempt to restore the codec to an
+ // operable state. Necessary after all manner of OMX-layer errors.
+ void ResetCodec();
+
+ // Implementation of webrtc::VideoEncoder methods above, all running on the
+ // codec thread exclusively.
+ //
+ // If width==0 then this is assumed to be a re-initialization and the
+ // previously-current values are reused instead of the passed parameters
+ // (makes it easier to reason about thread-safety).
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
+ int32_t EncodeOnCodecThread(
+ const webrtc::VideoFrame& input_image,
+ const std::vector<webrtc::FrameType>* frame_types);
+ int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback);
+ int32_t ReleaseOnCodecThread();
+ int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+
+ // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+ int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+ jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+ bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+ jlong GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni, jobject j_output_buffer_info);
+
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni);
+
+ // Search for H.264 start codes.
+ int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
+
+ // Type of video codec.
+ VideoCodecType codecType_;
+
+ // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
+ // |codec_thread_| synchronously.
+ webrtc::EncodedImageCallback* callback_;
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+ jmethodID j_init_encode_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_encode_method_;
+ jmethodID j_release_method_;
+ jmethodID j_set_rates_method_;
+ jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_release_output_buffer_method_;
+ jfieldID j_color_format_field_;
+ jfieldID j_info_index_field_;
+ jfieldID j_info_buffer_field_;
+ jfieldID j_info_is_key_frame_field_;
+ jfieldID j_info_presentation_timestamp_us_field_;
+
+ // State that is valid only between InitEncode() and the next Release().
+ // Touched only on codec_thread_ so no explicit synchronization necessary.
+ int width_; // Frame width in pixels.
+ int height_; // Frame height in pixels.
+ bool inited_;
+ uint16_t picture_id_;
+ enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
+ int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
+ int last_set_fps_; // Last-requested frame rate.
+ int64_t current_timestamp_us_; // Current frame timestamps in us.
+ int frames_received_; // Number of frames received by encoder.
+ int frames_encoded_; // Number of frames encoded by encoder.
+ int frames_dropped_; // Number of frames dropped by encoder.
+ int frames_in_queue_; // Number of frames in encoder queue.
+ int64_t start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+ int current_encoding_time_ms_; // Overall encoding time in the current second
+ int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
+ int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
+ std::vector<int32_t> timestamps_; // Video frames timestamp queue.
+ std::vector<int64_t> render_times_ms_; // Video frames render time queue.
+ std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
+ // encoder input.
+ int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q.
+ int64_t output_render_time_ms_; // Last output frame render time from
+ // render_times_ms_ queue.
+ // Frame size in bytes fed to MediaCodec.
+ int yuv_size_;
+ // True only when between a callback_->Encoded() call return a positive value
+ // and the next Encode() call being ignored.
+ bool drop_next_input_frame_;
+ // Global references; must be deleted in Release().
+ std::vector<jobject> input_buffers_;
+ webrtc::QualityScaler quality_scaler_;
+ // Dynamic resolution change, off by default.
+ bool scale_;
+
+ // H264 bitstream parser, used to extract QP from encoded bitstreams.
+ webrtc::H264BitstreamParser h264_bitstream_parser_;
+};
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+ // Call Release() to ensure no more callbacks to us after we are deleted.
+ Release();
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+ JNIEnv* jni, VideoCodecType codecType) :
+ codecType_(codecType),
+ callback_(NULL),
+ inited_(false),
+ picture_id_(0),
+ codec_thread_(new Thread()),
+ j_media_codec_video_encoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+ j_media_codec_video_encoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_encoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "<init>",
+ "()V"))) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // It would be nice to avoid spinning up a new thread per MediaCodec, and
+ // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+ // 2732 means that deadlocks abound. This class synchronously trampolines
+ // to |codec_thread_|, so if anything else can be coming to _us_ from
+ // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+ // in the bug, we have a problem. For now work around that with a dedicated
+ // thread.
+ codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+ RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
+
+ jclass j_output_buffer_info_class =
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ j_init_encode_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "initEncode",
+ "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
+ "[Ljava/nio/ByteBuffer;");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+ j_encode_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+ j_set_rates_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+ j_dequeue_output_buffer_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "dequeueOutputBuffer",
+ "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+ j_release_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+ j_color_format_field_ =
+ GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
+ j_info_index_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+ j_info_buffer_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+ j_info_is_key_frame_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+ j_info_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+ CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+ AllowBlockingCalls();
+}
+
+int32_t MediaCodecVideoEncoder::InitEncode(
+ const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) {
+ const int kMinWidth = 320;
+ const int kMinHeight = 180;
+ const int kLowQpThresholdDenominator = 3;
+ if (codec_settings == NULL) {
+ ALOGE << "NULL VideoCodec instance";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ // Factory should guard against other codecs being used with us.
+ RTC_CHECK(codec_settings->codecType == codecType_)
+ << "Unsupported codec " << codec_settings->codecType << " for "
+ << codecType_;
+
+ ALOGD << "InitEncode request";
+ scale_ = webrtc::field_trial::FindFullName(
+ "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
+ ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
+ if (scale_) {
+ if (codecType_ == kVideoCodecVP8) {
+ // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+ // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
+ // always = 127. Note that in SW, QP is that of the user-level range [0,
+ // 63].
+ const int kMaxQp = 127;
+ // TODO(pbos): Investigate whether high-QP thresholds make sense for VP8.
+ // This effectively disables high QP as VP8 QP can't go above this
+ // threshold.
+ const int kDisabledBadQpThreshold = kMaxQp + 1;
+ quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator,
+ kDisabledBadQpThreshold, true);
+ } else if (codecType_ == kVideoCodecH264) {
+ // H264 QP is in the range [0, 51].
+ const int kMaxQp = 51;
+ const int kBadQpThreshold = 40;
+ quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold,
+ false);
+ } else {
+ // When adding codec support to additional hardware codecs, also configure
+ // their QP thresholds for scaling.
+ RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+ }
+ quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
+ quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
+ }
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
+ this,
+ codec_settings->width,
+ codec_settings->height,
+ codec_settings->startBitrate,
+ codec_settings->maxFramerate));
+}
+
+int32_t MediaCodecVideoEncoder::Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) {
+ return codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+ this,
+ callback));
+}
+
+int32_t MediaCodecVideoEncoder::Release() {
+ ALOGD << "EncoderRelease request";
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ if (scale_)
+ quality_scaler_.ReportFramerate(frame_rate);
+
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
+ this,
+ new_bit_rate,
+ frame_rate));
+}
+
+void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ CheckOnCodecThread();
+ if (!inited_) {
+ return;
+ }
+
+ // It would be nice to recover from a failure here if one happened, but it's
+ // unclear how to signal such a failure to the app, so instead we stay silent
+ // about it and let the next app-called API method reveal the borkedness.
+ DeliverPendingOutputs(jni);
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+void MediaCodecVideoEncoder::CheckOnCodecThread() {
+ RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
+ << "Running on wrong thread!";
+}
+
+void MediaCodecVideoEncoder::ResetCodec() {
+ ALOGE << "ResetCodec";
+ if (Release() != WEBRTC_VIDEO_CODEC_OK ||
+ codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
+ width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
+ // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+ // degrade to a SW encoder at this point? There isn't one AFAICT :(
+ // https://code.google.com/p/webrtc/issues/detail?id=2920
+ }
+}
+
+// (Re)configures the Java MediaCodec encoder for the given geometry and
+// target rates, resets per-session statistics and timestamp queues, and
+// caches global refs to the codec's direct input buffers. Must run on
+// codec_thread_. A |kbps| or |fps| of 0 means "keep the last set value".
+// Returns WEBRTC_VIDEO_CODEC_OK or WEBRTC_VIDEO_CODEC_ERROR.
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+    int width, int height, int kbps, int fps) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  ALOGD << "InitEncodeOnCodecThread Type: " <<  (int)codecType_ << ", " <<
+      width << " x " << height << ". Bitrate: " << kbps <<
+      " kbps. Fps: " << fps;
+  if (kbps == 0) {
+    kbps = last_set_bitrate_kbps_;
+  }
+  if (fps == 0) {
+    fps = last_set_fps_;
+  }
+
+  width_ = width;
+  height_ = height;
+  last_set_bitrate_kbps_ = kbps;
+  last_set_fps_ = fps;
+  yuv_size_ = width_ * height_ * 3 / 2;
+  frames_received_ = 0;
+  frames_encoded_ = 0;
+  frames_dropped_ = 0;
+  frames_in_queue_ = 0;
+  current_timestamp_us_ = 0;
+  start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_encoding_time_ms_ = 0;
+  last_input_timestamp_ms_ = -1;
+  last_output_timestamp_ms_ = -1;
+  output_timestamp_ = 0;
+  output_render_time_ms_ = 0;
+  timestamps_.clear();
+  render_times_ms_.clear();
+  frame_rtc_times_ms_.clear();
+  drop_next_input_frame_ = false;
+  // Random 15-bit starting picture id, as required by the VP8 RTP payload.
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+  // We enforce no extra stride/padding in the format creation step.
+  jobject j_video_codec_enum = JavaEnumFromIndex(
+      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+      jni->CallObjectMethod(*j_media_codec_video_encoder_,
+          j_init_encode_method_,
+          j_video_codec_enum,
+          width_,
+          height_,
+          kbps,
+          fps));
+  CHECK_EXCEPTION(jni);
+  if (IsNull(jni, input_buffers)) {
+    // Java-side initEncode failed (no HW codec, bad params, ...).
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  inited_ = true;
+  // Map the Android color format chosen by the codec to the libyuv fourcc
+  // used later by ConvertFromI420.
+  switch (GetIntField(jni, *j_media_codec_video_encoder_,
+      j_color_format_field_)) {
+    case COLOR_FormatYUV420Planar:
+      encoder_fourcc_ = libyuv::FOURCC_YU12;
+      break;
+    case COLOR_FormatYUV420SemiPlanar:
+    case COLOR_QCOM_FormatYUV420SemiPlanar:
+    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+      encoder_fourcc_ = libyuv::FOURCC_NV12;
+      break;
+    default:
+      LOG(LS_ERROR) << "Wrong color format.";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+  RTC_CHECK(input_buffers_.empty())
+      << "Unexpected double InitEncode without Release";
+  input_buffers_.resize(num_input_buffers);
+  for (size_t i = 0; i < num_input_buffers; ++i) {
+    // Global refs keep the direct ByteBuffers alive across JNI calls; they
+    // are released in ReleaseOnCodecThread().
+    input_buffers_[i] =
+        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+    int64_t yuv_buffer_capacity =
+        jni->GetDirectBufferCapacity(input_buffers_[i]);
+    CHECK_EXCEPTION(jni);
+    RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+  }
+  CHECK_EXCEPTION(jni);
+
+  // Start periodic polling for encoder output (handled in OnMessage).
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Encodes one frame on codec_thread_: drains pending outputs first,
+// optionally runs the quality scaler, drops frames when the codec falls
+// behind, converts the I420 input into a MediaCodec direct input buffer and
+// queues it for encoding. Triggers ResetCodec() on resolution change or on
+// encode/delivery failure.
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+    const webrtc::VideoFrame& frame,
+    const std::vector<webrtc::FrameType>* frame_types) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  frames_received_++;
+  if (!DeliverPendingOutputs(jni)) {
+    ResetCodec();
+    // Continue as if everything's fine.
+  }
+
+  if (drop_next_input_frame_) {
+    ALOGV("Encoder drop frame - failed callback.");
+    drop_next_input_frame_ = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+
+  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
+  // Check framerate before spatial resolution change.
+  if (scale_)
+    quality_scaler_.OnEncodeFrame(frame);
+
+  const VideoFrame& input_frame =
+      scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
+
+  if (input_frame.width() != width_ || input_frame.height() != height_) {
+    ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
+        " to " << input_frame.width() << " x " << input_frame.height();
+    width_ = input_frame.width();
+    height_ = input_frame.height();
+    // Reinitialize at the new resolution; the current frame is not encoded.
+    ResetCodec();
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+
+  // Check if we accumulated too many frames in encoder input buffers
+  // or the encoder latency exceeds 70 ms and drop frame if so.
+  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
+    int encoder_latency_ms = last_input_timestamp_ms_ -
+        last_output_timestamp_ms_;
+    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
+      ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+          " ms. Q size: " << frames_in_queue_;
+      frames_dropped_++;
+      // Report dropped frame to quality_scaler_.
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+  }
+
+  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+                                                j_dequeue_input_buffer_method_);
+  CHECK_EXCEPTION(jni);
+  if (j_input_buffer_index == -1) {
+    // Video codec falls behind - no input buffer available.
+    ALOGV("Encoder drop frame - no input buffers available");
+    frames_dropped_++;
+    // Report dropped frame to quality_scaler_.
+    OnDroppedFrame();
+    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+  }
+  if (j_input_buffer_index == -2) {
+    // Java-side exception while dequeuing - unrecoverable.
+    ResetCodec();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
+        frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
+
+  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+  uint8_t* yuv_buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  CHECK_EXCEPTION(jni);
+  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
+  // Convert the I420 input into the codec's color format (encoder_fourcc_,
+  // chosen in InitEncodeOnCodecThread), packed with no extra stride.
+  RTC_CHECK(!libyuv::ConvertFromI420(
+      input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane),
+      input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane),
+      input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane),
+      yuv_buffer, width_, width_, height_, encoder_fourcc_))
+      << "ConvertFromI420 failed";
+  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
+  frames_in_queue_++;
+
+  // Save input image timestamps for later output
+  timestamps_.push_back(input_frame.timestamp());
+  render_times_ms_.push_back(input_frame.render_time_ms());
+  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+  bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_method_,
+                                              key_frame,
+                                              j_input_buffer_index,
+                                              yuv_size_,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  // Timestamps are synthesized from the requested frame rate, not taken from
+  // the input frame.
+  current_timestamp_us_ += 1000000 / last_set_fps_;
+
+  if (!encode_status || !DeliverPendingOutputs(jni)) {
+    ResetCodec();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores |callback| to receive encoded frames from DeliverPendingOutputs().
+// The pointer is borrowed, not owned. Must run on codec_thread_.
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+    webrtc::EncodedImageCallback* callback) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Releases the Java MediaCodec, the cached global refs to its input buffers,
+// and any queued poll messages for this object. Safe to call when not
+// initialized (returns OK immediately). Must run on codec_thread_.
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Encoded: " << frames_encoded_ <<
+      ". Dropped: " << frames_dropped_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); ++i)
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+  CHECK_EXCEPTION(jni);
+  // Cancel pending poll messages so OnMessage is not invoked after release.
+  rtc::MessageQueueManager::Clear(this);
+  inited_ = false;
+  ALOGD << "EncoderReleaseOnCodecThread done.";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Forwards new target bitrate/framerate to the Java encoder. A zero argument
+// keeps the corresponding last-set value. No-op (OK) when both values already
+// match the last request; calls ResetCodec() if the Java call fails.
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+                                                      uint32_t frame_rate) {
+  CheckOnCodecThread();
+  if (last_set_bitrate_kbps_ == new_bit_rate &&
+      last_set_fps_ == frame_rate) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  if (new_bit_rate > 0) {
+    last_set_bitrate_kbps_ = new_bit_rate;
+  }
+  if (frame_rate > 0) {
+    last_set_fps_ = frame_rate;
+  }
+  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                       j_set_rates_method_,
+                                       last_set_bitrate_kbps_,
+                                       last_set_fps_);
+  CHECK_EXCEPTION(jni);
+  if (!ret) {
+    ResetCodec();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Reads the buffer index out of a Java OutputBufferInfo object.
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  const int buffer_index =
+      GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+  return buffer_index;
+}
+
+// Reads the (direct) output ByteBuffer out of a Java OutputBufferInfo object.
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  const jobject j_buffer =
+      GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+  return j_buffer;
+}
+
+// Reads the key-frame flag out of a Java OutputBufferInfo object.
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  const bool is_key_frame =
+      GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+  return is_key_frame;
+}
+
+// Reads the presentation timestamp (microseconds) out of a Java
+// OutputBufferInfo object.
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  const jlong timestamp_us = GetLongField(
+      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+  return timestamp_us;
+}
+
+// Drains every output buffer currently available from the Java MediaCodec:
+// matches each encoded frame with the queued input timestamps, updates the
+// encode statistics, delivers the frame to |callback_| (with codec-specific
+// info and a fragmentation header), and returns the buffer to the codec.
+// Returns false - after ResetCodec() - on any unrecoverable error.
+// Fixes vs. original: loop index over scPositions made int32_t (was size_t,
+// a signed/unsigned comparison), payload_size explicitly narrowed for the
+// "%d" ALOGV conversion (passing size_t to %d is UB on LP64) and for the
+// int32_t scPositions sentinel slot.
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+  while (true) {
+    jobject j_output_buffer_info = jni->CallObjectMethod(
+        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (IsNull(jni, j_output_buffer_info)) {
+      // No more encoder output ready - done for now.
+      break;
+    }
+
+    int output_buffer_index =
+        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+    if (output_buffer_index == -1) {
+      ResetCodec();
+      return false;
+    }
+
+    // Get key and config frame flags.
+    jobject j_output_buffer =
+        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+    // Get frame timestamps from a queue - for non config frames only.
+    int64_t frame_encoding_time_ms = 0;
+    last_output_timestamp_ms_ =
+        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+        1000;
+    if (frames_in_queue_ > 0) {
+      output_timestamp_ = timestamps_.front();
+      timestamps_.erase(timestamps_.begin());
+      output_render_time_ms_ = render_times_ms_.front();
+      render_times_ms_.erase(render_times_ms_.begin());
+      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      frames_in_queue_--;
+    }
+
+    // Extract payload.
+    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+    uint8_t* payload = reinterpret_cast<uint8_t*>(
+        jni->GetDirectBufferAddress(j_output_buffer));
+    CHECK_EXCEPTION(jni);
+
+    ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld."
+          " Latency: %lld. EncTime: %lld",
+          frames_encoded_, key_frame, static_cast<int>(payload_size),
+          last_output_timestamp_ms_,
+          last_input_timestamp_ms_ - last_output_timestamp_ms_,
+          frame_encoding_time_ms);
+
+    // Calculate and print encoding statistics - every 3 seconds.
+    frames_encoded_++;
+    current_frames_++;
+    current_bytes_ += payload_size;
+    current_encoding_time_ms_ += frame_encoding_time_ms;
+    int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+    if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+        current_frames_ > 0) {
+      ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " <<
+          (current_bytes_ * 8 / statistic_time_ms) <<
+          ", target: " << last_set_bitrate_kbps_ << " kbps, fps: " <<
+          ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+          << ", encTime: " <<
+          (current_encoding_time_ms_ / current_frames_) << " for last " <<
+          statistic_time_ms << " ms.";
+      start_time_ms_ = GetCurrentTimeMs();
+      current_frames_ = 0;
+      current_bytes_ = 0;
+      current_encoding_time_ms_ = 0;
+    }
+
+    // Callback - return encoded frame.
+    int32_t callback_status = 0;
+    if (callback_) {
+      scoped_ptr<webrtc::EncodedImage> image(
+          new webrtc::EncodedImage(payload, payload_size, payload_size));
+      image->_encodedWidth = width_;
+      image->_encodedHeight = height_;
+      image->_timeStamp = output_timestamp_;
+      image->capture_time_ms_ = output_render_time_ms_;
+      image->_frameType =
+          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+      image->_completeFrame = true;
+      image->adapt_reason_.quality_resolution_downscales =
+          scale_ ? quality_scaler_.downscale_shift() : -1;
+
+      webrtc::CodecSpecificInfo info;
+      memset(&info, 0, sizeof(info));
+      info.codecType = codecType_;
+      if (codecType_ == kVideoCodecVP8) {
+        info.codecSpecific.VP8.pictureId = picture_id_;
+        info.codecSpecific.VP8.nonReference = false;
+        info.codecSpecific.VP8.simulcastIdx = 0;
+        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP8.layerSync = false;
+        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+        // 15-bit wrap-around, matching the VP8 RTP payload picture id.
+        picture_id_ = (picture_id_ + 1) & 0x7FFF;
+      }
+
+      // Generate a header describing a single fragment.
+      webrtc::RTPFragmentationHeader header;
+      memset(&header, 0, sizeof(header));
+      if (codecType_ == kVideoCodecVP8) {
+        header.VerifyAndAllocateFragmentationHeader(1);
+        header.fragmentationOffset[0] = 0;
+        header.fragmentationLength[0] = image->_length;
+        header.fragmentationPlType[0] = 0;
+        header.fragmentationTimeDiff[0] = 0;
+        if (scale_) {
+          int qp;
+          if (webrtc::vp8::GetQp(payload, payload_size, &qp))
+            quality_scaler_.ReportQP(qp);
+        }
+      } else if (codecType_ == kVideoCodecH264) {
+        if (scale_) {
+          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+          int qp;
+          if (h264_bitstream_parser_.GetLastSliceQp(&qp))
+            quality_scaler_.ReportQP(qp);
+        }
+        // For H.264 search for start codes.
+        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+        int32_t scPositionsLength = 0;
+        int32_t scPosition = 0;
+        while (scPositionsLength < MAX_NALUS_PERFRAME) {
+          int32_t naluPosition = NextNaluPosition(
+              payload + scPosition, payload_size - scPosition);
+          if (naluPosition < 0) {
+            break;
+          }
+          scPosition += naluPosition;
+          scPositions[scPositionsLength++] = scPosition;
+          scPosition += H264_SC_LENGTH;
+        }
+        if (scPositionsLength == 0) {
+          ALOGE << "Start code is not found!";
+          ALOGE << "Data:" <<  image->_buffer[0] << " " << image->_buffer[1]
+              << " " << image->_buffer[2] << " " << image->_buffer[3]
+              << " " << image->_buffer[4] << " " << image->_buffer[5];
+          ResetCodec();
+          return false;
+        }
+        // Sentinel slot: end of payload bounds the last fragment length.
+        scPositions[scPositionsLength] = static_cast<int32_t>(payload_size);
+        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+        for (int32_t i = 0; i < scPositionsLength; i++) {
+          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+          header.fragmentationLength[i] =
+              scPositions[i + 1] - header.fragmentationOffset[i];
+          header.fragmentationPlType[i] = 0;
+          header.fragmentationTimeDiff[i] = 0;
+        }
+      }
+
+      callback_status = callback_->Encoded(*image, &info, &header);
+    }
+
+    // Return output buffer back to the encoder.
+    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                          j_release_output_buffer_method_,
+                                          output_buffer_index);
+    CHECK_EXCEPTION(jni);
+    if (!success) {
+      ResetCodec();
+      return false;
+    }
+
+    if (callback_status > 0) {
+      drop_next_input_frame_ = true;
+      // Theoretically could handle callback_status<0 here, but unclear what
+      // that would mean for us.
+    }
+  }
+
+  return true;
+}
+
+// Scans |buffer| for the next H.264 Annex-B start code (00 00 00 01) and
+// returns its byte offset, or -1 when no start code fits in the remaining
+// bytes. Only offsets up to buffer_size - H264_SC_LENGTH are examined, so
+// all four bytes of a reported start code lie inside the buffer.
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+    uint8_t *buffer, size_t buffer_size) {
+  if (buffer_size < H264_SC_LENGTH) {
+    return -1;
+  }
+  // Last candidate offset whose 4-byte window stays inside the buffer.
+  const size_t scan_limit = buffer_size - H264_SC_LENGTH;
+  size_t pos = 0;
+  while (pos < scan_limit) {
+    if (buffer[pos] != 0) {
+      pos++;                 // Not a zero - can't start a start code here.
+      continue;
+    }
+    if (buffer[pos + 1] != 0) {  // got 00xx
+      pos += 2;
+      continue;
+    }
+    if (buffer[pos + 2] != 0) {  // got 0000xx
+      pos += 3;
+      continue;
+    }
+    if (buffer[pos + 3] != 0x01) {  // got 000000xx
+      pos++;                 // xx != 1, continue searching.
+      continue;
+    }
+    return static_cast<int32_t>(pos);
+  }
+  return -1;
+}
+
+// Reports a dropped frame to the quality scaler when scaling is enabled;
+// no-op otherwise.
+void MediaCodecVideoEncoder::OnDroppedFrame() {
+  if (!scale_) {
+    return;
+  }
+  quality_scaler_.ReportDroppedFrame();
+}
+
+// Returns the quality scaler's target framerate, or -1 when scaling is off.
+int MediaCodecVideoEncoder::GetTargetFramerate() {
+  if (scale_) {
+    return quality_scaler_.GetTargetFramerate();
+  }
+  return -1;
+}
+
+// Probes the Java MediaCodecVideoEncoder class once for VP8 and H.264 HW
+// support and records the supported codecs (with max resolution/fps caps)
+// in supported_codecs_.
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+  supported_codecs_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp8_hw_supported) {
+    ALOGD << "VP8 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_h264_hw_supported) {
+    ALOGD << "H.264 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+}
+
+// Nothing to release explicitly; supported_codecs_ cleans itself up.
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
+
+// Creates a HW encoder for |type| if that codec is in supported_codecs_;
+// returns NULL when no HW support was detected for it. The caller takes
+// ownership (released via DestroyVideoEncoder).
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+    VideoCodecType type) {
+  if (supported_codecs_.empty()) {
+    return NULL;
+  }
+  for (const VideoCodec& codec : supported_codecs_) {
+    if (codec.type != type) {
+      continue;
+    }
+    ALOGD << "Create HW video encoder for type " << (int)type <<
+        " (" << codec.name << ").";
+    return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+  }
+  return NULL;
+}
+
+// Returns the codecs this factory can build HW encoders for (possibly empty).
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+  return supported_codecs_;
+}
+
+// Destroys an encoder previously returned by CreateVideoEncoder.
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+    webrtc::VideoEncoder* encoder) {
+  ALOGD << "Destroy video encoder.";
+  delete encoder;
+}
+
+} // namespace webrtc_jni
+
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
new file mode 100644
index 0000000000..ff124aa146
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+
+#include <vector>
+
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based encoder factory. Queries the
+// Java MediaCodecVideoEncoder class for HW VP8/H.264 support once at
+// construction and hands out HW encoder instances for supported types.
+class MediaCodecVideoEncoderFactory
+    : public cricket::WebRtcVideoEncoderFactory {
+ public:
+  MediaCodecVideoEncoderFactory();
+  virtual ~MediaCodecVideoEncoderFactory();
+
+  // WebRtcVideoEncoderFactory implementation.
+  webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+      override;
+  const std::vector<VideoCodec>& codecs() const override;
+  void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
+
+ private:
+  // Empty if platform support is lacking, const after ctor returns.
+  std::vector<VideoCodec> supported_codecs_;
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
diff --git a/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.cc b/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.cc
new file mode 100644
index 0000000000..f7a8c07c60
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.cc
@@ -0,0 +1,85 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h"
+
+#include "webrtc/base/common.h"
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+// Process-wide Android application context used to initialize the Java
+// NetworkMonitor; owned as a JNI global ref.
+jobject AndroidNetworkMonitor::application_context_ = nullptr;
+
+// static
+// Stores |context| (replacing and releasing any previous one) for use by
+// subsequently constructed AndroidNetworkMonitor instances.
+void AndroidNetworkMonitor::SetAndroidContext(JNIEnv* jni, jobject context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, context);
+}
+
+// Initializes the Java org.webrtc.NetworkMonitor singleton via its static
+// init(Context) method, using the context stored by SetAndroidContext.
+// SetAndroidContext must have been called first (checked by the ASSERT).
+AndroidNetworkMonitor::AndroidNetworkMonitor()
+    : j_network_monitor_class_(jni(),
+                               FindClass(jni(), "org/webrtc/NetworkMonitor")),
+      j_network_monitor_(
+          jni(),
+          jni()->CallStaticObjectMethod(
+              *j_network_monitor_class_,
+              GetStaticMethodID(
+                  jni(),
+                  *j_network_monitor_class_,
+                  "init",
+                  "(Landroid/content/Context;)Lorg/webrtc/NetworkMonitor;"),
+              application_context_)) {
+  ASSERT(application_context_ != nullptr);
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
+}
+
+// Starts Java-side network monitoring; |this| is passed as the native
+// pointer that change notifications are routed back to.
+void AndroidNetworkMonitor::Start() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.startMonitoring";
+}
+
+// Stops Java-side network monitoring for this native monitor instance.
+void AndroidNetworkMonitor::Stop() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
+}
+
+// JNI entry point called from Java when the device's connection type changes;
+// |j_native_monitor| is the pointer passed to startMonitoring above.
+JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor) {
+  rtc::NetworkMonitorInterface* network_monitor =
+      reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
+  network_monitor->OnNetworksChanged();
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h b/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h
new file mode 100644
index 0000000000..3f5110c2c4
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h
@@ -0,0 +1,67 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+
+#include "webrtc/base/networkmonitor.h"
+
+#include "webrtc/base/thread_checker.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// rtc::NetworkMonitorBase implementation backed by the Java
+// org.webrtc.NetworkMonitor singleton; Java notifies network changes back
+// through NetworkMonitor_nativeNotifyConnectionTypeChanged.
+class AndroidNetworkMonitor : public rtc::NetworkMonitorBase {
+ public:
+  AndroidNetworkMonitor();
+
+  // Must be called (with the Android application context) before the first
+  // instance is constructed.
+  static void SetAndroidContext(JNIEnv* jni, jobject context);
+
+  void Start() override;
+  void Stop() override;
+
+ private:
+  JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
+
+  ScopedGlobalRef<jclass> j_network_monitor_class_;
+  ScopedGlobalRef<jobject> j_network_monitor_;
+  // Start() and Stop() are restricted to a single thread.
+  rtc::ThreadChecker thread_checker_;
+  // Global ref set by SetAndroidContext; shared by all instances.
+  static jobject application_context_;
+};
+
+// Factory that produces AndroidNetworkMonitor instances; the caller of
+// CreateNetworkMonitor takes ownership of the returned monitor.
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  AndroidNetworkMonitorFactory() {}
+
+  rtc::NetworkMonitorInterface* CreateNetworkMonitor() override {
+    return new AndroidNetworkMonitor();
+  }
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000000..02b9f22015
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,266 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc_jni {
+
+// Process-wide Android application context handed to startCapture; owned as
+// a JNI global ref.
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+// Stores |application_context| (replacing and releasing any previously
+// stored context) for later capturer starts. Always returns 0.
+// Fix vs. original: parameter name typo "appliction_context" corrected;
+// C++ allows the definition's parameter name to differ from the declaration.
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+                                               jobject application_context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, application_context);
+
+  return 0;
+}
+
+// Wraps the Java VideoCapturerAndroid object; caches global refs to the
+// capturer object and the classes needed later. The thread checker is
+// detached so Start() can bind it to whichever thread starts the capturer.
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
+                                                 jobject j_video_capturer)
+    : j_capturer_global_(jni, j_video_capturer),
+      j_video_capturer_class_(
+          jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
+      j_observer_class_(
+          jni,
+          FindClass(jni,
+                    "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+      capturer_(nullptr) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+  thread_checker_.DetachFromThread();
+}
+
+// Releases the Java capturer's resources via VideoCapturerAndroid.release().
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+  jni()->CallVoidMethod(
+      *j_capturer_global_,
+      GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
+}
+
+// Starts Java-side capture at the requested format, delivering callbacks to
+// |capturer| (borrowed pointer, valid until Stop()). Creates a NativeObserver
+// holding a pointer back to this object and passes it to startCapture.
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+                                    webrtc::AndroidVideoCapturer* capturer) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    RTC_CHECK(capturer_ == nullptr);
+    RTC_CHECK(invoker_.get() == nullptr);
+    capturer_ = capturer;
+    // The invoker marshals frame callbacks onto |capturer|'s thread and is
+    // destroyed in Stop() to cancel pending calls.
+    invoker_.reset(new rtc::GuardedAsyncInvoker());
+  }
+  jobject j_frame_observer =
+      jni()->NewObject(*j_observer_class_,
+                       GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+                       jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+  jmethodID m = GetMethodID(
+      jni(), *j_video_capturer_class_, "startCapture",
+      "(IIILandroid/content/Context;"
+      "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
+  jni()->CallVoidMethod(*j_capturer_global_,
+                        m, width, height,
+                        framerate,
+                        application_context_,
+                        j_frame_observer);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
+}
+
+// Stops capture: first severs the link to |capturer_| (cancelling pending
+// async callbacks), then stops the Java capturer.
+void AndroidVideoCapturerJni::Stop() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+    invoker_ = nullptr;
+    capturer_ = nullptr;
+  }
+  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+                            "stopCapture", "()V");
+  jni()->CallVoidMethod(*j_capturer_global_, m);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+// Asynchronously invokes |method| on |capturer_| through |invoker_| while
+// holding capturer_lock_. Silently drops the call (with a warning) if the
+// capturer has already been stopped. |method_name| is used only for logging.
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+    const char* method_name,
+    void (webrtc::AndroidVideoCapturer::*method)(Args...),
+    typename Identity<Args>::type... args) {
+  rtc::CritScope cs(&capturer_lock_);
+  if (!invoker_) {
+    LOG(LS_WARNING) << method_name << "() called for closed capturer.";
+    return;
+  }
+  invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
+}
+
+// Hands a camera buffer (identified by its capture timestamp) back to the
+// Java capturer for reuse; invoked from the wrapped frame buffer's dtor.
+void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
+  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+                            "returnBuffer", "(J)V");
+  jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
+}
+
+// Returns the Java capturer's supported capture formats, serialized as a
+// JSON string by VideoCapturerAndroid.getSupportedFormatsAsJson().
+std::string AndroidVideoCapturerJni::GetSupportedFormats() {
+  jmethodID m =
+      GetMethodID(jni(), *j_video_capturer_class_,
+                  "getSupportedFormatsAsJson", "()Ljava/lang/String;");
+  jstring j_json_caps =
+      (jstring) jni()->CallObjectMethod(*j_capturer_global_, m);
+  CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
+  return JavaToStdString(jni(), j_json_caps);
+}
+
+// Called from Java when capture start completes; forwards the result to the
+// capturer asynchronously on its own thread.
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+  AsyncCapturerInvoke("OnCapturerStarted",
+                      &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+                      success);
+}
+
+// Called from Java with a raw planar camera frame. Computes the 16-byte
+// aligned plane strides, wraps the Java-owned memory zero-copy in a
+// WrappedI420Buffer, and forwards it asynchronously to the capturer.
+// NOTE(review): the V plane is located before the U plane here, consistent
+// with Android's YV12 preview layout - confirm against the Java capturer's
+// configured pixel format.
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+                                                  int length,
+                                                  int width,
+                                                  int height,
+                                                  int rotation,
+                                                  int64_t timestamp_ns) {
+  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
+  // Android guarantees that the stride is a multiple of 16.
+  // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+  int y_stride;
+  int uv_stride;
+  webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
+  const uint8_t* v_plane = y_plane + y_stride * height;
+  const uint8_t* u_plane =
+      v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
+
+  // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
+  // VideoFrameBuffer destructor.
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+          width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
+          uv_stride,
+          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
+                    timestamp_ns)));
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(
+ int width,
+ int height,
+ int64_t timestamp_ns,
+ const NativeTextureHandleImpl& handle) {
+ // TODO(magjed): Fix this. See bug webrtc:4993.
+ RTC_NOTREACHED()
+ << "The rest of the stack for Android expects the native "
+ "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
+ "NativeTextureHandleImpl";
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ new rtc::RefCountedObject<AndroidTextureBuffer>(
+ width, height, handle,
+ rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
+ timestamp_ns)));
+ AsyncCapturerInvoke("OnIncomingFrame",
+ &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+ buffer, 0, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+ int height,
+ int fps) {
+ AsyncCapturerInvoke("OnOutputFormatRequest",
+ &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+ width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
+JOW(void,
+ VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+ jint width, jint height, jint rotation, jlong timestamp) {
+ jboolean is_copy = true;
+ jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+ // If this is a copy of the original frame, it means that the memory
+ // is not direct memory and thus VideoCapturerAndroid does not guarantee
+ // that the memory is valid when we have released |j_frame|.
+ // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
+ // remove this check.
+ RTC_CHECK(!is_copy)
+ << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+ jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_oes_texture_id, jfloatArray j_transform_matrix,
+ jlong j_timestamp) {
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnTextureFrame(j_width, j_height, j_timestamp,
+ NativeTextureHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
+ (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+ LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+ j_success);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_fps) {
+ LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+ j_width, j_height, j_fps);
+}
+
+JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
+ (JNIEnv* jni, jclass, jobject j_video_capturer) {
+ rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+ new webrtc::AndroidVideoCapturer(delegate));
+ // Caller takes ownership of the cricket::VideoCapturer* pointer.
+ return jlongFromPointer(capturer.release());
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000000..d1eb3a0ad0
--- /dev/null
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -0,0 +1,110 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <string>
+
+#include "talk/app/webrtc/androidvideocapturer.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc_jni {
+
+class NativeTextureHandleImpl;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++ only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+ static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
+
+ AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
+
+ void Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) override;
+ void Stop() override;
+
+ std::string GetSupportedFormats() override;
+
+ // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
+ void OnCapturerStarted(bool success);
+ void OnMemoryBufferFrame(void* video_frame, int length, int width,
+ int height, int rotation, int64_t timestamp_ns);
+ void OnTextureFrame(int width, int height, int64_t timestamp_ns,
+ const NativeTextureHandleImpl& handle);
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+ ~AndroidVideoCapturerJni();
+
+ private:
+ void ReturnBuffer(int64_t time_stamp);
+ JNIEnv* jni();
+
+ // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
+ template <typename T>
+ struct Identity {
+ typedef T type;
+ };
+
+ // Helper function to make safe asynchronous calls to |capturer_|. The calls
+ // are not guaranteed to be delivered.
+ template <typename... Args>
+ void AsyncCapturerInvoke(
+ const char* method_name,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args);
+
+ const ScopedGlobalRef<jobject> j_capturer_global_;
+ const ScopedGlobalRef<jclass> j_video_capturer_class_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+
+ rtc::ThreadChecker thread_checker_;
+
+ // |capturer| is a guaranteed to be a valid pointer between a call to
+ // AndroidVideoCapturerDelegate::Start
+ // until AndroidVideoCapturerDelegate::Stop.
+ rtc::CriticalSection capturer_lock_;
+ webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+ // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+ // called on.
+ rtc::scoped_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+ static jobject application_context_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
new file mode 100644
index 0000000000..4c836f8252
--- /dev/null
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -0,0 +1,151 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// ClassReferenceHolder holds global reference to Java classes in app/webrtc.
+class ClassReferenceHolder {
+ public:
+ explicit ClassReferenceHolder(JNIEnv* jni);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+ jclass GetClass(const std::string& name);
+
+ private:
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+ RTC_CHECK(g_class_reference_holder == nullptr);
+ g_class_reference_holder = new ClassReferenceHolder(GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+ g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
+ delete g_class_reference_holder;
+ g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+ LoadClass(jni, "java/nio/ByteBuffer");
+ LoadClass(jni, "java/util/ArrayList");
+ LoadClass(jni, "org/webrtc/AudioTrack");
+ LoadClass(jni, "org/webrtc/DataChannel");
+ LoadClass(jni, "org/webrtc/DataChannel$Buffer");
+ LoadClass(jni, "org/webrtc/DataChannel$Init");
+ LoadClass(jni, "org/webrtc/DataChannel$State");
+ LoadClass(jni, "org/webrtc/IceCandidate");
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+ LoadClass(jni, "android/graphics/SurfaceTexture");
+ LoadClass(jni, "javax/microedition/khronos/egl/EGLContext");
+ LoadClass(jni, "org/webrtc/CameraEnumerator");
+ LoadClass(jni, "org/webrtc/Camera2Enumerator");
+ LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
+ LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
+ LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+ LoadClass(jni, "org/webrtc/EglBase");
+ LoadClass(jni, "org/webrtc/NetworkMonitor");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
+#endif
+ LoadClass(jni, "org/webrtc/MediaSource$State");
+ LoadClass(jni, "org/webrtc/MediaStream");
+ LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
+ LoadClass(jni, "org/webrtc/PeerConnectionFactory");
+ LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
+ LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
+ LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
+ LoadClass(jni, "org/webrtc/RtpReceiver");
+ LoadClass(jni, "org/webrtc/RtpSender");
+ LoadClass(jni, "org/webrtc/SessionDescription");
+ LoadClass(jni, "org/webrtc/SessionDescription$Type");
+ LoadClass(jni, "org/webrtc/StatsReport");
+ LoadClass(jni, "org/webrtc/StatsReport$Value");
+ LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
+ LoadClass(jni, "org/webrtc/VideoCapturer");
+ LoadClass(jni, "org/webrtc/VideoTrack");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+ RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+ for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+ RTC_CHECK(localRef) << name;
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+ RTC_CHECK(globalRef) << name;
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+ return g_class_reference_holder->GetClass(name);
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.h b/talk/app/webrtc/java/jni/classreferenceholder.h
new file mode 100644
index 0000000000..46e018ba77
--- /dev/null
+++ b/talk/app/webrtc/java/jni/classreferenceholder.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack. Consequently, we only look up all classes once in app/webrtc.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+
+#include <jni.h>
+#include <string>
+#include <map>
+
+namespace webrtc_jni {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_UnLoad.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
+ Java_org_webrtc_##name
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_CLASSREFERENCEHOLDER_H_
diff --git a/talk/app/webrtc/java/jni/jni_helpers.cc b/talk/app/webrtc/java/jni/jni_helpers.cc
new file mode 100644
index 0000000000..755698e379
--- /dev/null
+++ b/talk/app/webrtc/java/jni/jni_helpers.cc
@@ -0,0 +1,288 @@
+
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+
+#include <asm/unistd.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+#include "unicode/unistr.h"
+
+namespace webrtc_jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data. Non-NULL in threads attached to |g_jvm| by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+using icu::UnicodeString;
+
+JavaVM *GetJVM() {
+ RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+ return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+ void* env = NULL;
+ jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+ RTC_CHECK(((env != NULL) && (status == JNI_OK)) ||
+ ((env == NULL) && (status == JNI_EDETACHED)))
+ << "Unexpected GetEnv return: " << status << ":" << env;
+ return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+ // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
+ // we were responsible for originally attaching the thread, so are responsible
+ // for detaching it now. However, because some JVM implementations (notably
+ // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+ // the JVMs accounting info for this thread may already be wiped out by the
+ // time this is called. Thus it may appear we are already detached even though
+ // it was our responsibility to detach! Oh well.
+ if (!GetEnv())
+ return;
+
+ RTC_CHECK(GetEnv() == prev_jni_ptr)
+ << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+ jint status = g_jvm->DetachCurrentThread();
+ RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+ RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+ RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+ << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM *jvm) {
+ RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+ g_jvm = jvm;
+ RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+ RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+ JNIEnv* jni = nullptr;
+ if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+ return -1;
+
+ return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+ char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
+ RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+ static_cast<long>(syscall(__NR_gettid))),
+ sizeof(buf))
+ << "Thread id is bigger than uint64??";
+ return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+ char name[17] = {0};
+ if (prctl(PR_GET_NAME, name) != 0)
+ return std::string("<noname>");
+ return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ JNIEnv* jni = GetEnv();
+ if (jni)
+ return jni;
+ RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+ << "TLS has a JNIEnv* but not attached?";
+
+ std::string name(GetThreadName() + " - " + GetThreadId());
+ JavaVMAttachArgs args;
+ args.version = JNI_VERSION_1_6;
+ args.name = &name[0];
+ args.group = NULL;
+ // Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
+ void* env = NULL;
+#else
+ JNIEnv* env = NULL;
+#endif
+ RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+ << "Failed to attach thread";
+ RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+ jni = reinterpret_cast<JNIEnv*>(env);
+ RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+ return jni;
+}
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr) {
+ static_assert(sizeof(intptr_t) <= sizeof(jlong),
+ "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the static_assert above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+ return ret;
+}
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
+ jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+ CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jmethodID m = jni->GetStaticMethodID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jfieldID GetFieldID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jfieldID f = jni->GetFieldID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetFieldID";
+ RTC_CHECK(f) << name << ", " << signature;
+ return f;
+}
+
+jclass GetObjectClass(JNIEnv* jni, jobject object) {
+ jclass c = jni->GetObjectClass(object);
+ CHECK_EXCEPTION(jni) << "error during GetObjectClass";
+ RTC_CHECK(c) << "GetObjectClass returned NULL";
+ return c;
+}
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+ jobject o = jni->GetObjectField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetObjectField";
+ RTC_CHECK(o) << "GetObjectField returned NULL";
+ return o;
+}
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
+ return static_cast<jstring>(GetObjectField(jni, object, id));
+}
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
+ jlong l = jni->GetLongField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetLongField";
+ return l;
+}
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
+ jint i = jni->GetIntField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetIntField";
+ return i;
+}
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
+ jboolean b = jni->GetBooleanField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetBooleanField";
+ return b;
+}
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ return jni->NewLocalRef(obj) == NULL;
+}
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
+ UnicodeString ustr(UnicodeString::fromUTF8(native));
+ jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
+ CHECK_EXCEPTION(jni) << "error during NewString";
+ return jstr;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+ const jchar* jchars = jni->GetStringChars(j_string, NULL);
+ CHECK_EXCEPTION(jni) << "Error during GetStringChars";
+ UnicodeString ustr(jchars, jni->GetStringLength(j_string));
+ CHECK_EXCEPTION(jni) << "Error during GetStringLength";
+ jni->ReleaseStringChars(j_string, jchars);
+ CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
+ std::string ret;
+ return ustr.toUTF8String(ret);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index) {
+ jmethodID state_values_id = GetStaticMethodID(
+ jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
+ jobjectArray state_values = static_cast<jobjectArray>(
+ jni->CallStaticObjectMethod(state_class, state_values_id));
+ CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
+ jobject ret = jni->GetObjectArrayElement(state_values, index);
+ CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
+ return ret;
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+ jobject ret = jni->NewGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+ RTC_CHECK(ret);
+ return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+ jni->DeleteGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+ jni_->PopLocalFrame(NULL);
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/jni_helpers.h b/talk/app/webrtc/java/jni/jni_helpers.h
new file mode 100644
index 0000000000..7072ee855e
--- /dev/null
+++ b/talk/app/webrtc/java/jni/jni_helpers.h
@@ -0,0 +1,141 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// This file contain convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_JNI_HELPERS_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/checks.h"
+
+// Abort the process if |jni| has a Java exception pending.
+// This macros uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear ignoring their return values and sending ""
+// to the error stream.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+// Helper that calls ptr->Release() and aborts the process with a useful
+// message if that didn't actually delete *ptr because of extra refcounts.
+#define CHECK_RELEASE(ptr) \
+ RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
+
+namespace webrtc_jni {
+
+jint InitGlobalJniVariables(JavaVM *jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM *GetJVM();
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr);
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature);
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
+ const char* signature);
+
+jclass GetObjectClass(JNIEnv* jni, jobject object);
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj);
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
class ScopedLocalRefFrame {
 public:
  // Pushes a local reference frame on |jni|; aborts on failure.
  explicit ScopedLocalRefFrame(JNIEnv* jni);
  // Pops the frame, releasing all local refs created during its lifetime.
  ~ScopedLocalRefFrame();

 private:
  JNIEnv* jni_;
};
+
+// Scoped holder for global Java refs.
+template<class T> // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+ ScopedGlobalRef(JNIEnv* jni, T obj)
+ : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
+ ~ScopedGlobalRef() {
+ DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
+ }
+ T operator*() const {
+ return obj_;
+ }
+ private:
+ T obj_;
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_JNI_HELPERS_H_
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
new file mode 100644
index 0000000000..ac3e0455df
--- /dev/null
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -0,0 +1,95 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+
+#include "webrtc/base/checks.h"
+
+namespace webrtc_jni {
+
+NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
+ : oes_texture_id(j_oes_texture_id) {
+ RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
+ jfloat* transform_matrix_ptr =
+ jni->GetFloatArrayElements(j_transform_matrix, nullptr);
+ for (int i = 0; i < 16; ++i) {
+ sampling_matrix[i] = transform_matrix_ptr[i];
+ }
+ jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
+}
+
// Starts out empty: no texture object and an invalid (-1) texture id.
NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}

// Returns the stored texture object (a jobject) as an opaque handle.
void* NativeHandleImpl::GetHandle() {
  return texture_object_;
}

// Returns the texture id, or -1 if SetTextureObject() was never called.
int NativeHandleImpl::GetTextureId() {
  return texture_id_;
}

// Replaces both the stored texture object (expected to be a jobject) and the
// texture id.
void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
  texture_object_ = reinterpret_cast<jobject>(texture_object);
  texture_id_ = texture_id;
}
+
// Wraps |native_handle| in a webrtc::NativeHandleBuffer of the given size.
// Does not take ownership of |native_handle|.
JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
                                             int width,
                                             int height)
    : NativeHandleBuffer(native_handle, width, height) {}

rtc::scoped_refptr<webrtc::VideoFrameBuffer>
JniNativeHandleBuffer::NativeToI420Buffer() {
  // TODO(pbos): Implement before using this in the encoder pipeline (or
  // remove the RTC_CHECK() in VideoCapture).
  RTC_NOTREACHED();
  return nullptr;
}
+
AndroidTextureBuffer::AndroidTextureBuffer(
    int width,
    int height,
    const NativeTextureHandleImpl& native_handle,
    const rtc::Callback0<void>& no_longer_used)
    // The base class stores a pointer to |native_handle_|, the member copy of
    // |native_handle| owned by this object. Taking the member's address here
    // is fine even though the member is initialized after the base.
    : webrtc::NativeHandleBuffer(&native_handle_, width, height),
      native_handle_(native_handle),
      no_longer_used_cb_(no_longer_used) {}

// Invokes the |no_longer_used| callback now that the buffer is going away.
AndroidTextureBuffer::~AndroidTextureBuffer() {
  no_longer_used_cb_();
}

// Not implemented: always aborts via RTC_NOTREACHED().
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
AndroidTextureBuffer::NativeToI420Buffer() {
  RTC_NOTREACHED()
      << "AndroidTextureBuffer::NativeToI420Buffer not implemented.";
  return nullptr;
}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
new file mode 100644
index 0000000000..dd04bc20b1
--- /dev/null
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -0,0 +1,89 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+
+#include <jni.h>
+
+#include "webrtc/common_video/interface/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Wrapper for texture object.
struct NativeTextureHandleImpl {
  // Copies the 16-element |j_transform_matrix| into |sampling_matrix| and
  // aborts if the array length differs.
  NativeTextureHandleImpl(JNIEnv* jni,
                          jint j_oes_texture_id,
                          jfloatArray j_transform_matrix);

  // OES texture id; immutable after construction.
  const int oes_texture_id;
  float sampling_matrix[16];
};
+
+// Native handle for SurfaceTexture + texture id.
class NativeHandleImpl {
 public:
  // Constructs an empty handle (NULL object, texture id -1).
  NativeHandleImpl();

  // Returns the stored texture object as an opaque pointer.
  void* GetHandle();
  // Returns the stored texture id (-1 when unset).
  int GetTextureId();
  // Replaces both the texture object and the texture id.
  void SetTextureObject(void* texture_object, int texture_id);

 private:
  // Stored as jobject; exposed as void* via GetHandle().
  jobject texture_object_;
  int32_t texture_id_;
};
+
class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
 public:
  // Does not take ownership of |native_handle| (see TODO below).
  JniNativeHandleBuffer(void* native_handle, int width, int height);

  // TODO(pbos): Override destructor to release native handle, at the moment the
  // native handle is not released based on refcount.

 private:
  // Not implemented; the definition aborts via RTC_NOTREACHED().
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
};
+
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
 public:
  // Keeps a copy of |native_handle| and invokes |no_longer_used| on
  // destruction.
  AndroidTextureBuffer(int width,
                       int height,
                       const NativeTextureHandleImpl& native_handle,
                       const rtc::Callback0<void>& no_longer_used);
  ~AndroidTextureBuffer();
  // Not implemented; the definition aborts via RTC_NOTREACHED().
  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;

 private:
  NativeTextureHandleImpl native_handle_;
  rtc::Callback0<void> no_longer_used_cb_;
};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
new file mode 100644
index 0000000000..e75cd553b6
--- /dev/null
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -0,0 +1,2055 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Hints for future visitors:
+// This entire file is an implementation detail of the org.webrtc Java package,
+// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
+// The layout of this file is roughly:
+// - various helper C++ functions & classes that wrap Java counterparts and
+// expose a C++ interface that can be passed to the C++ PeerConnection APIs
+// - implementations of methods declared "static" in the Java package (named
+// things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
+// the JNI spec).
+//
+// Lifecycle notes: objects are owned where they will be called; in other words
+// FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
+// ref simulating the jlong held in Java-land, and then Release()s the ref in
+// the respective free call. Sometimes this AddRef is implicit in the
+// construction of a scoped_refptr<> which is then .release()d.
+// Any persistent (non-local) references from C++ to Java must be global or weak
+// (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call. In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include <limits>
+
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/media/base/videocapturer.h"
+#include "talk/media/base/videorenderer.h"
+#include "talk/media/devices/videorendererfactory.h"
+#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
+#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+#include "talk/app/webrtc/androidvideocapturer.h"
+#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
+#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
+#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
+#include "talk/app/webrtc/java/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+using webrtc::LogcatTraceContext;
+#endif
+
+using cricket::WebRtcVideoDecoderFactory;
+using cricket::WebRtcVideoEncoderFactory;
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrackVector;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SetSessionDescriptionObserver;
+using webrtc::StatsObserver;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+using webrtc::VideoRendererInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Field trials initialization string
+static char *field_trials_init_string = NULL;
+
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+static bool video_hw_acceleration_enabled = true;
+#endif
+
// Invoked by the JVM when this native library is loaded. Initializes the
// global JNI variables, SSL, and the global class reference holder. Returns
// the value from InitGlobalJniVariables() (expected to be a JNI version
// constant), or -1 on failure.
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
  jint ret = InitGlobalJniVariables(jvm);
  if (ret < 0)
    return -1;

  RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
  LoadGlobalClassReferenceHolder();

  return ret;
}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static jobject JavaEnumFromIndex(
+ JNIEnv* jni, const std::string& state_class_fragment, int index) {
+ const std::string state_class = "org/webrtc/" + state_class_fragment;
+ return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+ state_class, index);
+}
+
// Builds a native DataChannelInit from a Java org.webrtc.DataChannel.Init
// object by copying every field. The Java object is not needed afterwards.
static DataChannelInit JavaDataChannelInitToNative(
    JNIEnv* jni, jobject j_init) {
  DataChannelInit init;

  // Look up the field ids on org.webrtc.DataChannel.Init.
  jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
  jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
  jfieldID max_retransmit_time_id =
      GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
  jfieldID max_retransmits_id =
      GetFieldID(jni, j_init_class, "maxRetransmits", "I");
  jfieldID protocol_id =
      GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
  jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
  jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");

  // Read each Java field into the native struct.
  init.ordered = GetBooleanField(jni, j_init, ordered_id);
  init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
  init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
  init.protocol = JavaToStdString(
      jni, GetStringField(jni, j_init, protocol_id));
  init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
  init.id = GetIntField(jni, j_init, id_id);

  return init;
}
+
+class ConstraintsWrapper;
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface. Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
class PCOJava : public PeerConnectionObserver {
 public:
  // Caches a global ref to the Java observer plus global refs to the wrapper
  // classes (MediaStream, AudioTrack, VideoTrack, DataChannel) and their
  // "(J)V" constructors, so callbacks avoid repeated lookups.
  PCOJava(JNIEnv* jni, jobject j_observer)
      : j_observer_global_(jni, j_observer),
        j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
        j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
        j_media_stream_ctor_(GetMethodID(
            jni, *j_media_stream_class_, "<init>", "(J)V")),
        j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
        j_audio_track_ctor_(GetMethodID(
            jni, *j_audio_track_class_, "<init>", "(J)V")),
        j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
        j_video_track_ctor_(GetMethodID(
            jni, *j_video_track_class_, "<init>", "(J)V")),
        j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
        j_data_channel_ctor_(GetMethodID(
            jni, *j_data_channel_class_, "<init>", "(J)V")) {
  }

  // Disposes any remote streams still tracked (each holds a global ref).
  virtual ~PCOJava() {
    ScopedLocalRefFrame local_ref_frame(jni());
    while (!remote_streams_.empty())
      DisposeRemoteStream(remote_streams_.begin());
  }

  // Wraps |candidate| in a Java org.webrtc.IceCandidate and forwards it to
  // the observer's onIceCandidate().
  void OnIceCandidate(const IceCandidateInterface* candidate) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    std::string sdp;
    RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
    jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
    jmethodID ctor = GetMethodID(jni(), candidate_class,
        "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
    jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
    jstring j_sdp = JavaStringFromStdString(jni(), sdp);
    jobject j_candidate = jni()->NewObject(
        candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
    CHECK_EXCEPTION(jni()) << "error during NewObject";
    jmethodID m = GetMethodID(jni(), *j_observer_class_,
                              "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Forwards the new signaling state to the observer as the matching Java
  // enum value.
  void OnSignalingChange(
      PeerConnectionInterface::SignalingState new_state) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onSignalingChange",
        "(Lorg/webrtc/PeerConnection$SignalingState;)V");
    jobject new_state_enum =
        JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Forwards the new ICE connection state to the observer.
  void OnIceConnectionChange(
      PeerConnectionInterface::IceConnectionState new_state) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onIceConnectionChange",
        "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
    jobject new_state_enum = JavaEnumFromIndex(
        jni(), "PeerConnection$IceConnectionState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Forwards the receiving flag to the observer.
  void OnIceConnectionReceivingChange(bool receiving) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onIceConnectionReceivingChange", "(Z)V");
    jni()->CallVoidMethod(*j_observer_global_, m, receiving);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Forwards the new ICE gathering state to the observer.
  void OnIceGatheringChange(
      PeerConnectionInterface::IceGatheringState new_state) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onIceGatheringChange",
        "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
    jobject new_state_enum = JavaEnumFromIndex(
        jni(), "PeerConnection$IceGatheringState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Wraps |stream| and its audio/video tracks in Java objects (each wrapper
  // takes a native ref, released by the corresponding dispose()), records the
  // stream in remote_streams_, and calls the observer's onAddStream().
  void OnAddStream(MediaStreamInterface* stream) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    // Java MediaStream holds one reference. Corresponding Release() is in
    // MediaStream_free, triggered by MediaStream.dispose().
    stream->AddRef();
    jobject j_stream =
        jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
                         reinterpret_cast<jlong>(stream));
    CHECK_EXCEPTION(jni()) << "error during NewObject";

    for (const auto& track : stream->GetAudioTracks()) {
      jstring id = JavaStringFromStdString(jni(), track->id());
      // Java AudioTrack holds one reference. Corresponding Release() is in
      // MediaStreamTrack_free, triggered by AudioTrack.dispose().
      track->AddRef();
      jobject j_track =
          jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
                           reinterpret_cast<jlong>(track.get()), id);
      CHECK_EXCEPTION(jni()) << "error during NewObject";
      jfieldID audio_tracks_id = GetFieldID(jni(),
                                            *j_media_stream_class_,
                                            "audioTracks",
                                            "Ljava/util/LinkedList;");
      jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
      jmethodID add = GetMethodID(jni(),
                                  GetObjectClass(jni(), audio_tracks),
                                  "add",
                                  "(Ljava/lang/Object;)Z");
      jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
      RTC_CHECK(added);
    }

    for (const auto& track : stream->GetVideoTracks()) {
      jstring id = JavaStringFromStdString(jni(), track->id());
      // Java VideoTrack holds one reference. Corresponding Release() is in
      // MediaStreamTrack_free, triggered by VideoTrack.dispose().
      track->AddRef();
      jobject j_track =
          jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
                           reinterpret_cast<jlong>(track.get()), id);
      CHECK_EXCEPTION(jni()) << "error during NewObject";
      jfieldID video_tracks_id = GetFieldID(jni(),
                                            *j_media_stream_class_,
                                            "videoTracks",
                                            "Ljava/util/LinkedList;");
      jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
      jmethodID add = GetMethodID(jni(),
                                  GetObjectClass(jni(), video_tracks),
                                  "add",
                                  "(Ljava/lang/Object;)Z");
      jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
      RTC_CHECK(added);
    }
    // Keep a global ref so the Java stream survives until OnRemoveStream or
    // destruction.
    remote_streams_[stream] = NewGlobalRef(jni(), j_stream);

    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
                              "(Lorg/webrtc/MediaStream;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Notifies the observer, then disposes the matching Java stream wrapper.
  void OnRemoveStream(MediaStreamInterface* stream) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
    RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << std::hex
                                           << stream;
    jobject j_stream = it->second;
    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
                              "(Lorg/webrtc/MediaStream;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
    DisposeRemoteStream(it);
  }

  // Wraps |channel| in a Java DataChannel and hands it to the observer.
  void OnDataChannel(DataChannelInterface* channel) override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jobject j_channel = jni()->NewObject(
        *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
    CHECK_EXCEPTION(jni()) << "error during NewObject";

    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
                              "(Lorg/webrtc/DataChannel;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_channel);

    // Channel is now owned by Java object, and will be freed from
    // DataChannel.dispose(). Important that this be done _after_ the
    // CallVoidMethod above as Java code might call back into native code and be
    // surprised to see a refcount of 2.
    int bumped_count = channel->AddRef();
    RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";

    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Forwards the renegotiation-needed event to the observer.
  void OnRenegotiationNeeded() override {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m =
        GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
    jni()->CallVoidMethod(*j_observer_global_, m);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Takes ownership of |constraints|; may only be called once.
  void SetConstraints(ConstraintsWrapper* constraints) {
    RTC_CHECK(!constraints_.get()) << "constraints already set!";
    constraints_.reset(constraints);
  }

  const ConstraintsWrapper* constraints() { return constraints_.get(); }

 private:
  typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;

  // Calls MediaStream.dispose() on the Java wrapper and drops its global ref;
  // erases the map entry first so re-entrant callbacks see consistent state.
  void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it) {
    jobject j_stream = it->second;
    remote_streams_.erase(it);
    jni()->CallVoidMethod(
        j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
    CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
    DeleteGlobalRef(jni(), j_stream);
  }

  // Returns a JNIEnv for the current thread, attaching it if needed.
  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
  const ScopedGlobalRef<jclass> j_media_stream_class_;
  const jmethodID j_media_stream_ctor_;
  const ScopedGlobalRef<jclass> j_audio_track_class_;
  const jmethodID j_audio_track_ctor_;
  const ScopedGlobalRef<jclass> j_video_track_class_;
  const jmethodID j_video_track_ctor_;
  const ScopedGlobalRef<jclass> j_data_channel_class_;
  const jmethodID j_data_channel_ctor_;
  // C++ -> Java remote streams. The stored jobects are global refs and must be
  // manually deleted upon removal. Use DisposeRemoteStream().
  NativeToJavaStreamsMap remote_streams_;
  scoped_ptr<ConstraintsWrapper> constraints_;
};
+
+// Wrapper for a Java MediaConstraints object. Copies all needed data so when
+// the constructor returns the Java object is no longer needed.
class ConstraintsWrapper : public MediaConstraintsInterface {
 public:
  // Copies the "mandatory" and "optional" constraint lists out of the Java
  // object; |j_constraints| is not needed after construction.
  ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
    PopulateConstraintsFromJavaPairList(
        jni, j_constraints, "mandatory", &mandatory_);
    PopulateConstraintsFromJavaPairList(
        jni, j_constraints, "optional", &optional_);
  }

  virtual ~ConstraintsWrapper() {}

  // MediaConstraintsInterface.
  const Constraints& GetMandatory() const override { return mandatory_; }

  const Constraints& GetOptional() const override { return optional_; }

 private:
  // Helper for translating a List<Pair<String, String>> to a Constraints.
  // Reads the |field_name| List field of |j_constraints|, iterates it, and
  // appends one Constraint per (getKey(), getValue()) pair to |field|.
  static void PopulateConstraintsFromJavaPairList(
      JNIEnv* jni, jobject j_constraints,
      const char* field_name, Constraints* field) {
    jfieldID j_id = GetFieldID(jni,
        GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
    jobject j_list = GetObjectField(jni, j_constraints, j_id);
    jmethodID j_iterator_id = GetMethodID(jni,
        GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
    jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
    jmethodID j_has_next = GetMethodID(jni,
        GetObjectClass(jni, j_iterator), "hasNext", "()Z");
    jmethodID j_next = GetMethodID(jni,
        GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
    while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
      CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
      jobject entry = jni->CallObjectMethod(j_iterator, j_next);
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      jmethodID get_key = GetMethodID(jni,
          GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
      jstring j_key = reinterpret_cast<jstring>(
          jni->CallObjectMethod(entry, get_key));
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      jmethodID get_value = GetMethodID(jni,
          GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
      jstring j_value = reinterpret_cast<jstring>(
          jni->CallObjectMethod(entry, get_value));
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      field->push_back(Constraint(JavaToStdString(jni, j_key),
                                  JavaToStdString(jni, j_value)));
    }
    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
  }

  Constraints mandatory_;
  Constraints optional_;
};
+
+// Convert a native SessionDescriptionInterface into a Java
+// org.webrtc.SessionDescription, pairing the canonical type (mapped via
+// SessionDescription.Type.fromCanonicalForm) with the serialized SDP string.
+static jobject JavaSdpFromNativeSdp(
+    JNIEnv* jni, const SessionDescriptionInterface* desc) {
+  std::string sdp;
+  RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+  jstring j_description = JavaStringFromStdString(jni, sdp);
+
+  jclass j_type_class = FindClass(
+      jni, "org/webrtc/SessionDescription$Type");
+  jmethodID j_type_from_canonical = GetStaticMethodID(
+      jni, j_type_class, "fromCanonicalForm",
+      "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
+  jstring j_type_string = JavaStringFromStdString(jni, desc->type());
+  jobject j_type = jni->CallStaticObjectMethod(
+      j_type_class, j_type_from_canonical, j_type_string);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+
+  jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
+  jmethodID j_sdp_ctor = GetMethodID(
+      jni, j_sdp_class, "<init>",
+      "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
+  jobject j_sdp = jni->NewObject(
+      j_sdp_class, j_sdp_ctor, j_type, j_description);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  return j_sdp;
+}
+
+// Adapter dispatching {Create,Set}SessionDescriptionObserver callbacks from
+// C++ back into a Java SdpObserver. Takes ownership of |constraints| so it
+// stays alive for the duration of the async operation.
+template <class T> // T is one of {Create,Set}SessionDescriptionObserver.
+class SdpObserverWrapper : public T {
+ public:
+  SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                     ConstraintsWrapper* constraints)
+      : constraints_(constraints),
+        j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
+  }
+
+  virtual ~SdpObserverWrapper() {}
+
+  // No-argument overload: invoked for Set operations.
+  // Can't mark override because of templating.
+  virtual void OnSuccess() {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
+    jni()->CallVoidMethod(*j_observer_global_, m);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  // Description-argument overload: invoked for Create operations.
+  // Can't mark override because of templating.
+  virtual void OnSuccess(SessionDescriptionInterface* desc) {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onCreateSuccess",
+        "(Lorg/webrtc/SessionDescription;)V");
+    jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ protected:
+  // Common implementation for failure of Set & Create types, distinguished by
+  // |op| being "Set" or "Create". Callers (the derived OnFailure overrides)
+  // are responsible for creating the ScopedLocalRefFrame.
+  void OnFailure(const std::string& op, const std::string& error) {
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
+                              "(Ljava/lang/String;)V");
+    jstring j_error_string = JavaStringFromStdString(jni(), error);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+ private:
+  scoped_ptr<ConstraintsWrapper> constraints_;
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+};
+
+// Failure dispatch for CreateSessionDescriptionObserver: routes errors to the
+// Java observer's onCreateFailure via the shared base implementation.
+class CreateSdpObserverWrapper
+    : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
+ public:
+  CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                           ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    // The base helper expects its caller to own a local-ref frame.
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::OnFailure("Create", error);
+  }
+};
+
+// Failure dispatch for SetSessionDescriptionObserver: routes errors to the
+// Java observer's onSetFailure via the shared base implementation.
+class SetSdpObserverWrapper
+    : public SdpObserverWrapper<SetSessionDescriptionObserver> {
+ public:
+  SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                        ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    // The base helper expects its caller to own a local-ref frame.
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::OnFailure("Set", error);
+  }
+};
+
+// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
+// and dispatching the callback from C++ back to Java.
+class DataChannelObserverWrapper : public DataChannelObserver {
+ public:
+  // Caches the Java classes and method ids up front so per-callback work is
+  // only the method invocation itself.
+  DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
+        j_on_buffered_amount_change_mid_(GetMethodID(
+            jni, *j_observer_class_, "onBufferedAmountChange", "(J)V")),
+        j_on_state_change_mid_(
+            GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
+        j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
+                                      "(Lorg/webrtc/DataChannel$Buffer;)V")),
+        j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, "<init>",
+                                   "(Ljava/nio/ByteBuffer;Z)V")) {}
+
+  virtual ~DataChannelObserverWrapper() {}
+
+  void OnBufferedAmountChange(uint64_t previous_amount) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
+                          previous_amount);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnStateChange() override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnMessage(const DataBuffer& buffer) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    // The ByteBuffer wraps |buffer|'s storage directly (no copy); Java must
+    // consume it within the onMessage callback.
+    jobject byte_buffer = jni()->NewDirectByteBuffer(
+        const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+    jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
+                                        byte_buffer, buffer.binary);
+    jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_buffer_class_;
+  const jmethodID j_on_buffered_amount_change_mid_;
+  const jmethodID j_on_state_change_mid_;
+  const jmethodID j_on_message_mid_;
+  const jmethodID j_buffer_ctor_;
+};
+
+// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
+// dispatching the callback from C++ back to Java.
+class StatsObserverWrapper : public StatsObserver {
+ public:
+  StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
+        j_stats_report_ctor_(GetMethodID(
+            jni, *j_stats_report_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;D"
+            "[Lorg/webrtc/StatsReport$Value;)V")),
+        j_value_class_(jni, FindClass(
+            jni, "org/webrtc/StatsReport$Value")),
+        j_value_ctor_(GetMethodID(
+            jni, *j_value_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;)V")) {
+  }
+
+  virtual ~StatsObserverWrapper() {}
+
+  // Converts |reports| to a StatsReport[] and hands it to the Java observer.
+  void OnComplete(const StatsReports& reports) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobjectArray j_reports = ReportsToJava(jni(), reports);
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
+                              "([Lorg/webrtc/StatsReport;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  // Builds one Java StatsReport per native report; local refs created per
+  // iteration are released by the per-iteration ScopedLocalRefFrame.
+  jobjectArray ReportsToJava(
+      JNIEnv* jni, const StatsReports& reports) {
+    jobjectArray reports_array = jni->NewObjectArray(
+        reports.size(), *j_stats_report_class_, NULL);
+    int i = 0;
+    for (const auto* report : reports) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
+      jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
+      jobjectArray j_values = ValuesToJava(jni, report->values());
+      jobject j_report = jni->NewObject(*j_stats_report_class_,
+                                        j_stats_report_ctor_,
+                                        j_id,
+                                        j_type,
+                                        report->timestamp(),
+                                        j_values);
+      jni->SetObjectArrayElement(reports_array, i++, j_report);
+    }
+    return reports_array;
+  }
+
+  // Builds one Java StatsReport.Value per native (name, value) entry.
+  jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
+    jobjectArray j_values = jni->NewObjectArray(
+        values.size(), *j_value_class_, NULL);
+    int i = 0;
+    for (const auto& it : values) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      // Should we use the '.name' enum value here instead of converting the
+      // name to a string?
+      jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
+      jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
+      jobject j_element_value =
+          jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+      jni->SetObjectArrayElement(j_values, i++, j_element_value);
+    }
+    return j_values;
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_stats_report_class_;
+  const jmethodID j_stats_report_ctor_;
+  const ScopedGlobalRef<jclass> j_value_class_;
+  const jmethodID j_value_ctor_;
+};
+
+// Adapter presenting a cricket::VideoRenderer as a
+// webrtc::VideoRendererInterface. Takes ownership of the wrapped renderer.
+class VideoRendererWrapper : public VideoRendererInterface {
+ public:
+  // Returns NULL when |renderer| is NULL, so callers can pass through a
+  // possibly-absent renderer.
+  static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
+    if (renderer)
+      return new VideoRendererWrapper(renderer);
+    return NULL;
+  }
+
+  virtual ~VideoRendererWrapper() {}
+
+  // This wraps VideoRenderer which still has SetSize.
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
+    const cricket::VideoFrame* frame =
+        video_frame->GetCopyWithRotationApplied();
+    // Only notify the renderer on an actual dimension change; the cached
+    // width/height avoid a SetSize call per frame.
+    if (width_ != frame->GetWidth() || height_ != frame->GetHeight()) {
+      width_ = frame->GetWidth();
+      height_ = frame->GetHeight();
+      renderer_->SetSize(width_, height_, 0);
+    }
+    renderer_->RenderFrame(frame);
+  }
+
+ private:
+  explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
+      : width_(0), height_(0), renderer_(renderer) {}
+  int width_, height_;
+  scoped_ptr<cricket::VideoRenderer> renderer_;
+};
+
+// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
+// instance.
+class JavaVideoRendererWrapper : public VideoRendererInterface {
+ public:
+  // Caches both I420Frame constructors (byte-buffer and texture flavors) so
+  // RenderFrame can pick the right one per frame.
+  JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
+      : j_callbacks_(jni, j_callbacks),
+        j_render_frame_id_(GetMethodID(
+            jni, GetObjectClass(jni, j_callbacks), "renderFrame",
+            "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+        j_frame_class_(jni,
+                       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
+        j_i420_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
+        j_texture_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>",
+            "(IIILjava/lang/Object;IJ)V")),
+        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+    CHECK_EXCEPTION(jni);
+  }
+
+  virtual ~JavaVideoRendererWrapper() {}
+
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    // A non-null native handle means the frame lives in a texture rather than
+    // in I420 byte planes.
+    jobject j_frame = (video_frame->GetNativeHandle() != nullptr)
+                          ? CricketToJavaTextureFrame(video_frame)
+                          : CricketToJavaI420Frame(video_frame);
+    // |j_callbacks_| is responsible for releasing |j_frame| with
+    // VideoRenderer.renderFrameDone().
+    jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+    CHECK_EXCEPTION(jni());
+  }
+
+ private:
+  // Make a shallow copy of |frame| to be used with Java. The callee has
+  // ownership of the frame, and the frame should be released with
+  // VideoRenderer.releaseNativeFrame().
+  static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
+    return jlongFromPointer(frame->Copy());
+  }
+
+  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
+  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
+    jintArray strides = jni()->NewIntArray(3);
+    jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
+    strides_array[0] = frame->GetYPitch();
+    strides_array[1] = frame->GetUPitch();
+    strides_array[2] = frame->GetVPitch();
+    jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+    jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+    // The ByteBuffers alias |frame|'s plane storage; the shallow copy passed
+    // as the last ctor argument keeps that storage alive for Java.
+    jobject y_buffer =
+        jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
+                                   frame->GetYPitch() * frame->GetHeight());
+    jobject u_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
+    jobject v_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
+    jni()->SetObjectArrayElement(planes, 0, y_buffer);
+    jni()->SetObjectArrayElement(planes, 1, u_buffer);
+    jni()->SetObjectArrayElement(planes, 2, v_buffer);
+    return jni()->NewObject(
+        *j_frame_class_, j_i420_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        strides, planes, javaShallowCopy(frame));
+  }
+
+  // Return a VideoRenderer.I420Frame referring texture object in |frame|.
+  jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+    NativeHandleImpl* handle =
+        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
+    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
+    int texture_id = handle->GetTextureId();
+    return jni()->NewObject(
+        *j_frame_class_, j_texture_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        texture_object, texture_id, javaShallowCopy(frame));
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  ScopedGlobalRef<jobject> j_callbacks_;
+  jmethodID j_render_frame_id_;
+  ScopedGlobalRef<jclass> j_frame_class_;
+  jmethodID j_i420_frame_ctor_id_;
+  jmethodID j_texture_frame_ctor_id_;
+  ScopedGlobalRef<jclass> j_byte_buffer_class_;
+};
+
+
+// Read the |nativeDataChannel| long field of a Java DataChannel instance and
+// reinterpret it as the C++ DataChannelInterface* it encodes.
+static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
+  jclass j_dc_class = GetObjectClass(jni, j_dc);
+  jfieldID native_dc_field =
+      GetFieldID(jni, j_dc_class, "nativeDataChannel", "J");
+  jlong native_ptr = GetLongField(jni, j_dc, native_dc_field);
+  return reinterpret_cast<DataChannelInterface*>(native_ptr);
+}
+
+// Create a C++ observer that forwards to |j_observer|, attach it to the
+// channel, and transfer ownership to Java as a raw pointer packed in a jlong
+// (reclaimed in DataChannel_unregisterObserverNative).
+JOW(jlong, DataChannel_registerObserverNative)(
+    JNIEnv* jni, jobject j_dc, jobject j_observer) {
+  DataChannelObserverWrapper* observer =
+      new DataChannelObserverWrapper(jni, j_observer);
+  ExtractNativeDC(jni, j_dc)->RegisterObserver(observer);
+  return jlongFromPointer(observer);
+}
+
+// Detach the observer from the channel first so no callback can fire into it
+// while (or after) it is destroyed, then free the wrapper Java handed back.
+JOW(void, DataChannel_unregisterObserverNative)(
+    JNIEnv* jni, jobject j_dc, jlong native_observer) {
+  DataChannelObserverWrapper* observer =
+      reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
+  ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+  delete observer;
+}
+
+// Return the channel's label as a Java String.
+JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
+  const std::string label = ExtractNativeDC(jni, j_dc)->label();
+  return JavaStringFromStdString(jni, label);
+}
+
+// Map the native channel state onto the Java DataChannel.State enum.
+JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
+  auto state = ExtractNativeDC(jni, j_dc)->state();
+  return JavaEnumFromIndex(jni, "DataChannel$State", state);
+}
+
+// Return the channel's buffered byte count. jlong is signed, so verify the
+// unsigned native value fits before the cast.
+JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
+  uint64_t amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+  RTC_CHECK_LE(amount, std::numeric_limits<int64_t>::max())
+      << "buffered_amount overflowed jlong!";
+  return static_cast<jlong>(amount);
+}
+
+// Close the underlying native data channel.
+JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
+  DataChannelInterface* dc = ExtractNativeDC(jni, j_dc);
+  dc->Close();
+}
+
+// Send |data| over the channel. The bytes are copied into an rtc::Buffer, so
+// the Java array is only borrowed; JNI_ABORT releases the elements without
+// copying anything back into the (unmodified) array.
+JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
+                                      jbyteArray data, jboolean binary) {
+  jbyte* bytes = jni->GetByteArrayElements(data, NULL);
+  bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
+      rtc::Buffer(bytes, jni->GetArrayLength(data)),
+      binary));
+  jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
+  return ret;
+}
+
+// Drop the last reference Java holds on the native data channel.
+JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
+  DataChannelInterface* dc = ExtractNativeDC(jni, j_dc);
+  CHECK_RELEASE(dc);
+}
+
+// Configure webrtc tracing and rtc logging severity from Java.
+// |nativeLevels| selects the trace filter; |j_path| is either a file path or
+// the magic "logcat:" value (Android-only) which routes traces to Logcat.
+// Note: the braces of the |path| check straddle the #if blocks so that
+// non-Android builds always take the SetTraceFile branch.
+JOW(void, Logging_nativeEnableTracing)(
+    JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
+    jint nativeSeverity) {
+  std::string path = JavaToStdString(jni, j_path);
+  if (nativeLevels != webrtc::kTraceNone) {
+    webrtc::Trace::set_level_filter(nativeLevels);
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+    if (path != "logcat:") {
+#endif
+      RTC_CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
+          << "SetTraceFile failed";
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+    } else {
+      // Intentionally leak this to avoid needing to reason about its lifecycle.
+      // It keeps no state and functions only as a dispatch point.
+      static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
+    }
+#endif
+  }
+  // Only apply severities inside the valid rtc range; others leave the
+  // current debug-log setting untouched.
+  if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_ERROR) {
+    rtc::LogMessage::LogToDebug(
+        static_cast<rtc::LoggingSeverity>(nativeSeverity));
+  }
+}
+
+// Enable thread ids in rtc log output.
+JOW(void, Logging_nativeEnableLogThreads)(JNIEnv* jni, jclass) {
+  const bool log_threads = true;
+  rtc::LogMessage::LogThreads(log_threads);
+}
+
+// Enable timestamps in rtc log output.
+JOW(void, Logging_nativeEnableLogTimeStamps)(JNIEnv* jni, jclass) {
+  const bool log_timestamps = true;
+  rtc::LogMessage::LogTimestamps(log_timestamps);
+}
+
+// Emit one tagged log line at the given severity on behalf of Java.
+JOW(void, Logging_nativeLog)(
+    JNIEnv* jni, jclass, jint j_severity, jstring j_tag, jstring j_message) {
+  const std::string tag = JavaToStdString(jni, j_tag);
+  const std::string message = JavaToStdString(jni, j_message);
+  LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
+}
+
+// Release the reference Java holds on the native PeerConnection.
+JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
+  PeerConnectionInterface* pc =
+      reinterpret_cast<PeerConnectionInterface*>(j_p);
+  CHECK_RELEASE(pc);
+}
+
+// Destroy the PCOJava observer previously created by nativeCreateObserver.
+JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<PCOJava*>(j_p);
+}
+
+// Release the reference Java holds on a native media source.
+JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
+  MediaSourceInterface* source =
+      reinterpret_cast<MediaSourceInterface*>(j_p);
+  CHECK_RELEASE(source);
+}
+
+// Destroy a native video capturer owned by Java.
+JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
+  cricket::VideoCapturer* capturer =
+      reinterpret_cast<cricket::VideoCapturer*>(j_p);
+  delete capturer;
+}
+
+// Destroy a VideoRendererWrapper owned by Java.
+JOW(void, VideoRenderer_freeGuiVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+  VideoRendererWrapper* renderer =
+      reinterpret_cast<VideoRendererWrapper*>(j_p);
+  delete renderer;
+}
+
+// Destroy a JavaVideoRendererWrapper owned by Java.
+JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+  JavaVideoRendererWrapper* renderer =
+      reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
+  delete renderer;
+}
+
+// Free the shallow frame copy handed to Java by javaShallowCopy().
+JOW(void, VideoRenderer_releaseNativeFrame)(
+    JNIEnv* jni, jclass, jlong j_frame_ptr) {
+  const cricket::VideoFrame* frame =
+      reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+  delete frame;
+}
+
+// Drop the reference Java holds on a native media stream track.
+JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  track->Release();
+}
+
+// Add an audio track to the stream; returns whether the add succeeded.
+JOW(jboolean, MediaStream_nativeAddAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(pointer);
+  AudioTrackInterface* track =
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer);
+  return stream->AddTrack(track);
+}
+
+// Add a video track to the stream; returns whether the add succeeded.
+JOW(jboolean, MediaStream_nativeAddVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(pointer);
+  VideoTrackInterface* track =
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer);
+  return stream->AddTrack(track);
+}
+
+// Remove an audio track from the stream; returns whether it was present.
+JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(pointer);
+  AudioTrackInterface* track =
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer);
+  return stream->RemoveTrack(track);
+}
+
+// Remove a video track from the stream; returns whether it was present.
+JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(pointer);
+  VideoTrackInterface* track =
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer);
+  return stream->RemoveTrack(track);
+}
+
+// Return the stream's label as a Java String.
+JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
+  MediaStreamInterface* stream = reinterpret_cast<MediaStreamInterface*>(j_p);
+  return JavaStringFromStdString(jni, stream->label());
+}
+
+// Release the reference Java holds on a native media stream.
+JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
+  MediaStreamInterface* stream = reinterpret_cast<MediaStreamInterface*>(j_p);
+  CHECK_RELEASE(stream);
+}
+
+// Create a PCOJava observer for |j_observer| and transfer ownership to the
+// Java caller as a jlong; it is reclaimed in PeerConnection_freeObserver().
+// Uses jlongFromPointer (as the rest of this file does) instead of a C-style
+// (jlong) cast, so pointer-width narrowing is checked in one place.
+JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
+    JNIEnv * jni, jclass, jobject j_observer) {
+  return jlongFromPointer(new PCOJava(jni, j_observer));
+}
+
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+// One-time Android initialization: stashes the application context for the
+// network monitor and (guarded by |factory_static_initialized|) wires up the
+// video/audio engine globals. Failures from the individual init steps are
+// OR-ed together; returns true only if nothing failed.
+JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
+    JNIEnv* jni, jclass, jobject context,
+    jboolean initialize_audio, jboolean initialize_video,
+    jboolean video_hw_acceleration) {
+  bool failure = false;
+  video_hw_acceleration_enabled = video_hw_acceleration;
+  AndroidNetworkMonitor::SetAndroidContext(jni, context);
+  if (!factory_static_initialized) {
+    if (initialize_video) {
+      failure |= webrtc::SetRenderAndroidVM(GetJVM());
+      failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+    }
+    if (initialize_audio)
+      failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
+    factory_static_initialized = true;
+  }
+  return !failure;
+}
+#endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+
+// Copy the Java field-trial string into a heap buffer (the field-trial
+// system keeps a pointer to it, so it must outlive this call) and activate
+// it. A NULL |j_trials_init_string| resets field trials to none.
+// NOTE(review): a buffer from a previous call is dropped here without
+// delete[], leaking it if this is invoked more than once — confirm whether
+// repeated calls are expected; the buffer is otherwise freed with the
+// factory in nativeFreeFactory().
+JOW(void, PeerConnectionFactory_initializeFieldTrials)(
+    JNIEnv* jni, jclass, jstring j_trials_init_string) {
+  field_trials_init_string = NULL;
+  if (j_trials_init_string != NULL) {
+    const char* init_string =
+        jni->GetStringUTFChars(j_trials_init_string, NULL);
+    int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
+    // +1 for the terminating NUL appended by strcpyn.
+    field_trials_init_string = new char[init_string_length + 1];
+    rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
+    jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
+    LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
+  }
+  webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
+}
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+  OwnedFactoryAndThreads(Thread* worker_thread,
+                         Thread* signaling_thread,
+                         WebRtcVideoEncoderFactory* encoder_factory,
+                         WebRtcVideoDecoderFactory* decoder_factory,
+                         rtc::NetworkMonitorFactory* network_monitor_factory,
+                         PeerConnectionFactoryInterface* factory)
+      : worker_thread_(worker_thread),
+        signaling_thread_(signaling_thread),
+        encoder_factory_(encoder_factory),
+        decoder_factory_(decoder_factory),
+        network_monitor_factory_(network_monitor_factory),
+        factory_(factory) {}
+
+  // Releases the factory before the scoped_ptr members tear down the threads.
+  ~OwnedFactoryAndThreads() {
+    CHECK_RELEASE(factory_);
+    if (network_monitor_factory_ != nullptr) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
+    }
+  }
+
+  PeerConnectionFactoryInterface* factory() { return factory_; }
+  WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
+  WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
+  rtc::NetworkMonitorFactory* network_monitor_factory() {
+    return network_monitor_factory_;
+  }
+  // Called after the monitor factory has been released externally (see
+  // nativeSetOptions) so the dtor does not release it a second time.
+  void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
+  // Defined below; fires the Java thread-ready callbacks on both threads.
+  void InvokeJavaCallbacksOnFactoryThreads();
+
+ private:
+  void JavaCallbackOnFactoryThreads();
+
+  const scoped_ptr<Thread> worker_thread_;
+  const scoped_ptr<Thread> signaling_thread_;
+  WebRtcVideoEncoderFactory* encoder_factory_;
+  WebRtcVideoDecoderFactory* decoder_factory_;
+  rtc::NetworkMonitorFactory* network_monitor_factory_;
+  PeerConnectionFactoryInterface* factory_;  // Const after ctor except dtor.
+};
+
+// Runs on a factory thread; tells the Java PeerConnectionFactory class which
+// of the two threads (worker/signaling) is now current by invoking the
+// corresponding static callback. Does nothing if invoked on neither thread.
+void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
+  jmethodID m = nullptr;
+  if (Thread::Current() == worker_thread_) {
+    LOG(LS_INFO) << "Worker thread JavaCallback";
+    m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
+  }
+  if (Thread::Current() == signaling_thread_) {
+    LOG(LS_INFO) << "Signaling thread JavaCallback";
+    m = GetStaticMethodID(
+        jni, j_factory_class, "onSignalingThreadReady", "()V");
+  }
+  if (m != nullptr) {
+    jni->CallStaticVoidMethod(j_factory_class, m);
+    CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
+  }
+}
+
+// Synchronously runs JavaCallbackOnFactoryThreads() on both owned threads so
+// Java learns each thread's identity.
+void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
+  LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
+  worker_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+  signaling_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+}
+
+// Build the peer connection factory plus its worker/signaling threads and
+// (on Android, non-Chromium) the hardware codec and network-monitor
+// factories. Everything is bundled into an OwnedFactoryAndThreads whose
+// pointer is handed to Java; freed in nativeFreeFactory().
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
+    JNIEnv* jni, jclass) {
+  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+  // ThreadManager only WrapCurrentThread()s the thread where it is first
+  // created. Since the semantics around when auto-wrapping happens in
+  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
+  // about ramifications of auto-wrapping there.
+  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::Trace::CreateTrace();
+  Thread* worker_thread = new Thread();
+  worker_thread->SetName("worker_thread", NULL);
+  Thread* signaling_thread = new Thread();
+  signaling_thread->SetName("signaling_thread", NULL);
+  RTC_CHECK(worker_thread->Start() && signaling_thread->Start())
+      << "Failed to start threads";
+  WebRtcVideoEncoderFactory* encoder_factory = nullptr;
+  WebRtcVideoDecoderFactory* decoder_factory = nullptr;
+  rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
+
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+  // HW codec factories are only created when Java asked for HW acceleration
+  // in initializeAndroidGlobals().
+  if (video_hw_acceleration_enabled) {
+    encoder_factory = new MediaCodecVideoEncoderFactory();
+    decoder_factory = new MediaCodecVideoDecoderFactory();
+  }
+  network_monitor_factory = new AndroidNetworkMonitorFactory();
+  rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
+#endif
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      webrtc::CreatePeerConnectionFactory(worker_thread,
+                                          signaling_thread,
+                                          NULL,
+                                          encoder_factory,
+                                          decoder_factory));
+  RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+                     << "WebRTC/libjingle init likely failed on this device";
+  // The OwnedFactoryAndThreads takes ownership of the threads, the codec
+  // factories and the released factory ref.
+  OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+      worker_thread, signaling_thread,
+      encoder_factory, decoder_factory,
+      network_monitor_factory, factory.release());
+  owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+  return jlongFromPointer(owned_factory);
+}
+
+// Tear down the factory bundle created by nativeCreatePeerConnectionFactory,
+// reset field trials to none, and return the trace resources.
+JOW(void, PeerConnectionFactory_nativeFreeFactory)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  if (field_trials_init_string) {
+    webrtc::field_trial::InitFieldTrialsFromString(NULL);
+    // The string was allocated with new char[] in initializeFieldTrials(),
+    // so it must be released with delete[]; plain delete on an array
+    // allocation is undefined behavior.
+    delete[] field_trials_init_string;
+    field_trials_init_string = NULL;
+  }
+  webrtc::Trace::ReturnTrace();
+}
+
+// Unwrap the jlong handle Java holds into the factory owned by its
+// OwnedFactoryAndThreads bundle (no ownership transfer).
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+  OwnedFactoryAndThreads* owned =
+      reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  return owned->factory();
+}
+
+// Re-fire the Java thread-ready callbacks for an existing factory bundle.
+JOW(void, PeerConnectionFactory_nativeThreadsCallbacks)(
+    JNIEnv*, jclass, jlong j_p) {
+  OwnedFactoryAndThreads* owned_factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+}
+
+// Create a local media stream named |label| and hand its +1 reference to the
+// Java caller as a jlong; released in MediaStream_free(). Uses
+// jlongFromPointer, matching the rest of this file, instead of a C-style
+// (jlong) cast.
+JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring label) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<MediaStreamInterface> stream(
+      factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
+  return jlongFromPointer(stream.release());
+}
+
+// Create a video source fed by |native_capturer| under the given Java
+// constraints (copied; the jobject is not retained). The +1 reference is
+// handed to Java as a jlong and released in MediaSource_free(). Uses
+// jlongFromPointer, matching the rest of this file, instead of a C-style
+// (jlong) cast.
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
+    jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoSourceInterface> source(
+      factory->CreateVideoSource(
+          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
+          constraints.get()));
+  return jlongFromPointer(source.release());
+}
+
+// Create a video track for |native_source| with the given id. The +1
+// reference is handed to Java as a jlong and released in
+// MediaStreamTrack_free(). Uses jlongFromPointer, matching the rest of this
+// file, instead of a C-style (jlong) cast.
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoTrackInterface> track(
+      factory->CreateVideoTrack(
+          JavaToStdString(jni, id),
+          reinterpret_cast<VideoSourceInterface*>(native_source)));
+  return jlongFromPointer(track.release());
+}
+
+// Create an audio source under the given Java constraints (copied; the
+// jobject is not retained). The +1 reference is handed to Java as a jlong
+// and released in MediaSource_free(). Uses jlongFromPointer, matching the
+// rest of this file, instead of a C-style (jlong) cast.
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioSourceInterface> source(
+      factory->CreateAudioSource(constraints.get()));
+  return jlongFromPointer(source.release());
+}
+
+// Create an audio track for |native_source| with the given id. The +1
+// reference is handed to Java as a jlong and released in
+// MediaStreamTrack_free(). Uses jlongFromPointer, matching the rest of this
+// file, instead of a C-style (jlong) cast.
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+      JavaToStdString(jni, id),
+      reinterpret_cast<AudioSourceInterface*>(native_source)));
+  return jlongFromPointer(track.release());
+}
+
+// Copy selected fields of the Java PeerConnectionFactory.Options object into
+// the native factory's options. When network monitoring is disabled, also
+// release the Android network-monitor factory now and clear the owner's
+// pointer so the bundle's destructor does not release it again.
+JOW(void, PeerConnectionFactory_nativeSetOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject options) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  jclass options_class = jni->GetObjectClass(options);
+  jfieldID network_ignore_mask_field =
+      jni->GetFieldID(options_class, "networkIgnoreMask", "I");
+  int network_ignore_mask =
+      jni->GetIntField(options, network_ignore_mask_field);
+
+  jfieldID disable_encryption_field =
+      jni->GetFieldID(options_class, "disableEncryption", "Z");
+  bool disable_encryption =
+      jni->GetBooleanField(options, disable_encryption_field);
+
+  jfieldID disable_network_monitor_field =
+      jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
+  bool disable_network_monitor =
+      jni->GetBooleanField(options, disable_network_monitor_field);
+
+  PeerConnectionFactoryInterface::Options options_to_set;
+
+  // This doesn't necessarily match the c++ version of this struct; feel free
+  // to add more parameters as necessary.
+  options_to_set.network_ignore_mask = network_ignore_mask;
+  options_to_set.disable_encryption = disable_encryption;
+  options_to_set.disable_network_monitor = disable_network_monitor;
+  factory->SetOptions(options_to_set);
+
+  if (disable_network_monitor) {
+    OwnedFactoryAndThreads* owner =
+        reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+    if (owner->network_monitor_factory()) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(
+          owner->network_monitor_factory());
+      owner->clear_network_monitor_factory();
+    }
+  }
+}
+
+JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
+// Hardware video acceleration only exists in Android standalone builds.
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+  OwnedFactoryAndThreads* owned_factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+  // static_cast assumes the stored decoder factory is always a
+  // MediaCodecVideoDecoderFactory in this build configuration — presumably
+  // guaranteed by the factory-creation path; confirm against it.
+  MediaCodecVideoDecoderFactory* decoder_factory =
+      static_cast<MediaCodecVideoDecoderFactory*>
+      (owned_factory->decoder_factory());
+  if (decoder_factory) {
+    LOG(LS_INFO) << "Set EGL context for HW acceleration.";
+    decoder_factory->SetEGLContext(jni, render_egl_context);
+  }
+#endif
+}
+
+
+// Returns the result of java.lang.Enum#name() for |j_enum|, i.e. the exact
+// identifier of the enum constant, as a std::string.
+static std::string
+GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
+  jclass enum_class = FindClass(jni, className.c_str());
+  jmethodID name_method =
+      GetMethodID(jni, enum_class, "name", "()Ljava/lang/String;");
+  jstring j_name =
+      reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, name_method));
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod for "
+                       << className << ".name";
+  return JavaToStdString(jni, j_name);
+}
+
+// Maps a Java PeerConnection.IceTransportsType constant onto the native enum.
+static PeerConnectionInterface::IceTransportsType
+JavaIceTransportsTypeToNativeType(JNIEnv* jni, jobject j_ice_transports_type) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$IceTransportsType",
+      j_ice_transports_type);
+  if (enum_name == "ALL") {
+    return PeerConnectionInterface::kAll;
+  } else if (enum_name == "RELAY") {
+    return PeerConnectionInterface::kRelay;
+  } else if (enum_name == "NOHOST") {
+    return PeerConnectionInterface::kNoHost;
+  } else if (enum_name == "NONE") {
+    return PeerConnectionInterface::kNone;
+  }
+  RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+  return PeerConnectionInterface::kAll;  // Not reached.
+}
+
+// Maps a Java PeerConnection.BundlePolicy constant onto the native enum.
+static PeerConnectionInterface::BundlePolicy
+JavaBundlePolicyToNativeType(JNIEnv* jni, jobject j_bundle_policy) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$BundlePolicy",
+      j_bundle_policy);
+  if (enum_name == "BALANCED") {
+    return PeerConnectionInterface::kBundlePolicyBalanced;
+  } else if (enum_name == "MAXBUNDLE") {
+    return PeerConnectionInterface::kBundlePolicyMaxBundle;
+  } else if (enum_name == "MAXCOMPAT") {
+    return PeerConnectionInterface::kBundlePolicyMaxCompat;
+  }
+  RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kBundlePolicyBalanced;  // Not reached.
+}
+
+// Maps a Java PeerConnection.RtcpMuxPolicy constant onto the native enum.
+static PeerConnectionInterface::RtcpMuxPolicy
+JavaRtcpMuxPolicyToNativeType(JNIEnv* jni, jobject j_rtcp_mux_policy) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$RtcpMuxPolicy",
+      j_rtcp_mux_policy);
+  if (enum_name == "NEGOTIATE") {
+    return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+  } else if (enum_name == "REQUIRE") {
+    return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+  }
+  RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;  // Not reached.
+}
+
+// Maps a Java PeerConnection.TcpCandidatePolicy constant onto the native
+// enum.
+static PeerConnectionInterface::TcpCandidatePolicy
+JavaTcpCandidatePolicyToNativeType(
+    JNIEnv* jni, jobject j_tcp_candidate_policy) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
+      j_tcp_candidate_policy);
+  if (enum_name == "ENABLED") {
+    return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+  } else if (enum_name == "DISABLED") {
+    return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+  }
+  RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kTcpCandidatePolicyEnabled;  // Not reached.
+}
+
+// Maps a Java PeerConnection.KeyType constant onto rtc::KeyType.
+static rtc::KeyType JavaKeyTypeToNativeType(JNIEnv* jni, jobject j_key_type) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
+  if (enum_name == "RSA") {
+    return rtc::KT_RSA;
+  } else if (enum_name == "ECDSA") {
+    return rtc::KT_ECDSA;
+  }
+  RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+  return rtc::KT_ECDSA;  // Not reached.
+}
+
+// Maps a Java PeerConnection.ContinualGatheringPolicy constant onto the
+// native enum.
+static PeerConnectionInterface::ContinualGatheringPolicy
+    JavaContinualGatheringPolicyToNativeType(
+    JNIEnv* jni, jobject j_gathering_policy) {
+  const std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
+      j_gathering_policy);
+  if (enum_name == "GATHER_ONCE") {
+    return PeerConnectionInterface::GATHER_ONCE;
+  } else if (enum_name == "GATHER_CONTINUALLY") {
+    return PeerConnectionInterface::GATHER_CONTINUALLY;
+  }
+  RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+                   << enum_name;
+  return PeerConnectionInterface::GATHER_ONCE;  // Not reached.
+}
+
+// Converts a Java List<PeerConnection.IceServer> into native IceServer
+// entries appended to |ice_servers|, iterating the list via its
+// java.util.Iterator.
+static void JavaIceServersToJsepIceServers(
+    JNIEnv* jni, jobject j_ice_servers,
+    PeerConnectionInterface::IceServers* ice_servers) {
+  jclass list_class = GetObjectClass(jni, j_ice_servers);
+  jmethodID iterator_id = GetMethodID(
+      jni, list_class, "iterator", "()Ljava/util/Iterator;");
+  jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  jmethodID iterator_has_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
+  jmethodID iterator_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
+  while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
+    // This checks the hasNext() call made in the loop condition above.
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+    jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
+    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+    jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
+    jfieldID j_ice_server_uri_id =
+        GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
+    jfieldID j_ice_server_username_id =
+        GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
+    jfieldID j_ice_server_password_id =
+        GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
+    jstring uri = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
+    jstring username = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_username_id));
+    jstring password = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_password_id));
+    PeerConnectionInterface::IceServer server;
+    server.uri = JavaToStdString(jni, uri);
+    server.username = JavaToStdString(jni, username);
+    server.password = JavaToStdString(jni, password);
+    ice_servers->push_back(server);
+  }
+  // Checks the final hasNext() call that terminated the loop.
+  CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+}
+
+// Populates a native RTCConfiguration from the Java RTCConfiguration object.
+// Note: the Java keyType field is handled separately by the PeerConnection
+// creation path, not here.
+static void JavaRTCConfigurationToJsepRTCConfiguration(
+    JNIEnv* jni,
+    jobject j_rtc_config,
+    PeerConnectionInterface::RTCConfiguration* rtc_config) {
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+
+  // First look up all field IDs and fetch the Java-side values...
+  jfieldID j_ice_transports_type_id = GetFieldID(
+      jni, j_rtc_config_class, "iceTransportsType",
+      "Lorg/webrtc/PeerConnection$IceTransportsType;");
+  jobject j_ice_transports_type = GetObjectField(
+      jni, j_rtc_config, j_ice_transports_type_id);
+
+  jfieldID j_bundle_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "bundlePolicy",
+      "Lorg/webrtc/PeerConnection$BundlePolicy;");
+  jobject j_bundle_policy = GetObjectField(
+      jni, j_rtc_config, j_bundle_policy_id);
+
+  jfieldID j_rtcp_mux_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "rtcpMuxPolicy",
+      "Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
+  jobject j_rtcp_mux_policy = GetObjectField(
+      jni, j_rtc_config, j_rtcp_mux_policy_id);
+
+  jfieldID j_tcp_candidate_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "tcpCandidatePolicy",
+      "Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
+  jobject j_tcp_candidate_policy = GetObjectField(
+      jni, j_rtc_config, j_tcp_candidate_policy_id);
+
+  jfieldID j_ice_servers_id = GetFieldID(
+      jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
+  jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
+
+  jfieldID j_audio_jitter_buffer_max_packets_id =
+      GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
+  jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
+      jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
+
+  jfieldID j_ice_connection_receiving_timeout_id =
+      GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+
+  jfieldID j_continual_gathering_policy_id =
+      GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
+                 "Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
+  jobject j_continual_gathering_policy =
+      GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
+
+  // ...then convert each value to its native counterpart.
+  rtc_config->type =
+      JavaIceTransportsTypeToNativeType(jni, j_ice_transports_type);
+  rtc_config->bundle_policy =
+      JavaBundlePolicyToNativeType(jni, j_bundle_policy);
+  rtc_config->rtcp_mux_policy =
+      JavaRtcpMuxPolicyToNativeType(jni, j_rtcp_mux_policy);
+  rtc_config->tcp_candidate_policy =
+      JavaTcpCandidatePolicyToNativeType(jni, j_tcp_candidate_policy);
+  JavaIceServersToJsepIceServers(jni, j_ice_servers, &rtc_config->servers);
+  rtc_config->audio_jitter_buffer_max_packets =
+      GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
+  rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
+      jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
+  rtc_config->ice_connection_receiving_timeout =
+      GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+  rtc_config->continual_gathering_policy =
+      JavaContinualGatheringPolicyToNativeType(
+          jni, j_continual_gathering_policy);
+}
+
+// Creates a native PeerConnection from the Java RTCConfiguration, constraints
+// and observer. Returns a pointer whose one reference is owned by the Java
+// caller (balanced by PeerConnection disposal on the Java side — confirm).
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
+    JNIEnv *jni, jclass, jlong factory, jobject j_rtc_config,
+    jobject j_constraints, jlong observer_p) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
+      reinterpret_cast<PeerConnectionFactoryInterface*>(
+          factoryFromJava(factory)));
+
+  PeerConnectionInterface::RTCConfiguration rtc_config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+  // keyType is read here rather than in the config conversion helper because
+  // it drives certificate generation, not a config field.
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+  jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
+                                      "Lorg/webrtc/PeerConnection$KeyType;");
+  jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
+
+  // Create ECDSA certificate.
+  if (JavaKeyTypeToNativeType(jni, j_key_type) == rtc::KT_ECDSA) {
+    scoped_ptr<rtc::SSLIdentity> ssl_identity(
+        rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
+    if (ssl_identity.get()) {
+      rtc_config.certificates.push_back(
+          rtc::RTCCertificate::Create(ssl_identity.Pass()));
+      LOG(LS_INFO) << "ECDSA certificate created.";
+    } else {
+      // Failing to create certificate should not abort peer connection
+      // creation. Instead default encryption (currently RSA) will be used.
+      LOG(LS_WARNING) <<
+          "Failed to generate SSLIdentity. Default encryption will be used.";
+    }
+  }
+
+  // The observer wrapper takes ownership of the new ConstraintsWrapper.
+  PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
+  observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
+      rtc_config, observer->constraints(), NULL, NULL, observer));
+  // release() transfers this function's reference to the Java side.
+  return (jlong)pc.release();
+}
+
+// Reads the native PeerConnection pointer stored in the Java object's
+// |nativePeerConnection| field and wraps it in a scoped_refptr, holding an
+// extra reference for the duration of the JNI call.
+static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
+    JNIEnv* jni, jobject j_pc) {
+  const jfieldID native_pc_id = GetFieldID(
+      jni, GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
+  PeerConnectionInterface* pc = reinterpret_cast<PeerConnectionInterface*>(
+      GetLongField(jni, j_pc, native_pc_id));
+  return rtc::scoped_refptr<PeerConnectionInterface>(pc);
+}
+
+JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
+  // Returns the current local SDP as a Java SessionDescription, or null if
+  // no local description has been set.
+  const SessionDescriptionInterface* local_sdp =
+      ExtractNativePC(jni, j_pc)->local_description();
+  if (!local_sdp)
+    return NULL;
+  return JavaSdpFromNativeSdp(jni, local_sdp);
+}
+
+JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
+  // Returns the current remote SDP as a Java SessionDescription, or null if
+  // no remote description has been set.
+  const SessionDescriptionInterface* remote_sdp =
+      ExtractNativePC(jni, j_pc)->remote_description();
+  if (!remote_sdp)
+    return NULL;
+  return JavaSdpFromNativeSdp(jni, remote_sdp);
+}
+
+// Creates a native DataChannel and wraps it in a Java org.webrtc.DataChannel
+// that owns one reference to it.
+JOW(jobject, PeerConnection_createDataChannel)(
+    JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
+  DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
+  rtc::scoped_refptr<DataChannelInterface> channel(
+      ExtractNativePC(jni, j_pc)->CreateDataChannel(
+          JavaToStdString(jni, j_label), &init));
+  // Mustn't pass channel.get() directly through NewObject to avoid reading its
+  // vararg parameter as 64-bit and reading memory that doesn't belong to the
+  // 32-bit parameter.
+  jlong nativeChannelPtr = jlongFromPointer(channel.get());
+  RTC_CHECK(nativeChannelPtr) << "Failed to create DataChannel";
+  jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
+  jmethodID j_data_channel_ctor = GetMethodID(
+      jni, j_data_channel_class, "<init>", "(J)V");
+  jobject j_channel = jni->NewObject(
+      j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  // Channel is now owned by Java object, and will be freed from there.
+  // Expected refcount 2: one from |channel| (dropped when it goes out of
+  // scope) plus the one just added for the Java owner.
+  int bumped_count = channel->AddRef();
+  RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
+  return j_channel;
+}
+
+JOW(void, PeerConnection_createOffer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  // Wrap the Java constraints and observer, then kick off async offer
+  // creation; results are delivered through the observer wrapper.
+  ConstraintsWrapper* native_constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> sdp_observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, native_constraints));
+  ExtractNativePC(jni, j_pc)->CreateOffer(sdp_observer, native_constraints);
+}
+
+JOW(void, PeerConnection_createAnswer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  // Wrap the Java constraints and observer, then kick off async answer
+  // creation; results are delivered through the observer wrapper.
+  ConstraintsWrapper* native_constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> sdp_observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, native_constraints));
+  ExtractNativePC(jni, j_pc)->CreateAnswer(sdp_observer, native_constraints);
+}
+
+// Helper to create a SessionDescriptionInterface from a SessionDescription.
+// Ownership of the returned object passes to the caller.
+static SessionDescriptionInterface* JavaSdpToNativeSdp(
+    JNIEnv* jni, jobject j_sdp) {
+  // Fetch the SDP type via SessionDescription.Type#canonicalForm().
+  jfieldID j_type_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "type",
+      "Lorg/webrtc/SessionDescription$Type;");
+  jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
+  jmethodID j_canonical_form_id = GetMethodID(
+      jni, GetObjectClass(jni, j_type), "canonicalForm",
+      "()Ljava/lang/String;");
+  // reinterpret_cast replaces the C-style (jstring) casts used before,
+  // matching the jstring conversions elsewhere in this file.
+  jstring j_type_string = reinterpret_cast<jstring>(
+      jni->CallObjectMethod(j_type, j_canonical_form_id));
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  std::string std_type = JavaToStdString(jni, j_type_string);
+
+  jfieldID j_description_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
+  jstring j_description = reinterpret_cast<jstring>(
+      GetObjectField(jni, j_sdp, j_description_id));
+  std::string std_description = JavaToStdString(jni, j_description);
+
+  return webrtc::CreateSessionDescription(
+      std_type, std_description, NULL);
+}
+
+JOW(void, PeerConnection_setLocalDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  // Apply the Java SDP as the local description; completion is reported via
+  // the observer wrapper. No constraints apply to set-description calls.
+  rtc::scoped_refptr<SetSdpObserverWrapper> sdp_observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  SessionDescriptionInterface* native_sdp = JavaSdpToNativeSdp(jni, j_sdp);
+  ExtractNativePC(jni, j_pc)->SetLocalDescription(sdp_observer, native_sdp);
+}
+
+JOW(void, PeerConnection_setRemoteDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  // Apply the Java SDP as the remote description; completion is reported via
+  // the observer wrapper. No constraints apply to set-description calls.
+  rtc::scoped_refptr<SetSdpObserverWrapper> sdp_observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  SessionDescriptionInterface* native_sdp = JavaSdpToNativeSdp(jni, j_sdp);
+  ExtractNativePC(jni, j_pc)->SetRemoteDescription(sdp_observer, native_sdp);
+}
+
+JOW(jboolean, PeerConnection_setConfiguration)(
+    JNIEnv* jni, jobject j_pc, jobject j_rtc_config) {
+  // Translate the Java RTCConfiguration and apply it to the native
+  // PeerConnection; returns whether the update was accepted.
+  PeerConnectionInterface::RTCConfiguration config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &config);
+  return ExtractNativePC(jni, j_pc)->SetConfiguration(config);
+}
+
+JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
+    JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
+    jint j_sdp_mline_index, jstring j_candidate_sdp) {
+  // Parse the candidate SDP and hand the resulting candidate to the native
+  // PeerConnection; the candidate object is only needed during the call.
+  const std::string mid = JavaToStdString(jni, j_sdp_mid);
+  const std::string candidate_sdp = JavaToStdString(jni, j_candidate_sdp);
+  scoped_ptr<IceCandidateInterface> candidate(
+      webrtc::CreateIceCandidate(mid, j_sdp_mline_index, candidate_sdp, NULL));
+  return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+JOW(jboolean, PeerConnection_nativeAddLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  // |native_stream| is a borrowed MediaStreamInterface pointer held by the
+  // Java MediaStream object.
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(native_stream);
+  return ExtractNativePC(jni, j_pc)->AddStream(stream);
+}
+
+JOW(void, PeerConnection_nativeRemoveLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  // |native_stream| is a borrowed MediaStreamInterface pointer held by the
+  // Java MediaStream object.
+  MediaStreamInterface* stream =
+      reinterpret_cast<MediaStreamInterface*>(native_stream);
+  ExtractNativePC(jni, j_pc)->RemoveStream(stream);
+}
+
+// Returns a java.util.ArrayList<org.webrtc.RtpSender> wrapping the native
+// senders. Each Java RtpSender holds one reference (via AddRef below) which
+// must be released from the Java side (RtpSender_free).
+JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+  jmethodID j_rtp_sender_ctor =
+      GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+  auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
+  for (const auto& sender : senders) {
+    // Pass the pointer as jlong (not through varargs) — see the note in
+    // createDataChannel about 32-bit pointer widening.
+    jlong nativeSenderPtr = jlongFromPointer(sender.get());
+    jobject j_sender =
+        jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Sender is now owned by Java object, and will be freed from there.
+    sender->AddRef();
+    jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_senders;
+}
+
+// Returns a java.util.ArrayList<org.webrtc.RtpReceiver> wrapping the native
+// receivers. Each Java RtpReceiver holds one reference (via AddRef below)
+// which must be released from the Java side (RtpReceiver_free).
+JOW(jobject, PeerConnection_nativeGetReceivers)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
+  jmethodID j_rtp_receiver_ctor =
+      GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
+
+  auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
+  for (const auto& receiver : receivers) {
+    // Pass the pointer as jlong (not through varargs) — see the note in
+    // createDataChannel about 32-bit pointer widening.
+    jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
+    jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
+                                        j_rtp_receiver_ctor, nativeReceiverPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Receiver is now owned by Java object, and will be freed from there.
+    receiver->AddRef();
+    jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_receivers;
+}
+
+// Requests standard-level stats and reports them back asynchronously through
+// the Java StatsObserver wrapper. Returns whether the request was accepted.
+// Return type changed from |bool| to |jboolean|: JNI entry points must use
+// JNI types, and every other boolean-returning JOW in this file does so.
+JOW(jboolean, PeerConnection_nativeGetStats)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
+  rtc::scoped_refptr<StatsObserverWrapper> observer(
+      new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
+  return ExtractNativePC(jni, j_pc)->GetStats(
+      observer,
+      reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+      PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
+  // Map the native enum value onto the Java PeerConnection.SignalingState
+  // constant at the same ordinal.
+  return JavaEnumFromIndex(jni, "PeerConnection$SignalingState",
+                           ExtractNativePC(jni, j_pc)->signaling_state());
+}
+
+JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
+  // Map the native enum value onto the Java PeerConnection.IceConnectionState
+  // constant at the same ordinal.
+  return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState",
+                           ExtractNativePC(jni, j_pc)->ice_connection_state());
+}
+
+JOW(jobject, PeerConnection_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
+  // Map the native enum value onto the Java PeerConnection.IceGatheringState
+  // constant at the same ordinal.
+  return JavaEnumFromIndex(jni, "PeerConnection$IceGatheringState",
+                           ExtractNativePC(jni, j_pc)->ice_gathering_state());
+}
+
+// Closes the native PeerConnection. (Removed the redundant trailing
+// |return;| at the end of this void function.)
+JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
+  ExtractNativePC(jni, j_pc)->Close();
+}
+
+JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  // Hold the source in a scoped_refptr for the duration of the call, then
+  // map its state onto the Java MediaSource.State constant.
+  rtc::scoped_refptr<MediaSourceInterface> source(
+      reinterpret_cast<MediaSourceInterface*>(j_p));
+  return JavaEnumFromIndex(jni, "MediaSource$State", source->state());
+}
+
+// Creates a platform-appropriate Java VideoCapturer for |j_device_name| and
+// attaches a freshly created native capturer to it. Returns nullptr/0 when
+// the device cannot be found.
+JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
+    JNIEnv* jni, jclass, jstring j_device_name) {
+// Since we can't create platform specific java implementations in Java, we
+// defer the creation to C land.
+#if defined(ANDROID)
+  // Android: resolve the camera id from the device name, construct the Java
+  // VideoCapturerAndroid, and back it with an AndroidVideoCapturer.
+  jclass j_video_capturer_class(
+      FindClass(jni, "org/webrtc/VideoCapturerAndroid"));
+  const int camera_id = jni->CallStaticIntMethod(
+      j_video_capturer_class,
+      GetStaticMethodID(jni, j_video_capturer_class, "lookupDeviceName",
+                        "(Ljava/lang/String;)I"),
+      j_device_name);
+  CHECK_EXCEPTION(jni) << "error during VideoCapturerAndroid.lookupDeviceName";
+  if (camera_id == -1)
+    return nullptr;
+  jobject j_video_capturer = jni->NewObject(
+      j_video_capturer_class,
+      GetMethodID(jni, j_video_capturer_class, "<init>", "(I)V"), camera_id);
+  CHECK_EXCEPTION(jni) << "error during creation of VideoCapturerAndroid";
+  rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+      new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+  rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+      new webrtc::AndroidVideoCapturer(delegate));
+
+#else
+  // Non-Android: look up the capture device via the DeviceManager and wrap
+  // the resulting native capturer in a plain org.webrtc.VideoCapturer.
+  std::string device_name = JavaToStdString(jni, j_device_name);
+  scoped_ptr<cricket::DeviceManagerInterface> device_manager(
+      cricket::DeviceManagerFactory::Create());
+  RTC_CHECK(device_manager->Init()) << "DeviceManager::Init() failed";
+  cricket::Device device;
+  if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
+    LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
+    return 0;
+  }
+  scoped_ptr<cricket::VideoCapturer> capturer(
+      device_manager->CreateVideoCapturer(device));
+
+  jclass j_video_capturer_class(
+      FindClass(jni, "org/webrtc/VideoCapturer"));
+  const jmethodID j_videocapturer_ctor(GetMethodID(
+      jni, j_video_capturer_class, "<init>", "()V"));
+  jobject j_video_capturer =
+      jni->NewObject(j_video_capturer_class,
+                     j_videocapturer_ctor);
+  CHECK_EXCEPTION(jni) << "error during creation of VideoCapturer";
+
+#endif
+  // Hand ownership of the native capturer to the Java object; it is released
+  // from the Java side.
+  const jmethodID j_videocapturer_set_native_capturer(GetMethodID(
+      jni, j_video_capturer_class, "setNativeCapturer", "(J)V"));
+  jni->CallVoidMethod(j_video_capturer,
+                      j_videocapturer_set_native_capturer,
+                      jlongFromPointer(capturer.release()));
+  CHECK_EXCEPTION(jni) << "error during setNativeCapturer";
+  return j_video_capturer;
+}
+
+// Creates a GUI video renderer at window position (x, y); the returned
+// pointer is owned by the Java caller.
+JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)(
+    JNIEnv* jni, jclass, int x, int y) {
+  scoped_ptr<VideoRendererWrapper> renderer(VideoRendererWrapper::Create(
+      cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
+  // Use jlongFromPointer instead of a C-style (jlong) cast, matching the
+  // pointer->jlong conversions used elsewhere in this file.
+  return jlongFromPointer(renderer.release());
+}
+
+// Wraps the Java VideoRenderer.Callbacks object in a native renderer; the
+// returned pointer is owned by the Java caller.
+JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
+    JNIEnv* jni, jclass, jobject j_callbacks) {
+  scoped_ptr<JavaVideoRendererWrapper> renderer(
+      new JavaVideoRendererWrapper(jni, j_callbacks));
+  // Use jlongFromPointer instead of a C-style (jlong) cast, matching the
+  // pointer->jlong conversions used elsewhere in this file.
+  return jlongFromPointer(renderer.release());
+}
+
+// Copies one image plane between two direct ByteBuffers, honoring possibly
+// different row strides on each side.
+JOW(void, VideoRenderer_nativeCopyPlane)(
+    JNIEnv *jni, jclass, jobject j_src_buffer, jint width, jint height,
+    jint src_stride, jobject j_dst_buffer, jint dst_stride) {
+  size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
+  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
+  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
+  // Widen to size_t before multiplying: jint * jint could overflow signed
+  // int (UB) before the comparison against the size_t capacity.
+  RTC_CHECK(src_size >= static_cast<size_t>(src_stride) * height)
+      << "Insufficient source buffer capacity " << src_size;
+  // Also fixes the "Isufficient" typo in the failure message below.
+  RTC_CHECK(dst_size >= static_cast<size_t>(dst_stride) * height)
+      << "Insufficient destination buffer capacity " << dst_size;
+  uint8_t *src =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
+  uint8_t *dst =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
+  if (src_stride == dst_stride) {
+    // Equal strides: the planes are layout-identical, copy in one shot.
+    memcpy(dst, src, src_stride * height);
+  } else {
+    // Different strides: copy row by row, advancing each side by its own
+    // stride.
+    for (int i = 0; i < height; i++) {
+      memcpy(dst, src, width);
+      src += src_stride;
+      dst += dst_stride;
+    }
+  }
+}
+
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  // Stop the native video source owned by the Java VideoSource.
+  VideoSourceInterface* source = reinterpret_cast<VideoSourceInterface*>(j_p);
+  source->Stop();
+}
+
+// Restarts capture on the native video source.
+// NOTE(review): |j_p_format| is accepted but never used — Restart() takes no
+// arguments here; confirm whether the requested format should be applied.
+JOW(void, VideoSource_restart)(
+    JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
+}
+
+JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
+  // Convert the native track's id to a Java String.
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  return JavaStringFromStdString(jni, track->id());
+}
+
+JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
+  // Convert the native track's kind to a Java String.
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  return JavaStringFromStdString(jni, track->kind());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
+  // Report whether the native track is currently enabled.
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  return track->enabled();
+}
+
+JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  // Map the native track state onto the Java MediaStreamTrack.State constant
+  // at the same ordinal.
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  return JavaEnumFromIndex(jni, "MediaStreamTrack$State", track->state());
+}
+
+// Transitions the native track to |j_new_state|; returns whether the
+// transition succeeded. static_cast replaces the old C-style cast for the
+// int -> enum conversion, matching the named casts used in this file.
+JOW(jboolean, MediaStreamTrack_nativeSetState)(
+    JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
+  MediaStreamTrackInterface::TrackState new_state =
+      static_cast<MediaStreamTrackInterface::TrackState>(j_new_state);
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+      ->set_state(new_state);
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
+    JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
+  // Toggle the native track; returns whether the enabled state changed.
+  MediaStreamTrackInterface* track =
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p);
+  return track->set_enabled(enabled);
+}
+
+JOW(void, VideoTrack_nativeAddRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  // Attach a renderer to the video track; both pointers are borrowed from
+  // their Java owners.
+  VideoTrackInterface* track =
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer);
+  track->AddRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(void, VideoTrack_nativeRemoveRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  // Detach a renderer from the video track; both pointers are borrowed from
+  // their Java owners.
+  VideoTrackInterface* track =
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer);
+  track->RemoveRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+// Creates a rotating file log sink under |j_dirPath|, registers it with the
+// logging system at |j_severity|, and returns the sink pointer (owned by the
+// Java caller; freed via nativeDeleteSink). Returns 0 if the sink fails to
+// initialize.
+JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
+    JNIEnv* jni, jclass,
+    jstring j_dirPath, jint j_maxFileSize, jint j_severity) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::CallSessionFileRotatingLogSink* sink =
+      new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+  if (!sink->Init()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to init CallSessionFileRotatingLogSink for path " << dir_path;
+    delete sink;
+    return 0;
+  }
+  rtc::LogMessage::AddLogToStream(
+      sink, static_cast<rtc::LoggingSeverity>(j_severity));
+  return (jlong) sink;
+}
+
+JOW(void, CallSessionFileRotatingLogSink_nativeDeleteSink)(
+    JNIEnv* jni, jclass, jlong j_sink) {
+  // Unregister the sink from the logging stream before destroying it, so no
+  // log message can reach a deleted sink.
+  rtc::CallSessionFileRotatingLogSink* log_sink =
+      reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+  rtc::LogMessage::RemoveLogToStream(log_sink);
+  delete log_sink;
+}
+
+// Reads the rotated call-session log files under |j_dirPath| and returns
+// their contents as a Java byte[]. Returns an empty array on any failure.
+JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
+    JNIEnv* jni, jclass, jstring j_dirPath) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
+      new rtc::CallSessionFileRotatingStream(dir_path));
+  if (!stream->Open()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to open CallSessionFileRotatingStream for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+  size_t log_size = 0;
+  if (!stream->GetSize(&log_size) || log_size == 0) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+
+  size_t read = 0;
+  // BUG FIX: the buffer was malloc()ed but owned by rtc::scoped_ptr<jbyte>,
+  // whose destructor uses delete — an allocator/deallocator mismatch
+  // (undefined behavior). Allocate with new[] and use the array form of
+  // scoped_ptr so delete[] is invoked on destruction.
+  rtc::scoped_ptr<jbyte[]> buffer(new jbyte[log_size]);
+  stream->ReadAll(buffer.get(), log_size, &read, nullptr);
+
+  jbyteArray result = jni->NewByteArray(read);
+  jni->SetByteArrayRegion(result, 0, read, buffer.get());
+
+  return result;
+}
+
+JOW(void, RtpSender_nativeSetTrack)(JNIEnv* jni,
+                                    jclass,
+                                    jlong j_rtp_sender_pointer,
+                                    jlong j_track_pointer) {
+  // Attach the given track (or detach, when the pointer is 0) to the sender.
+  RtpSenderInterface* sender =
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer);
+  sender->SetTrack(
+      reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+// Returns a pointer to the sender's current track. track() returns a
+// scoped_refptr; release() hands that reference to the Java caller, which
+// owns it from here on.
+// NOTE(review): |j_track_pointer| is unused; presumably kept to match the
+// Java native method signature — confirm.
+JOW(jlong, RtpSender_nativeGetTrack)(JNIEnv* jni,
+                                     jclass,
+                                     jlong j_rtp_sender_pointer,
+                                     jlong j_track_pointer) {
+  return jlongFromPointer(
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpSender_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  // Convert the sender's id to a Java String.
+  RtpSenderInterface* sender =
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer);
+  return JavaStringFromStdString(jni, sender->id());
+}
+
+JOW(void, RtpSender_free)(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  // Drop the reference held on behalf of the Java RtpSender object.
+  RtpSenderInterface* sender =
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer);
+  sender->Release();
+}
+
+// Returns a pointer to the receiver's current track. track() returns a
+// scoped_refptr; release() hands that reference to the Java caller, which
+// owns it from here on.
+// NOTE(review): |j_track_pointer| is unused; presumably kept to match the
+// Java native method signature — confirm.
+JOW(jlong, RtpReceiver_nativeGetTrack)(JNIEnv* jni,
+                                       jclass,
+                                       jlong j_rtp_receiver_pointer,
+                                       jlong j_track_pointer) {
+  return jlongFromPointer(
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpReceiver_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  // Convert the receiver's id to a Java String.
+  RtpReceiverInterface* receiver =
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer);
+  return JavaStringFromStdString(jni, receiver->id());
+}
+
+JOW(void, RtpReceiver_free)(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  // Drop the reference held on behalf of the Java RtpReceiver object.
+  RtpReceiverInterface* receiver =
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer);
+  receiver->Release();
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
new file mode 100644
index 0000000000..05f1b23768
--- /dev/null
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
+
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc_jni {
+
+SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
+ jobject egl_shared_context)
+ : j_surface_texture_helper_class_(
+ jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
+ j_surface_texture_helper_(
+ jni,
+ jni->CallStaticObjectMethod(
+ *j_surface_texture_helper_class_,
+ GetStaticMethodID(jni,
+ *j_surface_texture_helper_class_,
+ "create",
+ "(Ljavax/microedition/khronos/egl/EGLContext;)"
+ "Lorg/webrtc/SurfaceTextureHelper;"),
+ egl_shared_context)),
+ j_return_texture_method_(GetMethodID(jni,
+ *j_surface_texture_helper_class_,
+ "returnTextureFrame",
+ "()V")) {
+ CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
+}
+
+SurfaceTextureHelper::~SurfaceTextureHelper() {
+}
+
+void SurfaceTextureHelper::ReturnTextureFrame() const {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
+
+ CHECK_EXCEPTION(
+ jni) << "error during SurfaceTextureHelper.returnTextureFrame";
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+SurfaceTextureHelper::CreateTextureFrame(int width, int height,
+ const NativeTextureHandleImpl& native_handle) {
+ return new rtc::RefCountedObject<AndroidTextureBuffer>(
+ width, height, native_handle,
+ rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
new file mode 100644
index 0000000000..dc9d2b853d
--- /dev/null
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -0,0 +1,85 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef TALK_APP_WEBRTC_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+#define TALK_APP_WEBRTC_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+
+#include <jni.h>
+
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/interface/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Helper class to create and synchronize access to an Android SurfaceTexture.
+// It is used for creating webrtc::VideoFrameBuffers from a SurfaceTexture when
+// the SurfaceTexture has been updated.
+// When the VideoFrameBuffer is released, this class returns the buffer to the
+// java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
+// can be released on an arbitrary thread.
+// SurfaceTextureHelper is reference counted to make sure that it is not
+// destroyed while a VideoFrameBuffer is in use.
+// This class is the C++ counterpart of the java class SurfaceTextureHelper.
+// Usage:
+// 1. Create an instance of this class.
+// 2. Call GetJavaSurfaceTextureHelper to get the Java SurfaceTextureHelper.
+// 3. Register a listener to the Java SurfaceListener and start producing
+// new buffers.
+// 3. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+class SurfaceTextureHelper : public rtc::RefCountInterface {
+ public:
+ SurfaceTextureHelper(JNIEnv* jni, jobject shared_egl_context);
+
+ // Returns the Java SurfaceTextureHelper.
+ jobject GetJavaSurfaceTextureHelper() const {
+ return *j_surface_texture_helper_;
+ }
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
+ int width,
+ int height,
+ const NativeTextureHandleImpl& native_handle);
+
+ protected:
+ ~SurfaceTextureHelper();
+
+ private:
+ // May be called on arbitrary thread.
+ void ReturnTextureFrame() const;
+
+ const ScopedGlobalRef<jclass> j_surface_texture_helper_class_;
+ const ScopedGlobalRef<jobject> j_surface_texture_helper_;
+ const jmethodID j_return_texture_method_;
+};
+
+} // namespace webrtc_jni
+
+#endif // TALK_APP_WEBRTC_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
diff --git a/talk/app/webrtc/java/src/org/webrtc/AudioSource.java b/talk/app/webrtc/java/src/org/webrtc/AudioSource.java
new file mode 100644
index 0000000000..06177a67a5
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/AudioSource.java
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java b/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000000..32000808b3
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java
@@ -0,0 +1,35 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/CallSessionFileRotatingLogSink.java b/talk/app/webrtc/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000000..f7032a739b
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,57 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(
+ String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long nativeSink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/DataChannel.java b/talk/app/webrtc/java/src/org/webrtc/DataChannel.java
new file mode 100644
index 0000000000..1866098703
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/DataChannel.java
@@ -0,0 +1,143 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannel. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated = false;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ public Init() {}
+
+ // Called only by native code.
+ private Init(
+ boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether |data| contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: |buffer.data| will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+
+ private final long nativeDataChannel;
+ private long nativeObserver;
+
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register |observer|, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ if (nativeObserver != 0) {
+ unregisterObserverNative(nativeObserver);
+ }
+ nativeObserver = registerObserverNative(observer);
+ }
+ private native long registerObserverNative(Observer observer);
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ unregisterObserverNative(nativeObserver);
+ }
+ private native void unregisterObserverNative(long nativeObserver);
+
+ public native String label();
+
+ public native State state();
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using SendBuffer but have not yet been transmitted
+ * to the network.
+ */
+ public native long bufferedAmount();
+
+ /** Close the channel. */
+ public native void close();
+
+ /** Send |data| to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return sendNative(data, buffer.binary);
+ }
+ private native boolean sendNative(byte[] data, boolean binary);
+
+ /** Dispose of native resources attached to this channel. */
+ public native void dispose();
+};
diff --git a/talk/app/webrtc/java/src/org/webrtc/IceCandidate.java b/talk/app/webrtc/java/src/org/webrtc/IceCandidate.java
new file mode 100644
index 0000000000..eb42ce48cc
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/IceCandidate.java
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+ public final String sdpMid;
+ public final int sdpMLineIndex;
+ public final String sdp;
+
+ public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+ this.sdpMid = sdpMid;
+ this.sdpMLineIndex = sdpMLineIndex;
+ this.sdp = sdp;
+ }
+
+ public String toString() {
+ return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
+ }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
new file mode 100644
index 0000000000..42af9c7fd0
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -0,0 +1,418 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
+// This class is an implementation detail of the Java PeerConnection API.
+// MediaCodec is thread-hostile so this class must be operated on a single
+// thread.
+public class MediaCodecVideoDecoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
+ // possibly to minimize the amount of translation work necessary.
+
+ private static final String TAG = "MediaCodecVideoDecoder";
+
+ // Tracks webrtc::VideoCodecType.
+ public enum VideoCodecType {
+ VIDEO_CODEC_VP8,
+ VIDEO_CODEC_VP9,
+ VIDEO_CODEC_H264
+ }
+
+ private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
+ // Active running decoder instance. Set in initDecode() (called from native code)
+ // and reset to null in release() call.
+ private static MediaCodecVideoDecoder runningInstance = null;
+ private Thread mediaCodecThread;
+ private MediaCodec mediaCodec;
+ private ByteBuffer[] inputBuffers;
+ private ByteBuffer[] outputBuffers;
+ private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String H264_MIME_TYPE = "video/avc";
+ // List of supported HW VP8 decoders.
+ private static final String[] supportedVp8HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW H.264 decoders.
+ private static final String[] supportedH264HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Intel." };
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ private static final int
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+ // Allowable color formats supported by codec - in order of preference.
+ private static final List<Integer> supportedColorList = Arrays.asList(
+ CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
+ private int colorFormat;
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+ private boolean useSurface;
+ private int textureID = 0;
+ private SurfaceTexture surfaceTexture = null;
+ private Surface surface = null;
+ private EglBase eglBase;
+
+ private MediaCodecVideoDecoder() {
+ }
+
+ // Helper struct for findVp8Decoder() below.
+ private static class DecoderProperties {
+ public DecoderProperties(String codecName, int colorFormat) {
+ this.codecName = codecName;
+ this.colorFormat = colorFormat;
+ }
+ public final String codecName; // OpenMax component name for VP8 codec.
+ public final int colorFormat; // Color format supported by codec.
+ }
+
+ private static DecoderProperties findDecoder(
+ String mime, String[] supportedCodecPrefixes) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ return null; // MediaCodec.setParameters is missing.
+ }
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ if (info.isEncoder()) {
+ continue;
+ }
+ String name = null;
+ for (String mimeType : info.getSupportedTypes()) {
+ if (mimeType.equals(mime)) {
+ name = info.getName();
+ break;
+ }
+ }
+ if (name == null) {
+ continue; // No HW support in this codec; try the next one.
+ }
+ Logging.v(TAG, "Found candidate decoder " + name);
+
+ // Check if this is supported decoder.
+ boolean supportedCodec = false;
+ for (String codecPrefix : supportedCodecPrefixes) {
+ if (name.startsWith(codecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+ // Check if codec supports either yuv420 or nv12.
+ CodecCapabilities capabilities =
+ info.getCapabilitiesForType(mime);
+ for (int colorFormat : capabilities.colorFormats) {
+ Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+ }
+ for (int supportedColorFormat : supportedColorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW decoder.
+ Logging.d(TAG, "Found target decoder " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new DecoderProperties(name, codecColorFormat);
+ }
+ }
+ }
+ }
+ return null; // No HW decoder.
+ }
+
+ public static boolean isVp8HwSupported() {
+ return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+ }
+
+ public static boolean isH264HwSupported() {
+ return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ private void checkOnMediaCodecThread() throws IllegalStateException {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new IllegalStateException(
+ "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
+ private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ if (mediaCodecThread != null) {
+ throw new RuntimeException("Forgot to release()?");
+ }
+ useSurface = (sharedContext != null);
+ String mime = null;
+ String[] supportedCodecPrefixes = null;
+ if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+ mime = VP8_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+ mime = H264_MIME_TYPE;
+ supportedCodecPrefixes = supportedH264HwCodecPrefixes;
+ } else {
+ throw new RuntimeException("Non supported codec " + type);
+ }
+ DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
+ if (properties == null) {
+ throw new RuntimeException("Cannot find HW decoder for " + type);
+ }
+ Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
+ ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
+ ". Use Surface: " + useSurface);
+ if (sharedContext != null) {
+ Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
+ }
+ runningInstance = this; // Decoder is now running and can be queried for stack traces.
+ mediaCodecThread = Thread.currentThread();
+ try {
+ this.width = width;
+ this.height = height;
+ stride = width;
+ sliceHeight = height;
+
+ if (useSurface) {
+ // Create shared EGL context.
+ eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ // Create output surface
+ textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ Logging.d(TAG, "Video decoder TextureID = " + textureID);
+ surfaceTexture = new SurfaceTexture(textureID);
+ surface = new Surface(surfaceTexture);
+ }
+
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ if (!useSurface) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ }
+ Logging.d(TAG, " Format: " + format);
+ mediaCodec =
+ MediaCodecVideoEncoder.createByCodecName(properties.codecName);
+ if (mediaCodec == null) {
+ Logging.e(TAG, "Can not create media decoder");
+ return false;
+ }
+ mediaCodec.configure(format, surface, null, 0);
+ mediaCodec.start();
+ colorFormat = properties.colorFormat;
+ outputBuffers = mediaCodec.getOutputBuffers();
+ inputBuffers = mediaCodec.getInputBuffers();
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+ ". Output buffers: " + outputBuffers.length);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ return false;
+ }
+ }
+
+  // Stops and releases the MediaCodec and, for surface decoding, tears down the
+  // output Surface, the OES texture and the EGL context created in initDecode().
+  // Must be called on the same thread that initialized the decoder.
+  private void release() {
+    Logging.d(TAG, "Java releaseDecoder");
+    checkOnMediaCodecThread();
+    try {
+      mediaCodec.stop();
+      mediaCodec.release();
+    } catch (IllegalStateException e) {
+      // Log and continue: the fields below are cleared even if stop() failed, so
+      // this object can no longer be mistaken for a running decoder instance.
+      Logging.e(TAG, "release failed", e);
+    }
+    mediaCodec = null;
+    mediaCodecThread = null;
+    runningInstance = null;
+    if (useSurface) {
+      // Surface decoding: release GL/EGL resources in reverse order of creation.
+      surface.release();
+      surface = null;
+      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
+      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
+      textureID = 0;
+      eglBase.release();
+      eglBase = null;
+    }
+    Logging.d(TAG, "Java releaseDecoder done");
+  }
+
+  // Dequeue an input buffer and return its index, -1 if no input buffer is
+  // available, or -2 if the codec is no longer operative.
+  private int dequeueInputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
+    } catch (IllegalStateException e) {
+      // Fixed typo in the log message ("dequeueIntputBuffer").
+      Logging.e(TAG, "dequeueInputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  // Queues |size| bytes that the caller has already written into input buffer
+  // |inputBufferIndex| for decoding, tagged with |timestampUs|. Returns false
+  // if the codec is no longer operative.
+  private boolean queueInputBuffer(
+      int inputBufferIndex, int size, long timestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      inputBuffers[inputBufferIndex].position(0);
+      inputBuffers[inputBufferIndex].limit(size);
+      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
+      return true;
+    } catch (IllegalStateException e) {
+      // Log the actual method name ("decode failed" was misleading) and match
+      // the brace style used by the sibling methods in this class.
+      Logging.e(TAG, "queueInputBuffer failed", e);
+      return false;
+    }
+  }
+
+  // Helper structs for dequeueOutputBuffer() below.
+  // Describes one decoded frame residing in a MediaCodec output byte buffer.
+  // NOTE(review): fields appear to be read from native code by name — confirm
+  // before renaming any of them.
+  private static class DecodedByteBuffer {
+    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+      this.index = index;
+      this.offset = offset;
+      this.size = size;
+      this.presentationTimestampUs = presentationTimestampUs;
+    }
+
+    // Output buffer index; must be handed back via returnDecodedByteBuffer().
+    private final int index;
+    // Byte offset of the frame within the output buffer (from BufferInfo).
+    private final int offset;
+    // Frame size in bytes (from BufferInfo).
+    private final int size;
+    // Presentation timestamp in microseconds (from BufferInfo).
+    private final long presentationTimestampUs;
+  }
+
+  // Describes one decoded frame that has been released for rendering into the
+  // decoder's SurfaceTexture (|textureID| is the OES texture it is attached to).
+  // Note the TODO in dequeueOutputBuffer(): rendering completion is not awaited
+  // before this object is returned.
+  private static class DecodedTextureBuffer {
+    private final int textureID;
+    private final long presentationTimestampUs;
+
+    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+      this.textureID = textureID;
+      this.presentationTimestampUs = presentationTimestampUs;
+    }
+  }
+
+  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
+  // DecodedTexturebuffer depending on |useSurface| configuration.
+  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+  // upon codec error.
+  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
+      throws IllegalStateException, MediaCodec.CodecException {
+    checkOnMediaCodecThread();
+    // Drain the decoder until receiving a decoded buffer or hitting
+    // MediaCodec.INFO_TRY_AGAIN_LATER.
+    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+    while (true) {
+      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+      switch (result) {
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          // No output became ready within |dequeueTimeoutUs|.
+          return null;
+        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+          // The codec re-allocated its output buffers; refresh the cached array
+          // before the next dequeue attempt.
+          outputBuffers = mediaCodec.getOutputBuffers();
+          Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+          break;
+        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+          // Pick up the stream's real dimensions, color format and layout.
+          MediaFormat format = mediaCodec.getOutputFormat();
+          Logging.d(TAG, "Decoder format changed: " + format.toString());
+          width = format.getInteger(MediaFormat.KEY_WIDTH);
+          height = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+            colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+            Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+            // Byte-buffer decoding can only handle the whitelisted color formats.
+            if (!supportedColorList.contains(colorFormat)) {
+              throw new IllegalStateException("Non supported color format: " + colorFormat);
+            }
+          }
+          // Vendor-specific keys describing the buffer layout; may be absent.
+          if (format.containsKey("stride")) {
+            stride = format.getInteger("stride");
+          }
+          if (format.containsKey("slice-height")) {
+            sliceHeight = format.getInteger("slice-height");
+          }
+          Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+          // The layout can never be smaller than the visible frame.
+          stride = Math.max(width, stride);
+          sliceHeight = Math.max(height, sliceHeight);
+          break;
+        default:
+          // Output buffer decoded.
+          if (useSurface) {
+            // Render directly to the SurfaceTexture and return its texture id.
+            mediaCodec.releaseOutputBuffer(result, true /* render */);
+            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
+            // frame.
+            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          } else {
+            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+          }
+      }
+    }
+  }
+
+  // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
+  // non-surface decoding.
+  // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
+  // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
+  // MediaCodec.CodecException upon codec error.
+  private void returnDecodedByteBuffer(int index)
+      throws IllegalStateException, MediaCodec.CodecException {
+    checkOnMediaCodecThread();
+    if (useSurface) {
+      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+    }
+    // render=false: byte-buffer frames are consumed by the caller, never rendered.
+    mediaCodec.releaseOutputBuffer(index, false /* render */);
+  }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
new file mode 100644
index 0000000000..f3f03c1d20
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -0,0 +1,447 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.Bundle;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
+// This class is an implementation detail of the Java PeerConnection API.
+// MediaCodec is thread-hostile so this class must be operated on a single
+// thread.
+public class MediaCodecVideoEncoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoEncoder API as closely as
+ // possibly to minimize the amount of translation work necessary.
+
+  private static final String TAG = "MediaCodecVideoEncoder";
+
+  // Tracks webrtc::VideoCodecType.
+  public enum VideoCodecType {
+    VIDEO_CODEC_VP8,
+    VIDEO_CODEC_VP9,
+    VIDEO_CODEC_H264
+  }
+
+  private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
+  // Active running encoder instance. Set in initEncode() (called from native code)
+  // and reset to null in release() call.
+  private static MediaCodecVideoEncoder runningInstance = null;
+  private Thread mediaCodecThread;
+  private MediaCodec mediaCodec;
+  private ByteBuffer[] outputBuffers;
+  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  private static final String H264_MIME_TYPE = "video/avc";
+  // List of supported HW VP8 codecs.
+  private static final String[] supportedVp8HwCodecPrefixes =
+    {"OMX.qcom.", "OMX.Intel." };
+  // List of supported HW H.264 codecs.
+  private static final String[] supportedH264HwCodecPrefixes =
+    {"OMX.qcom." };
+  // List of devices with poor H.264 encoder quality.
+  private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
+    // HW H.264 encoder on below devices has poor bitrate control - actual
+    // bitrates deviates a lot from the target value.
+    "SAMSUNG-SGH-I337",
+    "Nexus 7",
+    "Nexus 4"
+  };
+
+  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+  // in OMX_Video.h
+  private static final int VIDEO_ControlRateVariable = 1;
+  private static final int VIDEO_ControlRateConstant = 2;
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  private static final int
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+  // Allowable color formats supported by codec - in order of preference.
+  private static final int[] supportedColorList = {
+    CodecCapabilities.COLOR_FormatYUV420Planar,
+    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
+  };
+  // Color format of the codec's input buffers; set from the selected
+  // EncoderProperties in initEncode().
+  private int colorFormat;
+  // Video encoder type.
+  private VideoCodecType type;
+  // SPS and PPS NALs (Config frame) for H.264.
+  private ByteBuffer configData = null;
+
+  // Instances are created only from native code (see class comment above).
+  private MediaCodecVideoEncoder() {
+  }
+
+  // Helper struct for findHwEncoder() below.
+  private static class EncoderProperties {
+    public EncoderProperties(String codecName, int colorFormat) {
+      this.codecName = codecName;
+      this.colorFormat = colorFormat;
+    }
+    public final String codecName; // OpenMax component name for HW codec.
+    public final int colorFormat; // Color format supported by codec.
+  }
+
+  // Scans MediaCodecList for a HW encoder handling |mime| whose OMX component
+  // name starts with one of |supportedHwCodecPrefixes| and which advertises one
+  // of our supported color formats. Returns its name and chosen color format,
+  // or null if no usable HW encoder exists on this device/OS version.
+  private static EncoderProperties findHwEncoder(
+      String mime, String[] supportedHwCodecPrefixes) {
+    // MediaCodec.setParameters is missing for JB and below, so bitrate
+    // can not be adjusted dynamically.
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+      return null;
+    }
+
+    // Check if device is in H.264 exception list.
+    if (mime.equals(H264_MIME_TYPE)) {
+      List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
+      if (exceptionModels.contains(Build.MODEL)) {
+        Logging.w(TAG, "Model: " + Build.MODEL +
+            " has black listed H.264 encoder.");
+        return null;
+      }
+    }
+
+    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+      if (!info.isEncoder()) {
+        continue;
+      }
+      String name = null;
+      for (String mimeType : info.getSupportedTypes()) {
+        if (mimeType.equals(mime)) {
+          name = info.getName();
+          break;
+        }
+      }
+      if (name == null) {
+        continue;  // No HW support in this codec; try the next one.
+      }
+      Logging.v(TAG, "Found candidate encoder " + name);
+
+      // Check if this is supported HW encoder.
+      boolean supportedCodec = false;
+      for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+        if (name.startsWith(hwCodecPrefix)) {
+          supportedCodec = true;
+          break;
+        }
+      }
+      if (!supportedCodec) {
+        continue;
+      }
+
+      CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+      for (int colorFormat : capabilities.colorFormats) {
+        Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+      }
+
+      // Check if codec supports either yuv420 or nv12.
+      for (int supportedColorFormat : supportedColorList) {
+        for (int codecColorFormat : capabilities.colorFormats) {
+          if (codecColorFormat == supportedColorFormat) {
+            // Found supported HW encoder.
+            Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
+                ". Color: 0x" + Integer.toHexString(codecColorFormat));
+            return new EncoderProperties(name, codecColorFormat);
+          }
+        }
+      }
+    }
+    return null;  // No supported HW encoder for |mime|.
+  }
+
+  // True if this device has a usable HW VP8 encoder (KitKat+, supported OMX
+  // prefix, supported color format).
+  public static boolean isVp8HwSupported() {
+    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+  }
+
+  // True if this device has a usable, non-blacklisted HW H.264 encoder.
+  public static boolean isH264HwSupported() {
+    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+  }
+
+ private void checkOnMediaCodecThread() {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new RuntimeException(
+ "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+  // Wraps MediaCodec.createByCodecName(), returning null instead of throwing on
+  // failure so callers can handle missing codecs uniformly.
+  static MediaCodec createByCodecName(String codecName) {
+    try {
+      // In the L-SDK this call can throw IOException so in order to work in
+      // both cases catch an exception.
+      return MediaCodec.createByCodecName(codecName);
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  // Configures and starts a HW encoder of |type| at |width|x|height|, targeting
+  // |kbps| and |fps|, on the calling thread (which becomes the codec thread).
+  // Called from native code.
+  // Return the array of input buffers, or null on failure.
+  private ByteBuffer[] initEncode(
+      VideoCodecType type, int width, int height, int kbps, int fps) {
+    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
+        ". @ " + kbps + " kbps. Fps: " + fps +
+        ". Color: 0x" + Integer.toHexString(colorFormat));
+    if (mediaCodecThread != null) {
+      throw new RuntimeException("Forgot to release()?");
+    }
+    this.type = type;
+    EncoderProperties properties = null;
+    String mime = null;
+    // Key frame interval in seconds, passed to KEY_I_FRAME_INTERVAL below.
+    int keyFrameIntervalSec = 0;
+    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+      mime = VP8_MIME_TYPE;
+      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+      keyFrameIntervalSec = 100;
+    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+      mime = H264_MIME_TYPE;
+      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+      keyFrameIntervalSec = 20;
+    }
+    // Note: VIDEO_CODEC_VP9 falls through with |properties| == null and is
+    // rejected here.
+    if (properties == null) {
+      throw new RuntimeException("Can not find HW encoder for " + type);
+    }
+    runningInstance = this; // Encoder is now running and can be queried for stack traces.
+    mediaCodecThread = Thread.currentThread();
+    try {
+      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+      format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
+      // Constant bitrate mode (see VIDEO_ControlRate* constants above).
+      format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
+      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+      format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+      Logging.d(TAG, "  Format: " + format);
+      mediaCodec = createByCodecName(properties.codecName);
+      if (mediaCodec == null) {
+        Logging.e(TAG, "Can not create media encoder");
+        return null;
+      }
+      mediaCodec.configure(
+          format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      mediaCodec.start();
+      colorFormat = properties.colorFormat;
+      outputBuffers = mediaCodec.getOutputBuffers();
+      ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+          ". Output buffers: " + outputBuffers.length);
+      return inputBuffers;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "initEncode failed", e);
+      return null;
+    }
+  }
+
+  // Queues |size| bytes in input buffer |inputBuffer| for encoding; when
+  // |isKeyframe| is set, first asks the codec to produce a sync frame soon.
+  // Returns false if the codec is no longer operative.
+  private boolean encode(
+      boolean isKeyframe, int inputBuffer, int size,
+      long presentationTimestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      if (isKeyframe) {
+        // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+        // indicate this in queueInputBuffer() below and guarantee _this_ frame
+        // be encoded as a key frame, but sadly that flag is ignored. Instead,
+        // we request a key frame "soon".
+        Logging.d(TAG, "Sync frame request");
+        Bundle b = new Bundle();
+        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+        mediaCodec.setParameters(b);
+      }
+      mediaCodec.queueInputBuffer(
+          inputBuffer, 0, size, presentationTimestampUs, 0);
+      return true;
+    }
+    catch (IllegalStateException e) {
+      Logging.e(TAG, "encode failed", e);
+      return false;
+    }
+  }
+
+  // Stops and releases the MediaCodec and clears the running-instance state.
+  // Must be called on the codec thread; after this the object may be
+  // re-initialized via initEncode().
+  private void release() {
+    Logging.d(TAG, "Java releaseEncoder");
+    checkOnMediaCodecThread();
+    try {
+      mediaCodec.stop();
+      mediaCodec.release();
+    } catch (IllegalStateException e) {
+      // Log and continue so the fields below are cleared regardless.
+      Logging.e(TAG, "release failed", e);
+    }
+    mediaCodec = null;
+    mediaCodecThread = null;
+    runningInstance = null;
+    Logging.d(TAG, "Java releaseEncoder done");
+  }
+
+  // Dynamically updates the running encoder's target bitrate. The frame rate
+  // argument is ignored - the HW encoder is supposed to use video frame
+  // timestamps for bit allocation. Returns false if the codec is no longer
+  // operative.
+  private boolean setRates(int kbps, int frameRateIgnored) {
+    checkOnMediaCodecThread();
+    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
+    Bundle bitrateUpdate = new Bundle();
+    bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 1000 * kbps);
+    try {
+      mediaCodec.setParameters(bitrateUpdate);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "setRates failed", e);
+      return false;
+    }
+    return true;
+  }
+
+  // Dequeue an input buffer and return its index, -1 if no input buffer is
+  // available, or -2 if the codec is no longer operative.
+  private int dequeueInputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+    } catch (IllegalStateException e) {
+      // Fixed typo in the log message ("dequeueIntputBuffer").
+      Logging.e(TAG, "dequeueInputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  // Helper struct for dequeueOutputBuffer() below.
+  // Describes one encoded frame handed back to the caller. NOTE(review): fields
+  // appear to be read from native code by name — confirm before renaming.
+  private static class OutputBufferInfo {
+    public OutputBufferInfo(
+        int index, ByteBuffer buffer,
+        boolean isKeyFrame, long presentationTimestampUs) {
+      this.index = index;
+      this.buffer = buffer;
+      this.isKeyFrame = isKeyFrame;
+      this.presentationTimestampUs = presentationTimestampUs;
+    }
+
+    // Output buffer index to return via releaseOutputBuffer(); -1 on failure.
+    private final int index;
+    // Encoded payload (slice of the codec buffer, or a fresh buffer with SPS/PPS
+    // prepended for H.264 key frames); null on failure.
+    private final ByteBuffer buffer;
+    private final boolean isKeyFrame;
+    private final long presentationTimestampUs;
+  }
+
+  // Dequeue and return an output buffer, or null if no output is ready. Return
+  // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
+  private OutputBufferInfo dequeueOutputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+      int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      // Check if this is config frame and save configuration data.
+      if (result >= 0) {
+        boolean isConfigFrame =
+            (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+        if (isConfigFrame) {
+          // Stash SPS/PPS in |configData| so it can be prepended to every
+          // H.264 key frame below, then ask the codec for the next buffer.
+          Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
+              ". Size: " + info.size);
+          configData = ByteBuffer.allocateDirect(info.size);
+          outputBuffers[result].position(info.offset);
+          outputBuffers[result].limit(info.offset + info.size);
+          configData.put(outputBuffers[result]);
+          // Release buffer back.
+          mediaCodec.releaseOutputBuffer(result, false);
+          // Query next output.
+          result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+        }
+      }
+      if (result >= 0) {
+        // MediaCodec doesn't care about Buffer position/remaining/etc so we can
+        // mess with them to get a slice and avoid having to pass extra
+        // (BufferInfo-related) parameters back to C++.
+        ByteBuffer outputBuffer = outputBuffers[result].duplicate();
+        outputBuffer.position(info.offset);
+        outputBuffer.limit(info.offset + info.size);
+        // Check key frame flag.
+        boolean isKeyFrame =
+            (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+        if (isKeyFrame) {
+          Logging.d(TAG, "Sync frame generated");
+        }
+        if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
+          Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
+              " to output buffer with offset " + info.offset + ", size " +
+              info.size);
+          // For H.264 key frame append SPS and PPS NALs at the start
+          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
+              configData.capacity() + info.size);
+          configData.rewind();
+          keyFrameBuffer.put(configData);
+          keyFrameBuffer.put(outputBuffer);
+          keyFrameBuffer.position(0);
+          return new OutputBufferInfo(result, keyFrameBuffer,
+              isKeyFrame, info.presentationTimeUs);
+        } else {
+          return new OutputBufferInfo(result, outputBuffer.slice(),
+              isKeyFrame, info.presentationTimeUs);
+        }
+      } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+        // Buffers were re-allocated: refresh the cached array and retry.
+        outputBuffers = mediaCodec.getOutputBuffers();
+        return dequeueOutputBuffer();
+      } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+        // Format change carries no payload for us: just retry.
+        return dequeueOutputBuffer();
+      } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
+        return null;
+      }
+      throw new RuntimeException("dequeueOutputBuffer: " + result);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueOutputBuffer failed", e);
+      return new OutputBufferInfo(-1, null, false, -1);
+    }
+  }
+
+  // Hands a dequeued output buffer back to the codec so it can be reused.
+  // Returns false if the codec has left the Executing state and is no longer
+  // operable.
+  private boolean releaseOutputBuffer(int index) {
+    checkOnMediaCodecThread();
+    boolean released = true;
+    try {
+      mediaCodec.releaseOutputBuffer(index, false /* render */);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "releaseOutputBuffer failed", e);
+      released = false;
+    }
+    return released;
+  }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java b/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000000..730df3553d
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+  /** Simple String key/value pair. */
+  public static class KeyValuePair {
+    private final String key;
+    private final String value;
+
+    public KeyValuePair(String key, String value) {
+      this.key = key;
+      this.value = value;
+    }
+
+    public String getKey() {
+      return key;
+    }
+
+    public String getValue() {
+      return value;
+    }
+
+    @Override
+    public String toString() {
+      return key + ": " + value;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+      KeyValuePair that = (KeyValuePair)other;
+      return key.equals(that.key) && value.equals(that.value);
+    }
+
+    @Override
+    public int hashCode() {
+      // Order-dependent combination: the previous key + value sum made
+      // ("a", "b") and ("b", "a") collide.
+      return 31 * key.hashCode() + value.hashCode();
+    }
+  }
+
+  // Constraints the implementation is required to honor.
+  public final List<KeyValuePair> mandatory;
+  // Constraints applied on a best-effort basis.
+  public final List<KeyValuePair> optional;
+
+  public MediaConstraints() {
+    mandatory = new LinkedList<KeyValuePair>();
+    optional = new LinkedList<KeyValuePair>();
+  }
+
+  // Renders |list| as "[k1: v1, k2: v2, ...]".
+  private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+    StringBuilder builder = new StringBuilder("[");
+    for (KeyValuePair pair : list) {
+      if (builder.length() > 1) {
+        builder.append(", ");
+      }
+      builder.append(pair.toString());
+    }
+    return builder.append("]").toString();
+  }
+
+  @Override
+  public String toString() {
+    return "mandatory: " + stringifyKeyValuePairList(mandatory) +
+        ", optional: " + stringifyKeyValuePairList(optional);
+  }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaSource.java b/talk/app/webrtc/java/src/org/webrtc/MediaSource.java
new file mode 100644
index 0000000000..d79b4628eb
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaSource.java
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+  /** Tracks MediaSourceInterface.SourceState */
+  public enum State {
+    INITIALIZING, LIVE, ENDED, MUTED
+  }
+
+  // Raw pointer to the native source; owned by this wrapper until dispose().
+  final long nativeSource; // Package-protected for PeerConnectionFactory.
+
+  public MediaSource(long nativeSource) {
+    this.nativeSource = nativeSource;
+  }
+
+  // Queries the current state of the underlying native source.
+  public State state() {
+    return nativeState(nativeSource);
+  }
+
+  // Frees the native source. This object must not be used afterwards.
+  public void dispose() {
+    free(nativeSource);
+  }
+
+  private static native State nativeState(long pointer);
+
+  private static native void free(long nativeSource);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaStream.java b/talk/app/webrtc/java/src/org/webrtc/MediaStream.java
new file mode 100644
index 0000000000..be00f13e67
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaStream.java
@@ -0,0 +1,134 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+  // Java-side mirrors of the tracks attached to the native stream. Mutated
+  // only through addTrack()/removeTrack()/dispose() below so they stay in sync
+  // with the native side.
+  public final LinkedList<AudioTrack> audioTracks;
+  public final LinkedList<VideoTrack> videoTracks;
+  // Video tracks added via addPreservedTrack(); these survive dispose().
+  public final LinkedList<VideoTrack> preservedVideoTracks;
+  // Package-protected for PeerConnection.
+  final long nativeStream;
+
+  public MediaStream(long nativeStream) {
+    audioTracks = new LinkedList<AudioTrack>();
+    videoTracks = new LinkedList<VideoTrack>();
+    preservedVideoTracks = new LinkedList<VideoTrack>();
+    this.nativeStream = nativeStream;
+  }
+
+  // Adds |track| to the native stream; mirrors it locally only on success.
+  public boolean addTrack(AudioTrack track) {
+    if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
+      audioTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Adds |track| to the native stream; mirrors it locally only on success.
+  public boolean addTrack(VideoTrack track) {
+    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+      videoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Tracks added in addTrack() call will be auto released once MediaStream.dispose()
+  // is called. If video track need to be preserved after MediaStream is destroyed it
+  // should be added to MediaStream using addPreservedTrack() call.
+  public boolean addPreservedTrack(VideoTrack track) {
+    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+      preservedVideoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Detaches |track| from the native stream; un-mirrors it only on success.
+  public boolean removeTrack(AudioTrack track) {
+    if (nativeRemoveAudioTrack(nativeStream, track.nativeTrack)) {
+      audioTracks.remove(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Detaches |track| from the native stream. A video track may live in either
+  // list, so it is removed from both.
+  public boolean removeTrack(VideoTrack track) {
+    if (nativeRemoveVideoTrack(nativeStream, track.nativeTrack)) {
+      videoTracks.remove(track);
+      preservedVideoTracks.remove(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Removes and releases all non-preserved tracks, then frees the native
+  // stream. This object must not be used afterwards.
+  public void dispose() {
+    // Remove and release previously added audio and video tracks.
+    while (!audioTracks.isEmpty()) {
+      AudioTrack track = audioTracks.getFirst();
+      removeTrack(track);
+      track.dispose();
+    }
+    while (!videoTracks.isEmpty()) {
+      VideoTrack track = videoTracks.getFirst();
+      removeTrack(track);
+      track.dispose();
+    }
+    // Remove, but do not release preserved video tracks.
+    while (!preservedVideoTracks.isEmpty()) {
+      removeTrack(preservedVideoTracks.getFirst());
+    }
+    free(nativeStream);
+  }
+
+  // Returns the label of the native stream.
+  public String label() {
+    return nativeLabel(nativeStream);
+  }
+
+  public String toString() {
+    return "[" + label() + ":A=" + audioTracks.size() +
+        ":V=" + videoTracks.size() + "]";
+  }
+
+  private static native boolean nativeAddAudioTrack(
+      long nativeStream, long nativeAudioTrack);
+
+  private static native boolean nativeAddVideoTrack(
+      long nativeStream, long nativeVideoTrack);
+
+  private static native boolean nativeRemoveAudioTrack(
+      long nativeStream, long nativeAudioTrack);
+
+  private static native boolean nativeRemoveVideoTrack(
+      long nativeStream, long nativeVideoTrack);
+
+  private static native String nativeLabel(long nativeStream);
+
+  private static native void free(long nativeStream);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java b/talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000000..3965069b24
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,86 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+ /** Tracks MediaStreamTrackInterface.TrackState */
+ public enum State {
+ INITIALIZING, LIVE, ENDED, FAILED
+ }
+
+ final long nativeTrack;
+
+ public MediaStreamTrack(long nativeTrack) {
+ this.nativeTrack = nativeTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeTrack);
+ }
+
+ public String kind() {
+ return nativeKind(nativeTrack);
+ }
+
+ public boolean enabled() {
+ return nativeEnabled(nativeTrack);
+ }
+
+ public boolean setEnabled(boolean enable) {
+ return nativeSetEnabled(nativeTrack, enable);
+ }
+
+ public State state() {
+ return nativeState(nativeTrack);
+ }
+
+ public boolean setState(State newState) {
+ return nativeSetState(nativeTrack, newState.ordinal());
+ }
+
+ public void dispose() {
+ free(nativeTrack);
+ }
+
+ private static native String nativeId(long nativeTrack);
+
+ private static native String nativeKind(long nativeTrack);
+
+ private static native boolean nativeEnabled(long nativeTrack);
+
+ private static native boolean nativeSetEnabled(
+ long nativeTrack, boolean enabled);
+
+ private static native State nativeState(long nativeTrack);
+
+ private static native boolean nativeSetState(
+ long nativeTrack, int newState);
+
+ private static native void free(long nativeTrack);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
new file mode 100644
index 0000000000..50023001d7
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
@@ -0,0 +1,294 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Java-land version of the PeerConnection APIs; wraps the C++ API
+ * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
+ * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+ * http://www.w3.org/TR/mediacapture-streams/
+ */
+public class PeerConnection {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ /** Tracks PeerConnectionInterface::IceGatheringState */
+ public enum IceGatheringState { NEW, GATHERING, COMPLETE };
+
+
+ /** Tracks PeerConnectionInterface::IceConnectionState */
+ public enum IceConnectionState {
+ NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
+ };
+
+ /** Tracks PeerConnectionInterface::SignalingState */
+ public enum SignalingState {
+ STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
+ HAVE_REMOTE_PRANSWER, CLOSED
+ };
+
+ /** Java version of PeerConnectionObserver. */
+ public static interface Observer {
+ /** Triggered when the SignalingState changes. */
+ public void onSignalingChange(SignalingState newState);
+
+ /** Triggered when the IceConnectionState changes. */
+ public void onIceConnectionChange(IceConnectionState newState);
+
+ /** Triggered when the ICE connection receiving status changes. */
+ public void onIceConnectionReceivingChange(boolean receiving);
+
+ /** Triggered when the IceGatheringState changes. */
+ public void onIceGatheringChange(IceGatheringState newState);
+
+ /** Triggered when a new ICE candidate has been found. */
+ public void onIceCandidate(IceCandidate candidate);
+
+ /** Triggered when media is received on a new stream from remote peer. */
+ public void onAddStream(MediaStream stream);
+
+ /** Triggered when a remote peer close a stream. */
+ public void onRemoveStream(MediaStream stream);
+
+ /** Triggered when a remote peer opens a DataChannel. */
+ public void onDataChannel(DataChannel dataChannel);
+
+ /** Triggered when renegotiation is necessary. */
+ public void onRenegotiationNeeded();
+ }
+
+ /** Java version of PeerConnectionInterface.IceServer. */
+ public static class IceServer {
+ public final String uri;
+ public final String username;
+ public final String password;
+
+ /** Convenience constructor for STUN servers. */
+ public IceServer(String uri) {
+ this(uri, "", "");
+ }
+
+ public IceServer(String uri, String username, String password) {
+ this.uri = uri;
+ this.username = username;
+ this.password = password;
+ }
+
+ public String toString() {
+ return uri + "[" + username + ":" + password + "]";
+ }
+ }
+
+ /** Java version of PeerConnectionInterface.IceTransportsType */
+ public enum IceTransportsType {
+ NONE, RELAY, NOHOST, ALL
+ };
+
+ /** Java version of PeerConnectionInterface.BundlePolicy */
+ public enum BundlePolicy {
+ BALANCED, MAXBUNDLE, MAXCOMPAT
+ };
+
+ /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
+ public enum RtcpMuxPolicy {
+ NEGOTIATE, REQUIRE
+ };
+
+ /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
+ public enum TcpCandidatePolicy {
+ ENABLED, DISABLED
+ };
+
+ /** Java version of rtc::KeyType */
+ public enum KeyType {
+ RSA, ECDSA
+ }
+
+ /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
+ public enum ContinualGatheringPolicy {
+ GATHER_ONCE, GATHER_CONTINUALLY
+ }
+
+ /** Java version of PeerConnectionInterface.RTCConfiguration */
+ public static class RTCConfiguration {
+ public IceTransportsType iceTransportsType;
+ public List<IceServer> iceServers;
+ public BundlePolicy bundlePolicy;
+ public RtcpMuxPolicy rtcpMuxPolicy;
+ public TcpCandidatePolicy tcpCandidatePolicy;
+ public int audioJitterBufferMaxPackets;
+ public boolean audioJitterBufferFastAccelerate;
+ public int iceConnectionReceivingTimeout;
+ public KeyType keyType;
+ public ContinualGatheringPolicy continualGatheringPolicy;
+
+ public RTCConfiguration(List<IceServer> iceServers) {
+ iceTransportsType = IceTransportsType.ALL;
+ bundlePolicy = BundlePolicy.BALANCED;
+ rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
+ tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
+ this.iceServers = iceServers;
+ audioJitterBufferMaxPackets = 50;
+ audioJitterBufferFastAccelerate = false;
+ iceConnectionReceivingTimeout = -1;
+ keyType = KeyType.ECDSA;
+ continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
+ }
+ };
+
+ private final List<MediaStream> localStreams;
+ private final long nativePeerConnection;
+ private final long nativeObserver;
+ private List<RtpSender> senders;
+ private List<RtpReceiver> receivers;
+
+ PeerConnection(long nativePeerConnection, long nativeObserver) {
+ this.nativePeerConnection = nativePeerConnection;
+ this.nativeObserver = nativeObserver;
+ localStreams = new LinkedList<MediaStream>();
+ senders = new LinkedList<RtpSender>();
+ receivers = new LinkedList<RtpReceiver>();
+ }
+
+ // JsepInterface.
+ public native SessionDescription getLocalDescription();
+
+ public native SessionDescription getRemoteDescription();
+
+ public native DataChannel createDataChannel(
+ String label, DataChannel.Init init);
+
+ public native void createOffer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void createAnswer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void setLocalDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native void setRemoteDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native boolean setConfiguration(RTCConfiguration config);
+
+ public boolean addIceCandidate(IceCandidate candidate) {
+ return nativeAddIceCandidate(
+ candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+ }
+
+ public boolean addStream(MediaStream stream) {
+ boolean ret = nativeAddLocalStream(stream.nativeStream);
+ if (!ret) {
+ return false;
+ }
+ localStreams.add(stream);
+ return true;
+ }
+
+ public void removeStream(MediaStream stream) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ localStreams.remove(stream);
+ }
+
+ // Note that calling getSenders will dispose of the senders previously
+ // returned (and same goes for getReceivers).
+ public List<RtpSender> getSenders() {
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders = nativeGetSenders();
+ return Collections.unmodifiableList(senders);
+ }
+
+ public List<RtpReceiver> getReceivers() {
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers = nativeGetReceivers();
+ return Collections.unmodifiableList(receivers);
+ }
+
+ public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
+ return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
+ }
+
+ // TODO(fischman): add support for DTMF-related methods once that API
+ // stabilizes.
+ public native SignalingState signalingState();
+
+ public native IceConnectionState iceConnectionState();
+
+ public native IceGatheringState iceGatheringState();
+
+ public native void close();
+
+ public void dispose() {
+ close();
+ for (MediaStream stream : localStreams) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ stream.dispose();
+ }
+ localStreams.clear();
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders.clear();
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers.clear();
+ freePeerConnection(nativePeerConnection);
+ freeObserver(nativeObserver);
+ }
+
+ private static native void freePeerConnection(long nativePeerConnection);
+
+ private static native void freeObserver(long nativeObserver);
+
+ private native boolean nativeAddIceCandidate(
+ String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+
+ private native boolean nativeAddLocalStream(long nativeStream);
+
+ private native void nativeRemoveLocalStream(long nativeStream);
+
+ private native boolean nativeGetStats(
+ StatsObserver observer, long nativeTrack);
+
+ private native List<RtpSender> nativeGetSenders();
+
+ private native List<RtpReceiver> nativeGetReceivers();
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
new file mode 100644
index 0000000000..83999ece98
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
@@ -0,0 +1,212 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.List;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
+ * the PeerConnection API for clients.
+ */
+public class PeerConnectionFactory {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private static final String TAG = "PeerConnectionFactory";
+ private final long nativeFactory;
+ private static Thread workerThread;
+ private static Thread signalingThread;
+
+ public static class Options {
+ // Keep in sync with webrtc/base/network.h!
+ static final int ADAPTER_TYPE_UNKNOWN = 0;
+ static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+ static final int ADAPTER_TYPE_WIFI = 1 << 1;
+ static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+ static final int ADAPTER_TYPE_VPN = 1 << 3;
+ static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+
+ public int networkIgnoreMask;
+ public boolean disableEncryption;
+ public boolean disableNetworkMonitor;
+ }
+
+ // |context| is an android.content.Context object, but we keep it untyped here
+ // to allow building on non-Android platforms.
+ // Callers may specify either |initializeAudio| or |initializeVideo| as false
+ // to skip initializing the respective engine (and avoid the need for the
+ // respective permissions).
+ // |renderEGLContext| can be provided to suport HW video decoding to
+ // texture and will be used to create a shared EGL context on video
+ // decoding thread.
+ public static native boolean initializeAndroidGlobals(
+ Object context, boolean initializeAudio, boolean initializeVideo,
+ boolean videoHwAcceleration);
+
+ // Field trial initialization. Must be called before PeerConnectionFactory
+ // is created.
+ public static native void initializeFieldTrials(String fieldTrialsInitString);
+
+ public PeerConnectionFactory() {
+ nativeFactory = nativeCreatePeerConnectionFactory();
+ if (nativeFactory == 0) {
+ throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+ }
+ }
+
+ public PeerConnection createPeerConnection(
+ PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ long nativeObserver = nativeCreateObserver(observer);
+ if (nativeObserver == 0) {
+ return null;
+ }
+ long nativePeerConnection = nativeCreatePeerConnection(
+ nativeFactory, rtcConfig, constraints, nativeObserver);
+ if (nativePeerConnection == 0) {
+ return null;
+ }
+ return new PeerConnection(nativePeerConnection, nativeObserver);
+ }
+
+ public PeerConnection createPeerConnection(
+ List<PeerConnection.IceServer> iceServers,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(iceServers);
+ return createPeerConnection(rtcConfig, constraints, observer);
+ }
+
+ public MediaStream createLocalMediaStream(String label) {
+ return new MediaStream(
+ nativeCreateLocalMediaStream(nativeFactory, label));
+ }
+
+ public VideoSource createVideoSource(
+ VideoCapturer capturer, MediaConstraints constraints) {
+ return new VideoSource(nativeCreateVideoSource(
+ nativeFactory, capturer.takeNativeVideoCapturer(), constraints));
+ }
+
+ public VideoTrack createVideoTrack(String id, VideoSource source) {
+ return new VideoTrack(nativeCreateVideoTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ public AudioSource createAudioSource(MediaConstraints constraints) {
+ return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+ }
+
+ public AudioTrack createAudioTrack(String id, AudioSource source) {
+ return new AudioTrack(nativeCreateAudioTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ public void setOptions(Options options) {
+ nativeSetOptions(nativeFactory, options);
+ }
+
+ public void setVideoHwAccelerationOptions(Object renderEGLContext) {
+ nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
+ }
+
+ public void dispose() {
+ nativeFreeFactory(nativeFactory);
+ signalingThread = null;
+ workerThread = null;
+ }
+
+ public void threadsCallbacks() {
+ nativeThreadsCallbacks(nativeFactory);
+ }
+
+ private static void printStackTrace(Thread thread, String threadName) {
+ if (thread != null) {
+ StackTraceElement[] stackTraces = thread.getStackTrace();
+ if (stackTraces.length > 0) {
+ Logging.d(TAG, threadName + " stacks trace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ public static void printStackTraces() {
+ printStackTrace(workerThread, "Worker thread");
+ printStackTrace(signalingThread, "Signaling thread");
+ }
+
+ private static void onWorkerThreadReady() {
+ workerThread = Thread.currentThread();
+ Logging.d(TAG, "onWorkerThreadReady");
+ }
+
+ private static void onSignalingThreadReady() {
+ signalingThread = Thread.currentThread();
+ Logging.d(TAG, "onSignalingThreadReady");
+ }
+
+ private static native long nativeCreatePeerConnectionFactory();
+
+ private static native long nativeCreateObserver(
+ PeerConnection.Observer observer);
+
+ private static native long nativeCreatePeerConnection(
+ long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints, long nativeObserver);
+
+ private static native long nativeCreateLocalMediaStream(
+ long nativeFactory, String label);
+
+ private static native long nativeCreateVideoSource(
+ long nativeFactory, long nativeVideoCapturer,
+ MediaConstraints constraints);
+
+ private static native long nativeCreateVideoTrack(
+ long nativeFactory, String id, long nativeVideoSource);
+
+ private static native long nativeCreateAudioSource(
+ long nativeFactory, MediaConstraints constraints);
+
+ private static native long nativeCreateAudioTrack(
+ long nativeFactory, String id, long nativeSource);
+
+ public native void nativeSetOptions(long nativeFactory, Options options);
+
+ private static native void nativeSetVideoHwAccelerationOptions(
+ long nativeFactory, Object renderEGLContext);
+
+ private static native void nativeThreadsCallbacks(long nativeFactory);
+
+ private static native void nativeFreeFactory(long nativeFactory);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/RtpReceiver.java b/talk/app/webrtc/java/src/org/webrtc/RtpReceiver.java
new file mode 100644
index 0000000000..597f441334
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/RtpReceiver.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpReceiverInterface. */
+public class RtpReceiver {
+ final long nativeRtpReceiver;
+
+ private MediaStreamTrack cachedTrack;
+
+ public RtpReceiver(long nativeRtpReceiver) {
+ this.nativeRtpReceiver = nativeRtpReceiver;
+ long track = nativeGetTrack(nativeRtpReceiver);
+ // We can assume that an RtpReceiver always has an associated track.
+ cachedTrack = new MediaStreamTrack(track);
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeRtpReceiver);
+ }
+
+ public void dispose() {
+ cachedTrack.dispose();
+ free(nativeRtpReceiver);
+ }
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose().
+ private static native long nativeGetTrack(long nativeRtpReceiver);
+
+ private static native String nativeId(long nativeRtpReceiver);
+
+ private static native void free(long nativeRtpReceiver);
+};
diff --git a/talk/app/webrtc/java/src/org/webrtc/RtpSender.java b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
new file mode 100644
index 0000000000..37357c0657
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpSenderInterface. */
+public class RtpSender {
+ final long nativeRtpSender;
+
+ private MediaStreamTrack cachedTrack;
+
+ public RtpSender(long nativeRtpSender) {
+ this.nativeRtpSender = nativeRtpSender;
+ long track = nativeGetTrack(nativeRtpSender);
+ // It may be possible for an RtpSender to be created without a track.
+ cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
+ }
+
+ // NOTE: This should not be called with a track that's already used by
+ // another RtpSender, because then it would be double-disposed.
+ public void setTrack(MediaStreamTrack track) {
+ if (cachedTrack != null) {
+ cachedTrack.dispose();
+ }
+ cachedTrack = track;
+ nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack);
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeRtpSender);
+ }
+
+ public void dispose() {
+ if (cachedTrack != null) {
+ cachedTrack.dispose();
+ }
+ free(nativeRtpSender);
+ }
+
+ private static native void nativeSetTrack(long nativeRtpSender,
+ long nativeTrack);
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose() or setTrack().
+ private static native long nativeGetTrack(long nativeRtpSender);
+
+ private static native String nativeId(long nativeRtpSender);
+
+ private static native void free(long nativeRtpSender);
+}
+;
diff --git a/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java b/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000000..779bf1b346
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java
@@ -0,0 +1,43 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+ /** Called on success of Create{Offer,Answer}(). */
+ public void onCreateSuccess(SessionDescription sdp);
+
+ /** Called on success of Set{Local,Remote}Description(). */
+ public void onSetSuccess();
+
+ /** Called on error of Create{Offer,Answer}(). */
+ public void onCreateFailure(String error);
+
+ /** Called on error of Set{Local,Remote}Description(). */
+ public void onSetFailure(String error);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java b/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000000..c3dfcd4107
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java
@@ -0,0 +1,57 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+ /** Java-land enum version of SessionDescriptionInterface's type() string. */
+ public static enum Type {
+ OFFER, PRANSWER, ANSWER;
+
+ public String canonicalForm() {
+ return name().toLowerCase();
+ }
+
+ public static Type fromCanonicalForm(String canonical) {
+ return Type.valueOf(Type.class, canonical.toUpperCase());
+ }
+ }
+
+ public final Type type;
+ public final String description;
+
+ public SessionDescription(Type type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java b/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000000..99223ad059
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java
@@ -0,0 +1,34 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObservers). */
+public interface StatsObserver {
+  /**
+   * Called when the reports are ready.
+   *
+   * @param reports the gathered stats reports.
+   */
+  public void onComplete(StatsReport[] reports);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/StatsReport.java b/talk/app/webrtc/java/src/org/webrtc/StatsReport.java
new file mode 100644
index 0000000000..6e325439da
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/StatsReport.java
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+
+  /** Java version of webrtc::StatsReport::Value: a single name/value pair. */
+  public static class Value {
+    public final String name;
+    public final String value;
+
+    public Value(String name, String value) {
+      this.name = name;
+      this.value = value;
+    }
+
+    // Debug representation, e.g. "[name: value]".
+    // NOTE(review): missing @Override (this overrides Object.toString());
+    // harmless, but worth adding for compile-time checking.
+    public String toString() {
+      StringBuilder builder = new StringBuilder();
+      builder.append("[").append(name).append(": ").append(value).append("]");
+      return builder.toString();
+    }
+  }
+
+  /** Identifier of this report. */
+  public final String id;
+  /** Report type string, as supplied by the native StatsReport. */
+  public final String type;
+  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  public final double timestamp;
+  /** The name/value pairs carried by this report. */
+  public final Value[] values;
+
+  public StatsReport(String id, String type, double timestamp, Value[] values) {
+    this.id = id;
+    this.type = type;
+    this.timestamp = timestamp;
+    this.values = values;
+  }
+
+  // Debug representation of the whole report.
+  // NOTE(review): missing @Override, and a trailing ", " is left after the
+  // last value; cosmetic only.
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("id: ").append(id).append(", type: ").append(type)
+        .append(", timestamp: ").append(timestamp).append(", values: ");
+    for (int i = 0; i < values.length; ++i) {
+      builder.append(values[i].toString()).append(", ");
+    }
+    return builder.toString();
+  }
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java b/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000000..158cc3447f
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java
@@ -0,0 +1,70 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of cricket::VideoCapturer. */
+public class VideoCapturer {
+  // Pointer to the native cricket::VideoCapturer; 0 when this object does not
+  // own one (never set, or already taken via takeNativeVideoCapturer()).
+  private long nativeVideoCapturer;
+
+  protected VideoCapturer() {
+  }
+
+  /**
+   * Creates a capturer for the named device, or returns null if the native
+   * layer could not create one.
+   */
+  public static VideoCapturer create(String deviceName) {
+    Object capturer = nativeCreateVideoCapturer(deviceName);
+    if (capturer != null)
+      return (VideoCapturer) (capturer);
+    return null;
+  }
+
+  // Sets |nativeCapturer| to be owned by VideoCapturer.
+  protected void setNativeCapturer(long nativeCapturer) {
+    this.nativeVideoCapturer = nativeCapturer;
+  }
+
+  // Package-visible for PeerConnectionFactory.
+  // Transfers ownership of the native capturer to the caller and zeroes the
+  // field, so a later dispose() is a no-op. Throws if already taken.
+  long takeNativeVideoCapturer() {
+    if (nativeVideoCapturer == 0) {
+      throw new RuntimeException("Capturer can only be taken once!");
+    }
+    long ret = nativeVideoCapturer;
+    nativeVideoCapturer = 0;
+    return ret;
+  }
+
+  public void dispose() {
+    // No-op iff this capturer is owned by a source (see comment on
+    // PeerConnectionFactoryInterface::CreateVideoSource()).
+    // NOTE(review): nativeVideoCapturer is not reset to 0 after free(), so a
+    // second dispose() would free the same pointer again (compare
+    // VideoRenderer.dispose(), which guards against this). Confirm callers
+    // never dispose twice, or zero the field here.
+    if (nativeVideoCapturer != 0) {
+      free(nativeVideoCapturer);
+    }
+  }
+
+  private static native Object nativeCreateVideoCapturer(String deviceName);
+
+  private static native void free(long nativeVideoCapturer);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
new file mode 100644
index 0000000000..3c255dd123
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -0,0 +1,179 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of VideoRendererInterface. In addition to allowing clients to
+ * define their own rendering behavior (by passing in a Callbacks object), this
+ * class also provides a createGui() method for creating a GUI-rendering window
+ * on various platforms.
+ */
+public class VideoRenderer {
+  /**
+   * Java version of cricket::VideoFrame. Frames are only constructed from native code and test
+   * code.
+   */
+  public static class I420Frame {
+    public final int width;
+    public final int height;
+    // Per-plane strides (Y, U, V); null for texture-backed frames.
+    public final int[] yuvStrides;
+    // Plane data (Y, U, V); null for texture-backed frames. Cleared by
+    // renderFrameDone().
+    public ByteBuffer[] yuvPlanes;
+    // True when this frame carries I420 planes; false when it carries a
+    // texture (textureObject/textureId).
+    public final boolean yuvFrame;
+    public Object textureObject;
+    public int textureId;
+    // Frame pointer in C++.
+    private long nativeFramePointer;
+
+    // rotationDegree is the degree that the frame must be rotated clockwise
+    // to be rendered correctly.
+    public int rotationDegree;
+
+    /**
+     * Construct a frame of the given dimensions with the specified planar data.
+     */
+    I420Frame(int width, int height, int rotationDegree, int[] yuvStrides, ByteBuffer[] yuvPlanes,
+        long nativeFramePointer) {
+      this.width = width;
+      this.height = height;
+      this.yuvStrides = yuvStrides;
+      this.yuvPlanes = yuvPlanes;
+      this.yuvFrame = true;
+      this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
+      // Only quarter-turn rotations are representable.
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
+    }
+
+    /**
+     * Construct a texture frame of the given dimensions with data in SurfaceTexture
+     */
+    I420Frame(
+        int width, int height, int rotationDegree,
+        Object textureObject, int textureId, long nativeFramePointer) {
+      this.width = width;
+      this.height = height;
+      this.yuvStrides = null;
+      this.yuvPlanes = null;
+      this.textureObject = textureObject;
+      this.textureId = textureId;
+      this.yuvFrame = false;
+      this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
+    }
+
+    /** Frame width after applying the pending rotation. */
+    public int rotatedWidth() {
+      return (rotationDegree % 180 == 0) ? width : height;
+    }
+
+    /** Frame height after applying the pending rotation. */
+    public int rotatedHeight() {
+      return (rotationDegree % 180 == 0) ? height : width;
+    }
+
+    // NOTE(review): yuvStrides is null for texture frames, so calling
+    // toString() on a texture frame throws NullPointerException -- only call
+    // this on YUV frames, or guard on yuvFrame.
+    @Override
+    public String toString() {
+      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
+          ":" + yuvStrides[2];
+    }
+  }
+
+  // Helper native function to do a video frame plane copying.
+  public static native void nativeCopyPlane(ByteBuffer src, int width,
+      int height, int srcStride, ByteBuffer dst, int dstStride);
+
+  /** The real meat of VideoRendererInterface. */
+  public static interface Callbacks {
+    // |frame| might have pending rotation and implementation of Callbacks
+    // should handle that by applying rotation during rendering. The callee
+    // is responsible for signaling when it is done with |frame| by calling
+    // renderFrameDone(frame).
+    public void renderFrame(I420Frame frame);
+  }
+
+  /**
+   * This must be called after every renderFrame() to release the frame.
+   */
+  public static void renderFrameDone(I420Frame frame) {
+    // Drop the Java-side references first, then release the native frame;
+    // zeroing nativeFramePointer makes a repeated call a no-op.
+    frame.yuvPlanes = null;
+    frame.textureObject = null;
+    frame.textureId = 0;
+    if (frame.nativeFramePointer != 0) {
+      releaseNativeFrame(frame.nativeFramePointer);
+      frame.nativeFramePointer = 0;
+    }
+  }
+
+  // |this| either wraps a native (GUI) renderer or a client-supplied Callbacks
+  // (Java) implementation; this is indicated by |isWrappedVideoRenderer|.
+  long nativeVideoRenderer;
+  private final boolean isWrappedVideoRenderer;
+
+  /**
+   * Creates a GUI-rendering window at (x, y), or returns null if the native
+   * layer could not create one.
+   */
+  public static VideoRenderer createGui(int x, int y) {
+    long nativeVideoRenderer = nativeCreateGuiVideoRenderer(x, y);
+    if (nativeVideoRenderer == 0) {
+      return null;
+    }
+    return new VideoRenderer(nativeVideoRenderer);
+  }
+
+  /** Wraps a client-supplied Callbacks implementation in a native renderer. */
+  public VideoRenderer(Callbacks callbacks) {
+    nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
+    isWrappedVideoRenderer = true;
+  }
+
+  // Private: GUI renderers are created only through createGui().
+  private VideoRenderer(long nativeVideoRenderer) {
+    this.nativeVideoRenderer = nativeVideoRenderer;
+    isWrappedVideoRenderer = false;
+  }
+
+  /** Releases the underlying native renderer. Safe to call more than once. */
+  public void dispose() {
+    if (nativeVideoRenderer == 0) {
+      // Already disposed.
+      return;
+    }
+    if (!isWrappedVideoRenderer) {
+      freeGuiVideoRenderer(nativeVideoRenderer);
+    } else {
+      freeWrappedVideoRenderer(nativeVideoRenderer);
+    }
+    nativeVideoRenderer = 0;
+  }
+
+  private static native long nativeCreateGuiVideoRenderer(int x, int y);
+  private static native long nativeWrapVideoRenderer(Callbacks callbacks);
+
+  private static native void freeGuiVideoRenderer(long nativeVideoRenderer);
+  private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
+
+  private static native void releaseNativeFrame(long nativeFramePointer);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoSource.java b/talk/app/webrtc/java/src/org/webrtc/VideoSource.java
new file mode 100644
index 0000000000..7151748aa9
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoSource.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Java version of VideoSourceInterface, extended with stop/restart
+ * functionality to allow explicit control of the camera device on android,
+ * where there is no support for multiple open capture devices and the cost of
+ * holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
+ * its output to the encoder) can be too high to bear.
+ */
+public class VideoSource extends MediaSource {
+
+  public VideoSource(long nativeSource) {
+    super(nativeSource);
+  }
+
+  // Stop capture feeding this source.
+  public void stop() {
+    stop(nativeSource);
+  }
+
+  // Restart capture feeding this source. stop() must have been called since
+  // the last call to restart() (if any). Note that this isn't "start()";
+  // sources are started by default at birth.
+  public void restart() {
+    restart(nativeSource);
+  }
+
+  // NOTE(review): this override only delegates to MediaSource.dispose() and
+  // adds no behavior; it could be removed.
+  @Override
+  public void dispose() {
+    super.dispose();
+  }
+
+  private static native void stop(long nativeSource);
+  private static native void restart(long nativeSource);
+}
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java b/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000000..7333a901d7
--- /dev/null
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+  // Renderers currently attached to this track, tracked so dispose() can
+  // detach and dispose them.
+  private final LinkedList<VideoRenderer> renderers =
+      new LinkedList<VideoRenderer>();
+
+  public VideoTrack(long nativeTrack) {
+    super(nativeTrack);
+  }
+
+  /** Attaches |renderer| to this track's video stream. */
+  public void addRenderer(VideoRenderer renderer) {
+    renderers.add(renderer);
+    nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
+  }
+
+  /**
+   * Detaches |renderer| from this track and disposes it. Silently does
+   * nothing if |renderer| was never added.
+   */
+  public void removeRenderer(VideoRenderer renderer) {
+    if (!renderers.remove(renderer)) {
+      return;
+    }
+    nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
+    renderer.dispose();
+  }
+
+  // Detaches and disposes all renderers, then disposes the underlying track
+  // via the superclass.
+  public void dispose() {
+    while (!renderers.isEmpty()) {
+      removeRenderer(renderers.getFirst());
+    }
+    super.dispose();
+  }
+
+  // NOTE(review): free() is not called anywhere in this class -- confirm it
+  // is needed (e.g. invoked from JNI), otherwise it is dead code.
+  private static native void free(long nativeTrack);
+
+  private static native void nativeAddRenderer(
+      long nativeTrack, long nativeRenderer);
+
+  private static native void nativeRemoveRenderer(
+      long nativeTrack, long nativeRenderer);
+}
diff --git a/talk/app/webrtc/java/testcommon/src/org/webrtc/PeerConnectionTest.java b/talk/app/webrtc/java/testcommon/src/org/webrtc/PeerConnectionTest.java
new file mode 100644
index 0000000000..f9bc495fae
--- /dev/null
+++ b/talk/app/webrtc/java/testcommon/src/org/webrtc/PeerConnectionTest.java
@@ -0,0 +1,784 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.IceGatheringState;
+import org.webrtc.PeerConnection.SignalingState;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import static junit.framework.Assert.*;
+
+/** End-to-end tests for PeerConnection.java. */
+public class PeerConnectionTest {
+ // Set to true to render video.
+ private static final boolean RENDER_TO_GUI = false;
+ private static final int TIMEOUT_SECONDS = 20;
+ private TreeSet<String> threadsBeforeTest = null;
+
+ private static class ObserverExpectations implements PeerConnection.Observer,
+ VideoRenderer.Callbacks,
+ DataChannel.Observer,
+ StatsObserver {
+ private final String name;
+ private int expectedIceCandidates = 0;
+ private int expectedErrors = 0;
+ private int expectedRenegotiations = 0;
+ private int previouslySeenWidth = 0;
+ private int previouslySeenHeight = 0;
+ private int expectedFramesDelivered = 0;
+ private LinkedList<SignalingState> expectedSignalingChanges =
+ new LinkedList<SignalingState>();
+ private LinkedList<IceConnectionState> expectedIceConnectionChanges =
+ new LinkedList<IceConnectionState>();
+ private LinkedList<IceGatheringState> expectedIceGatheringChanges =
+ new LinkedList<IceGatheringState>();
+ private LinkedList<String> expectedAddStreamLabels =
+ new LinkedList<String>();
+ private LinkedList<String> expectedRemoveStreamLabels =
+ new LinkedList<String>();
+ private final LinkedList<IceCandidate> gotIceCandidates =
+ new LinkedList<IceCandidate>();
+ private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
+ new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
+ private DataChannel dataChannel;
+ private LinkedList<DataChannel.Buffer> expectedBuffers =
+ new LinkedList<DataChannel.Buffer>();
+ private LinkedList<DataChannel.State> expectedStateChanges =
+ new LinkedList<DataChannel.State>();
+ private LinkedList<String> expectedRemoteDataChannelLabels =
+ new LinkedList<String>();
+ private int expectedStatsCallbacks = 0;
+ private LinkedList<StatsReport[]> gotStatsReports =
+ new LinkedList<StatsReport[]>();
+
+ public ObserverExpectations(String name) {
+ this.name = name;
+ }
+
+ public synchronized void setDataChannel(DataChannel dataChannel) {
+ assertNull(this.dataChannel);
+ this.dataChannel = dataChannel;
+ this.dataChannel.registerObserver(this);
+ assertNotNull(this.dataChannel);
+ }
+
+ public synchronized void expectIceCandidates(int count) {
+ expectedIceCandidates += count;
+ }
+
+ @Override
+ public synchronized void onIceCandidate(IceCandidate candidate) {
+ --expectedIceCandidates;
+
+ // We don't assert expectedIceCandidates >= 0 because it's hard to know
+ // how many to expect, in general. We only use expectIceCandidates to
+ // assert a minimal count.
+ synchronized (gotIceCandidates) {
+ gotIceCandidates.add(candidate);
+ gotIceCandidates.notifyAll();
+ }
+ }
+
+ private synchronized void setSize(int width, int height) {
+ assertFalse(RENDER_TO_GUI);
+ // Because different camera devices (fake & physical) produce different
+ // resolutions, we only sanity-check the set sizes.
+ assertTrue(width > 0);
+ assertTrue(height > 0);
+ if (previouslySeenWidth > 0) {
+ assertEquals(previouslySeenWidth, width);
+ assertEquals(previouslySeenHeight, height);
+ } else {
+ previouslySeenWidth = width;
+ previouslySeenHeight = height;
+ }
+ }
+
+ public synchronized void expectFramesDelivered(int count) {
+ assertFalse(RENDER_TO_GUI);
+ expectedFramesDelivered += count;
+ }
+
+ @Override
+ public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
+ setSize(frame.rotatedWidth(), frame.rotatedHeight());
+ --expectedFramesDelivered;
+ VideoRenderer.renderFrameDone(frame);
+ }
+
+ public synchronized void expectSignalingChange(SignalingState newState) {
+ expectedSignalingChanges.add(newState);
+ }
+
+ @Override
+ public synchronized void onSignalingChange(SignalingState newState) {
+ assertEquals(expectedSignalingChanges.removeFirst(), newState);
+ }
+
+ public synchronized void expectIceConnectionChange(
+ IceConnectionState newState) {
+ expectedIceConnectionChanges.add(newState);
+ }
+
+ @Override
+ public synchronized void onIceConnectionChange(
+ IceConnectionState newState) {
+ // TODO(bemasc): remove once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ if (newState.equals(IceConnectionState.COMPLETED)) {
+ return;
+ }
+
+ if (expectedIceConnectionChanges.isEmpty()) {
+ System.out.println(name + "Got an unexpected ice connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedIceConnectionChanges.removeFirst(), newState);
+ }
+
+ @Override
+ public synchronized void onIceConnectionReceivingChange(boolean receiving) {
+ System.out.println(name + "Got an ice connection receiving change " + receiving);
+ }
+
+ public synchronized void expectIceGatheringChange(
+ IceGatheringState newState) {
+ expectedIceGatheringChanges.add(newState);
+ }
+
+ @Override
+ public synchronized void onIceGatheringChange(IceGatheringState newState) {
+ // It's fine to get a variable number of GATHERING messages before
+ // COMPLETE fires (depending on how long the test runs) so we don't assert
+ // any particular count.
+ if (newState == IceGatheringState.GATHERING) {
+ return;
+ }
+ assertEquals(expectedIceGatheringChanges.removeFirst(), newState);
+ }
+
+ public synchronized void expectAddStream(String label) {
+ expectedAddStreamLabels.add(label);
+ }
+
+ @Override
+ public synchronized void onAddStream(MediaStream stream) {
+ assertEquals(expectedAddStreamLabels.removeFirst(), stream.label());
+ assertEquals(1, stream.videoTracks.size());
+ assertEquals(1, stream.audioTracks.size());
+ assertTrue(stream.videoTracks.get(0).id().endsWith("VideoTrack"));
+ assertTrue(stream.audioTracks.get(0).id().endsWith("AudioTrack"));
+ assertEquals("video", stream.videoTracks.get(0).kind());
+ assertEquals("audio", stream.audioTracks.get(0).kind());
+ VideoRenderer renderer = createVideoRenderer(this);
+ stream.videoTracks.get(0).addRenderer(renderer);
+ assertNull(renderers.put(
+ stream, new WeakReference<VideoRenderer>(renderer)));
+ }
+
+ public synchronized void expectRemoveStream(String label) {
+ expectedRemoveStreamLabels.add(label);
+ }
+
+ @Override
+ public synchronized void onRemoveStream(MediaStream stream) {
+ assertEquals(expectedRemoveStreamLabels.removeFirst(), stream.label());
+ WeakReference<VideoRenderer> renderer = renderers.remove(stream);
+ assertNotNull(renderer);
+ assertNotNull(renderer.get());
+ assertEquals(1, stream.videoTracks.size());
+ stream.videoTracks.get(0).removeRenderer(renderer.get());
+ }
+
+ public synchronized void expectDataChannel(String label) {
+ expectedRemoteDataChannelLabels.add(label);
+ }
+
+ @Override
+ public synchronized void onDataChannel(DataChannel remoteDataChannel) {
+ assertEquals(expectedRemoteDataChannelLabels.removeFirst(),
+ remoteDataChannel.label());
+ setDataChannel(remoteDataChannel);
+ assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
+ }
+
+ public synchronized void expectRenegotiationNeeded() {
+ ++expectedRenegotiations;
+ }
+
+ @Override
+ public synchronized void onRenegotiationNeeded() {
+ assertTrue(--expectedRenegotiations >= 0);
+ }
+
+ public synchronized void expectMessage(ByteBuffer expectedBuffer,
+ boolean expectedBinary) {
+ expectedBuffers.add(
+ new DataChannel.Buffer(expectedBuffer, expectedBinary));
+ }
+
+ @Override
+ public synchronized void onMessage(DataChannel.Buffer buffer) {
+ DataChannel.Buffer expected = expectedBuffers.removeFirst();
+ assertEquals(expected.binary, buffer.binary);
+ assertTrue(expected.data.equals(buffer.data));
+ }
+
+ @Override
+ public synchronized void onBufferedAmountChange(long previousAmount) {
+ assertFalse(previousAmount == dataChannel.bufferedAmount());
+ }
+
+ @Override
+ public synchronized void onStateChange() {
+ assertEquals(expectedStateChanges.removeFirst(), dataChannel.state());
+ }
+
+ public synchronized void expectStateChange(DataChannel.State state) {
+ expectedStateChanges.add(state);
+ }
+
+ @Override
+ public synchronized void onComplete(StatsReport[] reports) {
+ if (--expectedStatsCallbacks < 0) {
+ throw new RuntimeException("Unexpected stats report: " + reports);
+ }
+ gotStatsReports.add(reports);
+ }
+
+ public synchronized void expectStatsCallback() {
+ ++expectedStatsCallbacks;
+ }
+
+ public synchronized LinkedList<StatsReport[]> takeStatsReports() {
+ LinkedList<StatsReport[]> got = gotStatsReports;
+ gotStatsReports = new LinkedList<StatsReport[]>();
+ return got;
+ }
+
+ // Return a set of expectations that haven't been satisfied yet, possibly
+ // empty if no such expectations exist.
+ public synchronized TreeSet<String> unsatisfiedExpectations() {
+ TreeSet<String> stillWaitingForExpectations = new TreeSet<String>();
+ if (expectedIceCandidates > 0) { // See comment in onIceCandidate.
+ stillWaitingForExpectations.add("expectedIceCandidates");
+ }
+ if (expectedErrors != 0) {
+ stillWaitingForExpectations.add("expectedErrors: " + expectedErrors);
+ }
+ if (expectedSignalingChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedSignalingChanges: " + expectedSignalingChanges.size());
+ }
+ if (expectedIceConnectionChanges.size() != 0) {
+ stillWaitingForExpectations.add("expectedIceConnectionChanges: " +
+ expectedIceConnectionChanges.size());
+ }
+ if (expectedIceGatheringChanges.size() != 0) {
+ stillWaitingForExpectations.add("expectedIceGatheringChanges: " +
+ expectedIceGatheringChanges.size());
+ }
+ if (expectedAddStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedAddStreamLabels: " + expectedAddStreamLabels.size());
+ }
+ if (expectedRemoveStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
+ }
+ if (expectedFramesDelivered > 0) {
+ stillWaitingForExpectations.add(
+ "expectedFramesDelivered: " + expectedFramesDelivered);
+ }
+ if (!expectedBuffers.isEmpty()) {
+ stillWaitingForExpectations.add(
+ "expectedBuffers: " + expectedBuffers.size());
+ }
+ if (!expectedStateChanges.isEmpty()) {
+ stillWaitingForExpectations.add(
+ "expectedStateChanges: " + expectedStateChanges.size());
+ }
+ if (!expectedRemoteDataChannelLabels.isEmpty()) {
+ stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " +
+ expectedRemoteDataChannelLabels.size());
+ }
+ if (expectedStatsCallbacks != 0) {
+ stillWaitingForExpectations.add(
+ "expectedStatsCallbacks: " + expectedStatsCallbacks);
+ }
+ return stillWaitingForExpectations;
+ }
+
+ public boolean waitForAllExpectationsToBeSatisfied(int timeoutSeconds) {
+ // TODO(fischman): problems with this approach:
+ // - come up with something better than a poll loop
+ // - avoid serializing expectations explicitly; the test is not as robust
+ // as it could be because it must place expectations between wait
+ // statements very precisely (e.g. frame must not arrive before its
+ // expectation, and expectation must not be registered so early as to
+ // stall a wait). Use callbacks to fire off dependent steps instead of
+ // explicitly waiting, so there can be just a single wait at the end of
+ // the test.
+ long endTime = System.currentTimeMillis() + 1000 * timeoutSeconds;
+ TreeSet<String> prev = null;
+ TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
+ while (!stillWaitingForExpectations.isEmpty()) {
+ if (!stillWaitingForExpectations.equals(prev)) {
+ System.out.println(
+ name + " still waiting at\n " +
+ (new Throwable()).getStackTrace()[1] +
+ "\n for: " +
+ Arrays.toString(stillWaitingForExpectations.toArray()));
+ }
+ if (endTime < System.currentTimeMillis()) {
+ System.out.println(name + " timed out waiting for: "
+ + Arrays.toString(stillWaitingForExpectations.toArray()));
+ return false;
+ }
+ try {
+ Thread.sleep(10);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ prev = stillWaitingForExpectations;
+ stillWaitingForExpectations = unsatisfiedExpectations();
+ }
+ if (prev == null) {
+ System.out.println(name + " didn't need to wait at\n " +
+ (new Throwable()).getStackTrace()[1]);
+ }
+ return true;
+ }
+
+    // This method returns a list of all currently gathered ice candidates, or waits until
+    // at least one candidate has been gathered.
+ public List<IceCandidate> getAtLeastOneIceCandidate() throws InterruptedException {
+ synchronized (gotIceCandidates) {
+ while (gotIceCandidates.isEmpty()) {
+ gotIceCandidates.wait();
+ }
+ return new LinkedList<IceCandidate>(gotIceCandidates);
+ }
+ }
+ }
+
+ private static class SdpObserverLatch implements SdpObserver {
+ private boolean success = false;
+ private SessionDescription sdp = null;
+ private String error = null;
+ private CountDownLatch latch = new CountDownLatch(1);
+
+ public SdpObserverLatch() {}
+
+ @Override
+ public void onCreateSuccess(SessionDescription sdp) {
+ this.sdp = sdp;
+ onSetSuccess();
+ }
+
+ @Override
+ public void onSetSuccess() {
+ success = true;
+ latch.countDown();
+ }
+
+ @Override
+ public void onCreateFailure(String error) {
+ onSetFailure(error);
+ }
+
+ @Override
+ public void onSetFailure(String error) {
+ this.error = error;
+ latch.countDown();
+ }
+
+ public boolean await() {
+ try {
+ assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
+ return getSuccess();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public boolean getSuccess() {
+ return success;
+ }
+
+ public SessionDescription getSdp() {
+ return sdp;
+ }
+
+ public String getError() {
+ return error;
+ }
+ }
+
+ static int videoWindowsMapped = -1;
+
+ private static VideoRenderer createVideoRenderer(
+ VideoRenderer.Callbacks videoCallbacks) {
+ if (!RENDER_TO_GUI) {
+ return new VideoRenderer(videoCallbacks);
+ }
+ ++videoWindowsMapped;
+ assertTrue(videoWindowsMapped < 4);
+ int x = videoWindowsMapped % 2 != 0 ? 700 : 0;
+ int y = videoWindowsMapped >= 2 ? 0 : 500;
+ return VideoRenderer.createGui(x, y);
+ }
+
+ // Return a weak reference to test that ownership is correctly held by
+ // PeerConnection, not by test code.
+ private static WeakReference<MediaStream> addTracksToPC(
+ PeerConnectionFactory factory, PeerConnection pc,
+ VideoSource videoSource,
+ String streamLabel, String videoTrackId, String audioTrackId,
+ VideoRenderer.Callbacks videoCallbacks) {
+ MediaStream lMS = factory.createLocalMediaStream(streamLabel);
+ VideoTrack videoTrack =
+ factory.createVideoTrack(videoTrackId, videoSource);
+ assertNotNull(videoTrack);
+ VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
+ assertNotNull(videoRenderer);
+ videoTrack.addRenderer(videoRenderer);
+ lMS.addTrack(videoTrack);
+ // Just for fun, let's remove and re-add the track.
+ lMS.removeTrack(videoTrack);
+ lMS.addTrack(videoTrack);
+ lMS.addTrack(factory.createAudioTrack(
+ audioTrackId, factory.createAudioSource(new MediaConstraints())));
+ pc.addStream(lMS);
+ return new WeakReference<MediaStream>(lMS);
+ }
+
+ // Used for making sure thread handles are not leaked.
+ // Call initializeThreadCheck before a test and finalizeThreadCheck after
+ // a test.
+ void initializeThreadCheck() {
+ System.gc(); // Encourage any GC-related threads to start up.
+ threadsBeforeTest = allThreads();
+ }
+
+ void finalizeThreadCheck() throws Exception {
+ // TreeSet<String> threadsAfterTest = allThreads();
+
+ // TODO(tommi): Figure out a more reliable way to do this test. As is
+ // we're seeing three possible 'normal' situations:
+ // 1. before and after sets are equal.
+ // 2. before contains 3 threads that do not exist in after.
+ // 3. after contains 3 threads that do not exist in before.
+ //
+ // Maybe it would be better to do the thread enumeration from C++ and get
+ // the thread names as well, in order to determine what these 3 threads are.
+
+ // assertEquals(threadsBeforeTest, threadsAfterTest);
+ // Thread.sleep(100);
+ }
+
+ void doTest() throws Exception {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
+ // NOTE: this _must_ happen while |factory| is alive!
+ // Logging.enableTracing(
+ // "/tmp/PeerConnectionTest-log.txt",
+ // EnumSet.of(Logging.TraceLevel.TRACE_ALL),
+ // Logging.Severity.LS_SENSITIVE);
+
+ // Allow loopback interfaces too since our Android devices often don't
+ // have those.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ factory.setOptions(options);
+
+ MediaConstraints pcConstraints = new MediaConstraints();
+ pcConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+
+ LinkedList<PeerConnection.IceServer> iceServers =
+ new LinkedList<PeerConnection.IceServer>();
+ iceServers.add(new PeerConnection.IceServer(
+ "stun:stun.l.google.com:19302"));
+ iceServers.add(new PeerConnection.IceServer(
+ "turn:fake.example.com", "fakeUsername", "fakePassword"));
+ ObserverExpectations offeringExpectations =
+ new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(
+ iceServers, pcConstraints, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations =
+ new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(
+ iceServers, pcConstraints, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ // We want to use the same camera for offerer & answerer, so create it here
+ // instead of in addTracksToPC.
+ VideoSource videoSource = factory.createVideoSource(
+ VideoCapturer.create(""), new MediaConstraints());
+
+ offeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> oLMS = addTracksToPC(
+ factory, offeringPC, videoSource, "offeredMediaStream",
+ "offeredVideoTrack", "offeredAudioTrack", offeringExpectations);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel(
+ "offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(
+ SignalingState.HAVE_REMOTE_OFFER);
+ answeringExpectations.expectAddStream("offeredMediaStream");
+ // SCTP DataChannels are announced via OPEN messages over the established
+ // connection (not via SDP), so answeringExpectations can only register
+ // expecting the channel during ICE, below.
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertEquals(
+ PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ answeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> aLMS = addTracksToPC(
+ factory, answeringPC, videoSource, "answeredMediaStream",
+ "answeredVideoTrack", "answeredAudioTrack", answeringExpectations);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ offeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceCandidates(2);
+
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringExpectations.expectAddStream("answeredMediaStream");
+
+ offeringExpectations.expectIceConnectionChange(
+ IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(
+ IceConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ //
+ // offeringExpectations.expectIceConnectionChange(
+ // IceConnectionState.COMPLETED);
+ answeringExpectations.expectIceConnectionChange(
+ IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(
+ IceConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+ assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+ assertEquals(offeringPC.getSenders().size(), 2);
+ assertEquals(offeringPC.getReceivers().size(), 2);
+ assertEquals(answeringPC.getSenders().size(), 2);
+ assertEquals(answeringPC.getReceivers().size(), 2);
+
+ if (!RENDER_TO_GUI) {
+ // Wait for at least some frames to be delivered at each end (number
+ // chosen arbitrarily).
+ offeringExpectations.expectFramesDelivered(10);
+ answeringExpectations.expectFramesDelivered(10);
+ }
+
+ offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+ // See commentary about SCTP DataChannels above for why this is here.
+ answeringExpectations.expectDataChannel("offeringDC");
+ answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+ // Wait for at least one ice candidate from the offering PC and forward them to the answering
+ // PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for at least one ice candidate from the answering PC and forward them to the offering
+ // PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+ assertEquals(
+ PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(
+ PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Test send & receive UTF-8 text.
+ answeringExpectations.expectMessage(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ DataChannel.Buffer buffer = new DataChannel.Buffer(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ assertTrue(offeringExpectations.dataChannel.send(buffer));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+ // Construct this binary message two different ways to ensure no
+ // shortcuts are taken.
+ ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+ for (byte i = 1; i < 6; ++i) {
+ expectedBinaryMessage.put(i);
+ }
+ expectedBinaryMessage.flip();
+ offeringExpectations.expectMessage(expectedBinaryMessage, true);
+ assertTrue(answeringExpectations.dataChannel.send(
+ new DataChannel.Buffer(
+ ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }), true)));
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.dataChannel.close();
+ offeringExpectations.dataChannel.close();
+
+ if (RENDER_TO_GUI) {
+ try {
+ Thread.sleep(3000);
+ } catch (Throwable t) {
+ throw new RuntimeException(t);
+ }
+ }
+
+ // TODO(fischman) MOAR test ideas:
+ // - Test that PC.removeStream() works; requires a second
+ // createOffer/createAnswer dance.
+ // - audit each place that uses |constraints| for specifying non-trivial
+ // constraints (and ensure they're honored).
+ // - test error cases
+ // - ensure reasonable coverage of _jni.cc is achieved. Coverage is
+ // extra-important because of all the free-text (class/method names, etc)
+ // in JNI-style programming; make sure no typos!
+ // - Test that shutdown mid-interaction is crash-free.
+
+ // Free the Java-land objects, collect them, and sleep a bit to make sure we
+ // don't get late-arrival crashes after the Java-land objects have been
+ // freed.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ videoSource.dispose();
+ factory.dispose();
+ System.gc();
+ }
+
+ private static void shutdownPC(
+ PeerConnection pc, ObserverExpectations expectations) {
+ expectations.dataChannel.unregisterObserver();
+ expectations.dataChannel.dispose();
+ expectations.expectStatsCallback();
+ assertTrue(pc.getStats(expectations, null));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+ expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectSignalingChange(SignalingState.CLOSED);
+ pc.close();
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+ expectations.expectStatsCallback();
+ assertTrue(pc.getStats(expectations, null));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+ System.out.println("FYI stats: ");
+ int reportIndex = -1;
+ for (StatsReport[] reports : expectations.takeStatsReports()) {
+ System.out.println(" Report #" + (++reportIndex));
+ for (int i = 0; i < reports.length; ++i) {
+ System.out.println(" " + reports[i].toString());
+ }
+ }
+ assertEquals(1, reportIndex);
+ System.out.println("End stats.");
+
+ pc.dispose();
+ }
+
+ // Returns a set of thread IDs belonging to this process, as Strings.
+ private static TreeSet<String> allThreads() {
+ TreeSet<String> threads = new TreeSet<String>();
+ // This pokes at /proc instead of using the Java APIs because we're also
+ // looking for libjingle/webrtc native threads, most of which won't have
+ // attached to the JVM.
+ for (String threadId : (new File("/proc/self/task")).list()) {
+ threads.add(threadId);
+ }
+ return threads;
+ }
+}
diff --git a/talk/app/webrtc/javatests/libjingle_peerconnection_java_unittest.sh b/talk/app/webrtc/javatests/libjingle_peerconnection_java_unittest.sh
new file mode 100755
index 0000000000..d49fd00dc3
--- /dev/null
+++ b/talk/app/webrtc/javatests/libjingle_peerconnection_java_unittest.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+#
+# libjingle
+# Copyright 2013 Google Inc.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# 3. The name of the author may not be used to endorse or promote products
+# derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Wrapper script for running the Java tests under this directory. This script
+# will only work if it has been massaged by the build action and placed in
+# the PRODUCT_DIR (e.g. out/Debug).
+
+# Exit with error immediately if any subcommand fails.
+set -e
+
+# Change directory to the PRODUCT_DIR (e.g. out/Debug).
+cd -P $(dirname $0)
+
+if [ -z "$LD_PRELOAD" ]; then
+ echo "LD_PRELOAD isn't set. It should be set to something like "
+ echo "/usr/lib/x86_64-linux-gnu/libpulse.so.0. I will now refuse to run "
+ echo "to protect you from the consequences of your folly."
+ exit 1
+fi
+
+export CLASSPATH=`pwd`/junit-4.11.jar
+CLASSPATH=$CLASSPATH:`pwd`/libjingle_peerconnection_test.jar
+CLASSPATH=$CLASSPATH:`pwd`/libjingle_peerconnection.jar
+
+# This sets java.library.path so lookup of libjingle_peerconnection_so.so works.
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:`pwd`:`pwd`/lib:`pwd`/lib.target
+
+# The RHS value is replaced by the build action that copies this script to
+# <(PRODUCT_DIR), using search-and-replace by the build action.
+export JAVA_HOME=GYP_JAVA_HOME
+
+${JAVA_HOME}/bin/java -Xcheck:jni -classpath $CLASSPATH \
+ junit.textui.TestRunner org.webrtc.PeerConnectionTestJava
diff --git a/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTestJava.java b/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTestJava.java
new file mode 100644
index 0000000000..2099aa657d
--- /dev/null
+++ b/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTestJava.java
@@ -0,0 +1,62 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import junit.framework.TestCase;
+import org.junit.Test;
+
+/** End-to-end tests for PeerConnection.java. */
+public class PeerConnectionTestJava extends TestCase {
+ private PeerConnectionTest test = new PeerConnectionTest();
+ @Test
+ public void testCompleteSession() throws Exception {
+ test.initializeThreadCheck();
+ test.doTest();
+ test.finalizeThreadCheck();
+ }
+
+ @Test
+ public void testCompleteSessionOnNonMainThread() throws Exception {
+ final Exception[] exceptionHolder = new Exception[1];
+ Thread nonMainThread = new Thread("PeerConnectionTest-nonMainThread") {
+ @Override public void run() {
+ try {
+ test.initializeThreadCheck();
+ test.doTest();
+ test.finalizeThreadCheck();
+ } catch (Exception e) {
+ exceptionHolder[0] = e;
+ }
+ }
+ };
+ nonMainThread.start();
+ nonMainThread.join();
+ if (exceptionHolder[0] != null)
+ throw exceptionHolder[0];
+ }
+}
diff --git a/talk/app/webrtc/jsep.h b/talk/app/webrtc/jsep.h
new file mode 100644
index 0000000000..c12ab85f34
--- /dev/null
+++ b/talk/app/webrtc/jsep.h
@@ -0,0 +1,155 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Interfaces matching the draft-ietf-rtcweb-jsep-01.
+
+#ifndef TALK_APP_WEBRTC_JSEP_H_
+#define TALK_APP_WEBRTC_JSEP_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/refcount.h"
+
+namespace cricket {
+class SessionDescription;
+class Candidate;
+} // namespace cricket
+
+namespace webrtc {
+
+struct SdpParseError {
+ public:
+ // The sdp line that causes the error.
+ std::string line;
+ // Explains the error.
+ std::string description;
+};
+
+// Class representation of an ICE candidate.
+// An instance of this interface is supposed to be owned by one class at
+// a time and is therefore not expected to be thread safe.
+class IceCandidateInterface {
+ public:
+ virtual ~IceCandidateInterface() {}
+  // If present, this contains the identifier of the "media stream
+  // identification" as defined in [RFC 3388] for the m-line this candidate is
+  // associated with.
+ virtual std::string sdp_mid() const = 0;
+  // This indicates the index (starting at zero) of the m-line in the SDP this
+  // candidate is associated with.
+ virtual int sdp_mline_index() const = 0;
+ virtual const cricket::Candidate& candidate() const = 0;
+  // Creates an SDP-ized form of this candidate.
+ virtual bool ToString(std::string* out) const = 0;
+};
+
+// Creates an IceCandidateInterface based on an SDP string.
+// Returns NULL if the sdp string can't be parsed.
+// |error| can be NULL if the caller doesn't care about the failure reason.
+IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& sdp,
+ SdpParseError* error);
+
+// This class represents a collection of candidates for a specific m-line.
+// This class is used in SessionDescriptionInterface to represent all known
+// candidates for a certain m-line.
+class IceCandidateCollection {
+ public:
+ virtual ~IceCandidateCollection() {}
+ virtual size_t count() const = 0;
+  // Returns true if an equivalent |candidate| exists in the collection.
+ virtual bool HasCandidate(const IceCandidateInterface* candidate) const = 0;
+ virtual const IceCandidateInterface* at(size_t index) const = 0;
+};
+
+// Class representation of a Session description.
+// An instance of this interface is supposed to be owned by one class at
+// a time and is therefore not expected to be thread safe.
+class SessionDescriptionInterface {
+ public:
+ // Supported types:
+ static const char kOffer[];
+ static const char kPrAnswer[];
+ static const char kAnswer[];
+
+ virtual ~SessionDescriptionInterface() {}
+ virtual cricket::SessionDescription* description() = 0;
+ virtual const cricket::SessionDescription* description() const = 0;
+ // Get the session id and session version, which are defined based on
+ // RFC 4566 for the SDP o= line.
+ virtual std::string session_id() const = 0;
+ virtual std::string session_version() const = 0;
+ virtual std::string type() const = 0;
+ // Adds the specified candidate to the description.
+ // Ownership is not transferred.
+ // Returns false if the session description does not have a media section that
+ // corresponds to the |candidate| label.
+ virtual bool AddCandidate(const IceCandidateInterface* candidate) = 0;
+  // Returns the number of m-lines in the session description.
+ virtual size_t number_of_mediasections() const = 0;
+ // Returns a collection of all candidates that belong to a certain m-line
+ virtual const IceCandidateCollection* candidates(
+ size_t mediasection_index) const = 0;
+ // Serializes the description to SDP.
+ virtual bool ToString(std::string* out) const = 0;
+};
+
+// Creates a SessionDescriptionInterface based on the SDP string and the type.
+// Returns NULL if the sdp string can't be parsed or the type is unsupported.
+// |error| can be NULL if the caller doesn't care about the failure reason.
+SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
+ const std::string& sdp,
+ SdpParseError* error);
+
+// Jsep CreateOffer and CreateAnswer callback interface.
+class CreateSessionDescriptionObserver : public rtc::RefCountInterface {
+ public:
+ // The implementation of the CreateSessionDescriptionObserver takes
+ // the ownership of the |desc|.
+ virtual void OnSuccess(SessionDescriptionInterface* desc) = 0;
+ virtual void OnFailure(const std::string& error) = 0;
+
+ protected:
+ ~CreateSessionDescriptionObserver() {}
+};
+
+// Jsep SetLocalDescription and SetRemoteDescription callback interface.
+class SetSessionDescriptionObserver : public rtc::RefCountInterface {
+ public:
+ virtual void OnSuccess() = 0;
+ virtual void OnFailure(const std::string& error) = 0;
+
+ protected:
+ ~SetSessionDescriptionObserver() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_JSEP_H_
diff --git a/talk/app/webrtc/jsepicecandidate.cc b/talk/app/webrtc/jsepicecandidate.cc
new file mode 100644
index 0000000000..768bd0a281
--- /dev/null
+++ b/talk/app/webrtc/jsepicecandidate.cc
@@ -0,0 +1,99 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/jsepicecandidate.h"
+
+#include <vector>
+
+#include "talk/app/webrtc/webrtcsdp.h"
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& sdp,
+ SdpParseError* error) {
+ JsepIceCandidate* jsep_ice = new JsepIceCandidate(sdp_mid, sdp_mline_index);
+ if (!jsep_ice->Initialize(sdp, error)) {
+ delete jsep_ice;
+ return NULL;
+ }
+ return jsep_ice;
+}
+
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index)
+ : sdp_mid_(sdp_mid),
+ sdp_mline_index_(sdp_mline_index) {
+}
+
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const cricket::Candidate& candidate)
+ : sdp_mid_(sdp_mid),
+ sdp_mline_index_(sdp_mline_index),
+ candidate_(candidate) {
+}
+
+JsepIceCandidate::~JsepIceCandidate() {
+}
+
+bool JsepIceCandidate::Initialize(const std::string& sdp, SdpParseError* err) {
+ return SdpDeserializeCandidate(sdp, this, err);
+}
+
+bool JsepIceCandidate::ToString(std::string* out) const {
+ if (!out)
+ return false;
+ *out = SdpSerializeCandidate(*this);
+ return !out->empty();
+}
+
+JsepCandidateCollection::~JsepCandidateCollection() {
+ for (std::vector<JsepIceCandidate*>::iterator it = candidates_.begin();
+ it != candidates_.end(); ++it) {
+ delete *it;
+ }
+}
+
+bool JsepCandidateCollection::HasCandidate(
+ const IceCandidateInterface* candidate) const {
+ bool ret = false;
+ for (std::vector<JsepIceCandidate*>::const_iterator it = candidates_.begin();
+ it != candidates_.end(); ++it) {
+ if ((*it)->sdp_mid() == candidate->sdp_mid() &&
+ (*it)->sdp_mline_index() == candidate->sdp_mline_index() &&
+ (*it)->candidate().IsEquivalent(candidate->candidate())) {
+ ret = true;
+ break;
+ }
+ }
+ return ret;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/jsepicecandidate.h b/talk/app/webrtc/jsepicecandidate.h
new file mode 100644
index 0000000000..1d5ef1997f
--- /dev/null
+++ b/talk/app/webrtc/jsepicecandidate.h
@@ -0,0 +1,92 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Implements the IceCandidateInterface.
+
+#ifndef TALK_APP_WEBRTC_JSEPICECANDIDATE_H_
+#define TALK_APP_WEBRTC_JSEPICECANDIDATE_H_
+
+#include <string>
+
+#include "talk/app/webrtc/jsep.h"
+#include "webrtc/p2p/base/candidate.h"
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+
+class JsepIceCandidate : public IceCandidateInterface {
+ public:
+ JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index);
+ JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
+ const cricket::Candidate& candidate);
+ ~JsepIceCandidate();
+ // |error| can be NULL if the caller does not care about the failure reason.
+ bool Initialize(const std::string& sdp, SdpParseError* err);
+ void SetCandidate(const cricket::Candidate& candidate) {
+ candidate_ = candidate;
+ }
+
+ virtual std::string sdp_mid() const { return sdp_mid_; }
+ virtual int sdp_mline_index() const { return sdp_mline_index_; }
+ virtual const cricket::Candidate& candidate() const {
+ return candidate_;
+ }
+
+ virtual bool ToString(std::string* out) const;
+
+ private:
+ std::string sdp_mid_;
+ int sdp_mline_index_;
+ cricket::Candidate candidate_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate);
+};
+
+// Implementation of IceCandidateCollection.
+// This implementation stores JsepIceCandidates.
+class JsepCandidateCollection : public IceCandidateCollection {
+ public:
+ ~JsepCandidateCollection();
+ virtual size_t count() const {
+ return candidates_.size();
+ }
+ virtual bool HasCandidate(const IceCandidateInterface* candidate) const;
+ // Adds and takes ownership of the JsepIceCandidate.
+ virtual void add(JsepIceCandidate* candidate) {
+ candidates_.push_back(candidate);
+ }
+ virtual const IceCandidateInterface* at(size_t index) const {
+ return candidates_[index];
+ }
+
+ private:
+ std::vector<JsepIceCandidate*> candidates_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_JSEPICECANDIDATE_H_
diff --git a/talk/app/webrtc/jsepsessiondescription.cc b/talk/app/webrtc/jsepsessiondescription.cc
new file mode 100644
index 0000000000..24bd9d4195
--- /dev/null
+++ b/talk/app/webrtc/jsepsessiondescription.cc
@@ -0,0 +1,202 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/jsepsessiondescription.h"
+
+#include "talk/app/webrtc/webrtcsdp.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/stringencode.h"
+
+using rtc::scoped_ptr;
+using cricket::SessionDescription;
+
+namespace webrtc {
+
+static const char* kSupportedTypes[] = {
+ JsepSessionDescription::kOffer,
+ JsepSessionDescription::kPrAnswer,
+ JsepSessionDescription::kAnswer
+};
+
+static bool IsTypeSupported(const std::string& type) {
+ bool type_supported = false;
+ for (size_t i = 0; i < ARRAY_SIZE(kSupportedTypes); ++i) {
+ if (kSupportedTypes[i] == type) {
+ type_supported = true;
+ break;
+ }
+ }
+ return type_supported;
+}
+
+const char SessionDescriptionInterface::kOffer[] = "offer";
+const char SessionDescriptionInterface::kPrAnswer[] = "pranswer";
+const char SessionDescriptionInterface::kAnswer[] = "answer";
+
+const int JsepSessionDescription::kDefaultVideoCodecId = 100;
+// This is effectively the maximum frame rate; 30 fps is the camera default.
+const int JsepSessionDescription::kDefaultVideoCodecFramerate = 60;
+const char JsepSessionDescription::kDefaultVideoCodecName[] = "VP8";
+// Used as default max video codec size before we have it in signaling.
+#if defined(ANDROID) || defined(WEBRTC_IOS)
+// Limit default max video codec size for Android to avoid
+// HW VP8 codec initialization failure for resolutions higher
+// than 1280x720 or 720x1280.
+// Same patch for iOS to support 720P in portrait mode.
+const int JsepSessionDescription::kMaxVideoCodecWidth = 1280;
+const int JsepSessionDescription::kMaxVideoCodecHeight = 1280;
+#else
+const int JsepSessionDescription::kMaxVideoCodecWidth = 1920;
+const int JsepSessionDescription::kMaxVideoCodecHeight = 1080;
+#endif
+const int JsepSessionDescription::kDefaultVideoCodecPreference = 1;
+
+SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
+ const std::string& sdp,
+ SdpParseError* error) {
+ if (!IsTypeSupported(type)) {
+ return NULL;
+ }
+
+ JsepSessionDescription* jsep_desc = new JsepSessionDescription(type);
+ if (!jsep_desc->Initialize(sdp, error)) {
+ delete jsep_desc;
+ return NULL;
+ }
+ return jsep_desc;
+}
+
+JsepSessionDescription::JsepSessionDescription(const std::string& type)
+ : type_(type) {
+}
+
+JsepSessionDescription::~JsepSessionDescription() {}
+
+bool JsepSessionDescription::Initialize(
+ cricket::SessionDescription* description,
+ const std::string& session_id,
+ const std::string& session_version) {
+ if (!description)
+ return false;
+
+ session_id_ = session_id;
+ session_version_ = session_version;
+ description_.reset(description);
+ candidate_collection_.resize(number_of_mediasections());
+ return true;
+}
+
+bool JsepSessionDescription::Initialize(const std::string& sdp,
+ SdpParseError* error) {
+ return SdpDeserialize(sdp, this, error);
+}
+
+bool JsepSessionDescription::AddCandidate(
+ const IceCandidateInterface* candidate) {
+ if (!candidate || candidate->sdp_mline_index() < 0)
+ return false;
+ size_t mediasection_index = 0;
+ if (!GetMediasectionIndex(candidate, &mediasection_index)) {
+ return false;
+ }
+ if (mediasection_index >= number_of_mediasections())
+ return false;
+ const std::string& content_name =
+ description_->contents()[mediasection_index].name;
+ const cricket::TransportInfo* transport_info =
+ description_->GetTransportInfoByName(content_name);
+ if (!transport_info) {
+ return false;
+ }
+
+ cricket::Candidate updated_candidate = candidate->candidate();
+ if (updated_candidate.username().empty()) {
+ updated_candidate.set_username(transport_info->description.ice_ufrag);
+ }
+ if (updated_candidate.password().empty()) {
+ updated_candidate.set_password(transport_info->description.ice_pwd);
+ }
+
+ scoped_ptr<JsepIceCandidate> updated_candidate_wrapper(
+ new JsepIceCandidate(candidate->sdp_mid(),
+ static_cast<int>(mediasection_index),
+ updated_candidate));
+ if (!candidate_collection_[mediasection_index].HasCandidate(
+ updated_candidate_wrapper.get()))
+ candidate_collection_[mediasection_index].add(
+ updated_candidate_wrapper.release());
+
+ return true;
+}
+
+size_t JsepSessionDescription::number_of_mediasections() const {
+ if (!description_)
+ return 0;
+ return description_->contents().size();
+}
+
+const IceCandidateCollection* JsepSessionDescription::candidates(
+ size_t mediasection_index) const {
+ if (mediasection_index >= candidate_collection_.size())
+ return NULL;
+ return &candidate_collection_[mediasection_index];
+}
+
+bool JsepSessionDescription::ToString(std::string* out) const {
+ if (!description_ || !out)
+ return false;
+ *out = SdpSerialize(*this);
+ return !out->empty();
+}
+
+bool JsepSessionDescription::GetMediasectionIndex(
+ const IceCandidateInterface* candidate,
+ size_t* index) {
+ if (!candidate || !index) {
+ return false;
+ }
+ *index = static_cast<size_t>(candidate->sdp_mline_index());
+ if (description_ && !candidate->sdp_mid().empty()) {
+ bool found = false;
+ // Try to match the sdp_mid with content name.
+ for (size_t i = 0; i < description_->contents().size(); ++i) {
+ if (candidate->sdp_mid() == description_->contents().at(i).name) {
+ *index = i;
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ // If the sdp_mid is present but we can't find a match, we consider
+ // this an error.
+ return false;
+ }
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/jsepsessiondescription.h b/talk/app/webrtc/jsepsessiondescription.h
new file mode 100644
index 0000000000..756352c240
--- /dev/null
+++ b/talk/app/webrtc/jsepsessiondescription.h
@@ -0,0 +1,106 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Implements the SessionDescriptionInterface.
+
+#ifndef TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_
+#define TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/jsep.h"
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace cricket {
+class SessionDescription;
+}
+
+namespace webrtc {
+
+class JsepSessionDescription : public SessionDescriptionInterface {
+ public:
+ explicit JsepSessionDescription(const std::string& type);
+ virtual ~JsepSessionDescription();
+
+ // |error| can be NULL if the caller does not care about the failure reason.
+ bool Initialize(const std::string& sdp, SdpParseError* error);
+
+ // Takes ownership of |description|.
+ bool Initialize(cricket::SessionDescription* description,
+ const std::string& session_id,
+ const std::string& session_version);
+
+ virtual cricket::SessionDescription* description() {
+ return description_.get();
+ }
+ virtual const cricket::SessionDescription* description() const {
+ return description_.get();
+ }
+ virtual std::string session_id() const {
+ return session_id_;
+ }
+ virtual std::string session_version() const {
+ return session_version_;
+ }
+ virtual std::string type() const {
+ return type_;
+ }
+ // Allow changing the type. Used for testing.
+ void set_type(const std::string& type) { type_ = type; }
+ virtual bool AddCandidate(const IceCandidateInterface* candidate);
+ virtual size_t number_of_mediasections() const;
+ virtual const IceCandidateCollection* candidates(
+ size_t mediasection_index) const;
+ virtual bool ToString(std::string* out) const;
+
+ // Default video encoder settings. The resolution is the max resolution.
+ // TODO(perkj): Implement proper negotiation of video resolution.
+ static const int kDefaultVideoCodecId;
+ static const int kDefaultVideoCodecFramerate;
+ static const char kDefaultVideoCodecName[];
+ static const int kMaxVideoCodecWidth;
+ static const int kMaxVideoCodecHeight;
+ static const int kDefaultVideoCodecPreference;
+
+ private:
+ rtc::scoped_ptr<cricket::SessionDescription> description_;
+ std::string session_id_;
+ std::string session_version_;
+ std::string type_;
+ std::vector<JsepCandidateCollection> candidate_collection_;
+
+ bool GetMediasectionIndex(const IceCandidateInterface* candidate,
+ size_t* index);
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription);
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_
diff --git a/talk/app/webrtc/jsepsessiondescription_unittest.cc b/talk/app/webrtc/jsepsessiondescription_unittest.cc
new file mode 100644
index 0000000000..a60911494c
--- /dev/null
+++ b/talk/app/webrtc/jsepsessiondescription_unittest.cc
@@ -0,0 +1,245 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "webrtc/p2p/base/candidate.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/sessiondescription.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringencode.h"
+
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::SessionDescriptionInterface;
+using rtc::scoped_ptr;
+
+static const char kCandidateUfrag[] = "ufrag";
+static const char kCandidatePwd[] = "pwd";
+static const char kCandidateUfragVoice[] = "ufrag_voice";
+static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kCandidateUfragVideo[] = "ufrag_video";
+static const char kCandidatePwdVideo[] = "pwd_video";
+
+// This creates a session description with both audio and video media contents.
+// In SDP this is described by two m lines, one audio and one video.
+static cricket::SessionDescription* CreateCricketSessionDescription() {
+ cricket::SessionDescription* desc(new cricket::SessionDescription());
+ // AudioContentDescription
+ scoped_ptr<cricket::AudioContentDescription> audio(
+ new cricket::AudioContentDescription());
+
+ // VideoContentDescription
+ scoped_ptr<cricket::VideoContentDescription> video(
+ new cricket::VideoContentDescription());
+
+ audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0, 0));
+ desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+ audio.release());
+
+ video->AddCodec(cricket::VideoCodec(120, "VP8", 640, 480, 30, 0));
+ desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+ video.release());
+
+ EXPECT_TRUE(desc->AddTransportInfo(
+ cricket::TransportInfo(
+ cricket::CN_AUDIO,
+ cricket::TransportDescription(
+ std::vector<std::string>(),
+ kCandidateUfragVoice, kCandidatePwdVoice,
+ cricket::ICEMODE_FULL,
+ cricket::CONNECTIONROLE_NONE,
+ NULL, cricket::Candidates()))));
+ EXPECT_TRUE(desc->AddTransportInfo(
+ cricket::TransportInfo(cricket::CN_VIDEO,
+ cricket::TransportDescription(
+ std::vector<std::string>(),
+ kCandidateUfragVideo, kCandidatePwdVideo,
+ cricket::ICEMODE_FULL,
+ cricket::CONNECTIONROLE_NONE,
+ NULL, cricket::Candidates()))));
+ return desc;
+}
+
+class JsepSessionDescriptionTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ int port = 1234;
+ rtc::SocketAddress address("127.0.0.1", port++);
+ cricket::Candidate candidate(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+ address, 1, "", "", "local", 0, "1");
+ candidate_ = candidate;
+ const std::string session_id =
+ rtc::ToString(rtc::CreateRandomId64());
+ const std::string session_version =
+ rtc::ToString(rtc::CreateRandomId());
+ jsep_desc_.reset(new JsepSessionDescription("dummy"));
+ ASSERT_TRUE(jsep_desc_->Initialize(CreateCricketSessionDescription(),
+ session_id, session_version));
+ }
+
+ std::string Serialize(const SessionDescriptionInterface* desc) {
+ std::string sdp;
+ EXPECT_TRUE(desc->ToString(&sdp));
+ EXPECT_FALSE(sdp.empty());
+ return sdp;
+ }
+
+ SessionDescriptionInterface* DeSerialize(const std::string& sdp) {
+ JsepSessionDescription* desc(new JsepSessionDescription("dummy"));
+ EXPECT_TRUE(desc->Initialize(sdp, NULL));
+ return desc;
+ }
+
+ cricket::Candidate candidate_;
+ rtc::scoped_ptr<JsepSessionDescription> jsep_desc_;
+};
+
+// Test that number_of_mediasections() returns the number of media contents in
+// a session description.
+TEST_F(JsepSessionDescriptionTest, CheckSessionDescription) {
+ EXPECT_EQ(2u, jsep_desc_->number_of_mediasections());
+}
+
+// Test that we can add a candidate to a session description.
+TEST_F(JsepSessionDescriptionTest, AddCandidateWithoutMid) {
+ JsepIceCandidate jsep_candidate("", 0, candidate_);
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+ ASSERT_TRUE(ice_candidates != NULL);
+ EXPECT_EQ(1u, ice_candidates->count());
+ const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+ ASSERT_TRUE(ice_candidate != NULL);
+ candidate_.set_username(kCandidateUfragVoice);
+ candidate_.set_password(kCandidatePwdVoice);
+ EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+ EXPECT_EQ(0, ice_candidate->sdp_mline_index());
+ EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+TEST_F(JsepSessionDescriptionTest, AddCandidateWithMid) {
+ // mid and m-line index don't match, in this case mid is preferred.
+ JsepIceCandidate jsep_candidate("video", 0, candidate_);
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ EXPECT_EQ(0u, jsep_desc_->candidates(0)->count());
+ const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(1);
+ ASSERT_TRUE(ice_candidates != NULL);
+ EXPECT_EQ(1u, ice_candidates->count());
+ const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+ ASSERT_TRUE(ice_candidate != NULL);
+ candidate_.set_username(kCandidateUfragVideo);
+ candidate_.set_password(kCandidatePwdVideo);
+ EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+ // The mline index should have been updated according to mid.
+ EXPECT_EQ(1, ice_candidate->sdp_mline_index());
+}
+
+TEST_F(JsepSessionDescriptionTest, AddCandidateAlreadyHasUfrag) {
+ candidate_.set_username(kCandidateUfrag);
+ candidate_.set_password(kCandidatePwd);
+ JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+ ASSERT_TRUE(ice_candidates != NULL);
+ EXPECT_EQ(1u, ice_candidates->count());
+ const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+ ASSERT_TRUE(ice_candidate != NULL);
+ candidate_.set_username(kCandidateUfrag);
+ candidate_.set_password(kCandidatePwd);
+ EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+
+ EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+// Test that we can not add a candidate if there is no corresponding media
+// content in the session description.
+TEST_F(JsepSessionDescriptionTest, AddBadCandidate) {
+ JsepIceCandidate bad_candidate1("", 55, candidate_);
+ EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate1));
+
+ JsepIceCandidate bad_candidate2("some weird mid", 0, candidate_);
+ EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate2));
+}
+
+// Tests that repeatedly adding the same candidate, with or without credentials,
+// does not increase the number of candidates in the description.
+TEST_F(JsepSessionDescriptionTest, AddCandidateDuplicates) {
+ JsepIceCandidate jsep_candidate("", 0, candidate_);
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+ // Add the same candidate again. It should be ignored.
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+ // Create a new candidate, identical except that the ufrag and pwd are now
+ // populated.
+ candidate_.set_username(kCandidateUfragVoice);
+ candidate_.set_password(kCandidatePwdVoice);
+ JsepIceCandidate jsep_candidate_with_credentials("", 0, candidate_);
+
+ // This should also be identified as redundant and ignored.
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate_with_credentials));
+ EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+}
+
+// Test that we can serialize a JsepSessionDescription and deserialize it again.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) {
+ std::string sdp = Serialize(jsep_desc_.get());
+
+ scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(DeSerialize(sdp));
+ EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());
+
+ std::string parsed_sdp = Serialize(parsed_jsep_desc.get());
+ EXPECT_EQ(sdp, parsed_sdp);
+}
+
+// Tests that we can serialize and deserialize a JsepSessionDescription
+// with candidates.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) {
+ std::string sdp = Serialize(jsep_desc_.get());
+
+ // Add a candidate and check that the serialized result is different.
+ JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+ EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+ std::string sdp_with_candidate = Serialize(jsep_desc_.get());
+ EXPECT_NE(sdp, sdp_with_candidate);
+
+ scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(
+ DeSerialize(sdp_with_candidate));
+ std::string parsed_sdp_with_candidate = Serialize(parsed_jsep_desc.get());
+
+ EXPECT_EQ(sdp_with_candidate, parsed_sdp_with_candidate);
+}
diff --git a/talk/app/webrtc/localaudiosource.cc b/talk/app/webrtc/localaudiosource.cc
new file mode 100644
index 0000000000..63c6f13a3d
--- /dev/null
+++ b/talk/app/webrtc/localaudiosource.cc
@@ -0,0 +1,113 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/localaudiosource.h"
+
+#include <vector>
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/media/base/mediaengine.h"
+
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+
+namespace webrtc {
+
+namespace {
+
+// Converts audio constraints to cricket::AudioOptions. Constraint values
+// that cannot be parsed as booleans are silently ignored.
+void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
+ cricket::AudioOptions* options) {
+ // This design relies on the fact that all the audio constraints are actually
+ // "options", i.e. boolean-valued and always satisfiable. If the constraints
+ // are extended to include non-boolean values or actual format constraints,
+ // a different algorithm will be required.
+ struct {
+ const char* name;
+ cricket::Settable<bool>& value;
+ } key_to_value[] = {
+ {MediaConstraintsInterface::kGoogEchoCancellation,
+ options->echo_cancellation},
+ {MediaConstraintsInterface::kExtendedFilterEchoCancellation,
+ options->extended_filter_aec},
+ {MediaConstraintsInterface::kDAEchoCancellation,
+ options->delay_agnostic_aec},
+ {MediaConstraintsInterface::kAutoGainControl, options->auto_gain_control},
+ {MediaConstraintsInterface::kExperimentalAutoGainControl,
+ options->experimental_agc},
+ {MediaConstraintsInterface::kNoiseSuppression,
+ options->noise_suppression},
+ {MediaConstraintsInterface::kExperimentalNoiseSuppression,
+ options->experimental_ns},
+ {MediaConstraintsInterface::kHighpassFilter, options->highpass_filter},
+ {MediaConstraintsInterface::kTypingNoiseDetection,
+ options->typing_detection},
+ {MediaConstraintsInterface::kAudioMirroring, options->stereo_swapping},
+ {MediaConstraintsInterface::kAecDump, options->aec_dump}
+ };
+
+ for (const auto& constraint : constraints) {
+ bool value = false;
+ if (!rtc::FromString(constraint.value, &value))
+ continue;
+
+ for (auto& entry : key_to_value) {
+ if (constraint.key.compare(entry.name) == 0)
+ entry.value.Set(value);
+ }
+ }
+}
+
+} // namespace
+
+rtc::scoped_refptr<LocalAudioSource> LocalAudioSource::Create(
+ const PeerConnectionFactoryInterface::Options& options,
+ const MediaConstraintsInterface* constraints) {
+ rtc::scoped_refptr<LocalAudioSource> source(
+ new rtc::RefCountedObject<LocalAudioSource>());
+ source->Initialize(options, constraints);
+ return source;
+}
+
+void LocalAudioSource::Initialize(
+ const PeerConnectionFactoryInterface::Options& options,
+ const MediaConstraintsInterface* constraints) {
+ if (!constraints)
+ return;
+
+ // Apply optional constraints first, they will be overwritten by mandatory
+ // constraints.
+ FromConstraints(constraints->GetOptional(), &options_);
+
+ cricket::AudioOptions mandatory_options;
+ FromConstraints(constraints->GetMandatory(), &mandatory_options);
+ options_.SetAll(mandatory_options);
+ source_state_ = kLive;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/localaudiosource.h b/talk/app/webrtc/localaudiosource.h
new file mode 100644
index 0000000000..557745b8b8
--- /dev/null
+++ b/talk/app/webrtc/localaudiosource.h
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_
+#define TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/notifier.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/media/base/mediachannel.h"
+#include "webrtc/base/scoped_ptr.h"
+
+// LocalAudioSource implements AudioSourceInterface.
+// This contains settings for switching audio processing on and off.
+
+namespace webrtc {
+
+class MediaConstraintsInterface;
+
+class LocalAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+ // Creates an instance of LocalAudioSource.
+ static rtc::scoped_refptr<LocalAudioSource> Create(
+ const PeerConnectionFactoryInterface::Options& options,
+ const MediaConstraintsInterface* constraints);
+
+ virtual SourceState state() const { return source_state_; }
+ virtual const cricket::AudioOptions& options() const { return options_; }
+
+ protected:
+ LocalAudioSource()
+ : source_state_(kInitializing) {
+ }
+
+ ~LocalAudioSource() {
+ }
+
+ private:
+ void Initialize(const PeerConnectionFactoryInterface::Options& options,
+ const MediaConstraintsInterface* constraints);
+
+ cricket::AudioOptions options_;
+ SourceState source_state_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_
diff --git a/talk/app/webrtc/localaudiosource_unittest.cc b/talk/app/webrtc/localaudiosource_unittest.cc
new file mode 100644
index 0000000000..8e05c18287
--- /dev/null
+++ b/talk/app/webrtc/localaudiosource_unittest.cc
@@ -0,0 +1,133 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/localaudiosource.h"
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/media/base/fakemediaengine.h"
+#include "talk/media/base/fakevideorenderer.h"
+#include "webrtc/base/gunit.h"
+
+using webrtc::LocalAudioSource;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::PeerConnectionFactoryInterface;
+
+TEST(LocalAudioSourceTest, SetValidOptions) {
+ webrtc::FakeConstraints constraints;
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kGoogEchoCancellation, false);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kExtendedFilterEchoCancellation, true);
+ constraints.AddOptional(MediaConstraintsInterface::kDAEchoCancellation, true);
+ constraints.AddOptional(MediaConstraintsInterface::kAutoGainControl, true);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kExperimentalAutoGainControl, true);
+ constraints.AddMandatory(MediaConstraintsInterface::kNoiseSuppression, false);
+ constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, true);
+ constraints.AddOptional(MediaConstraintsInterface::kAecDump, true);
+
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+ &constraints);
+
+ bool value;
+ EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
+ EXPECT_FALSE(value);
+ EXPECT_TRUE(source->options().extended_filter_aec.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(source->options().delay_agnostic_aec.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(source->options().auto_gain_control.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(source->options().experimental_agc.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(source->options().noise_suppression.Get(&value));
+ EXPECT_FALSE(value);
+ EXPECT_TRUE(source->options().highpass_filter.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(source->options().aec_dump.Get(&value));
+ EXPECT_TRUE(value);
+}
+
+TEST(LocalAudioSourceTest, OptionNotSet) {
+ webrtc::FakeConstraints constraints;
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+ &constraints);
+ bool value;
+ EXPECT_FALSE(source->options().highpass_filter.Get(&value));
+}
+
+TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
+ webrtc::FakeConstraints constraints;
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kGoogEchoCancellation, false);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kGoogEchoCancellation, true);
+
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+ &constraints);
+
+ bool value;
+ EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
+ EXPECT_FALSE(value);
+}
+
+TEST(LocalAudioSourceTest, InvalidOptional) {
+ webrtc::FakeConstraints constraints;
+ constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, false);
+ constraints.AddOptional("invalidKey", false);
+
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+ &constraints);
+
+ EXPECT_EQ(MediaSourceInterface::kLive, source->state());
+ bool value;
+ EXPECT_TRUE(source->options().highpass_filter.Get(&value));
+ EXPECT_FALSE(value);
+}
+
+TEST(LocalAudioSourceTest, InvalidMandatory) {
+ webrtc::FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
+ constraints.AddMandatory("invalidKey", false);
+
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+ &constraints);
+
+ EXPECT_EQ(MediaSourceInterface::kLive, source->state());
+ bool value;
+ EXPECT_TRUE(source->options().highpass_filter.Get(&value));
+ EXPECT_FALSE(value);
+}
diff --git a/talk/app/webrtc/mediaconstraintsinterface.cc b/talk/app/webrtc/mediaconstraintsinterface.cc
new file mode 100644
index 0000000000..46d8029c5b
--- /dev/null
+++ b/talk/app/webrtc/mediaconstraintsinterface.cc
@@ -0,0 +1,163 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+const char MediaConstraintsInterface::kValueTrue[] = "true";
+const char MediaConstraintsInterface::kValueFalse[] = "false";
+
+// Constraints declared as static members in mediaconstraintsinterface.h
+// Specified by draft-alvestrand-constraints-resolution-00b
+const char MediaConstraintsInterface::kMinAspectRatio[] = "minAspectRatio";
+const char MediaConstraintsInterface::kMaxAspectRatio[] = "maxAspectRatio";
+const char MediaConstraintsInterface::kMaxWidth[] = "maxWidth";
+const char MediaConstraintsInterface::kMinWidth[] = "minWidth";
+const char MediaConstraintsInterface::kMaxHeight[] = "maxHeight";
+const char MediaConstraintsInterface::kMinHeight[] = "minHeight";
+const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
+const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
+
+// Audio constraints.
+const char MediaConstraintsInterface::kEchoCancellation[] =
+ "echoCancellation";
+const char MediaConstraintsInterface::kGoogEchoCancellation[] =
+ "googEchoCancellation";
+const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] =
+ "googEchoCancellation2";
+const char MediaConstraintsInterface::kDAEchoCancellation[] =
+ "googDAEchoCancellation";
+const char MediaConstraintsInterface::kAutoGainControl[] =
+ "googAutoGainControl";
+const char MediaConstraintsInterface::kExperimentalAutoGainControl[] =
+ "googAutoGainControl2";
+const char MediaConstraintsInterface::kNoiseSuppression[] =
+ "googNoiseSuppression";
+const char MediaConstraintsInterface::kExperimentalNoiseSuppression[] =
+ "googNoiseSuppression2";
+const char MediaConstraintsInterface::kHighpassFilter[] =
+ "googHighpassFilter";
+const char MediaConstraintsInterface::kTypingNoiseDetection[] =
+ "googTypingNoiseDetection";
+const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
+const char MediaConstraintsInterface::kAecDump[] = "audioDebugRecording";
+
+// Google-specific constraint keys for a local video source (getUserMedia).
+const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction";
+
+// Constraint keys for CreateOffer / CreateAnswer defined in W3C specification.
+const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
+ "OfferToReceiveAudio";
+const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
+ "OfferToReceiveVideo";
+const char MediaConstraintsInterface::kVoiceActivityDetection[] =
+ "VoiceActivityDetection";
+const char MediaConstraintsInterface::kIceRestart[] =
+ "IceRestart";
+// Google specific constraint for BUNDLE enable/disable.
+const char MediaConstraintsInterface::kUseRtpMux[] =
+ "googUseRtpMUX";
+
+// Below constraints should be used during PeerConnection construction.
+const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
+ "DtlsSrtpKeyAgreement";
+const char MediaConstraintsInterface::kEnableRtpDataChannels[] =
+ "RtpDataChannels";
+// Google-specific constraint keys.
+const char MediaConstraintsInterface::kEnableDscp[] = "googDscp";
+const char MediaConstraintsInterface::kEnableIPv6[] = "googIPv6";
+const char MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate[] =
+ "googSuspendBelowMinBitrate";
+const char MediaConstraintsInterface::kNumUnsignalledRecvStreams[] =
+ "googNumUnsignalledRecvStreams";
+const char MediaConstraintsInterface::kCombinedAudioVideoBwe[] =
+ "googCombinedAudioVideoBwe";
+const char MediaConstraintsInterface::kScreencastMinBitrate[] =
+ "googScreencastMinBitrate";
+// TODO(ronghuawu): Remove once cpu overuse detection is stable.
+const char MediaConstraintsInterface::kCpuOveruseDetection[] =
+ "googCpuOveruseDetection";
+const char MediaConstraintsInterface::kCpuUnderuseThreshold[] =
+ "googCpuUnderuseThreshold";
+const char MediaConstraintsInterface::kCpuOveruseThreshold[] =
+ "googCpuOveruseThreshold";
+const char MediaConstraintsInterface::kCpuUnderuseEncodeRsdThreshold[] =
+ "googCpuUnderuseEncodeRsdThreshold";
+const char MediaConstraintsInterface::kCpuOveruseEncodeRsdThreshold[] =
+ "googCpuOveruseEncodeRsdThreshold";
+const char MediaConstraintsInterface::kCpuOveruseEncodeUsage[] =
+ "googCpuOveruseEncodeUsage";
+const char MediaConstraintsInterface::kHighStartBitrate[] =
+ "googHighStartBitrate";
+const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
+
+
+// Set |value| to the value associated with the first appearance of |key|, or
+// return false if |key| is not found.
+bool MediaConstraintsInterface::Constraints::FindFirst(
+ const std::string& key, std::string* value) const {
+ for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
+ if (iter->key == key) {
+ *value = iter->value;
+ return true;
+ }
+ }
+ return false;
+}
+
+// Find the highest-priority instance of the boolean-valued constraint named by
+// |key| and return its value as |value|. |constraints| can be null.
+// If |mandatory_constraints| is non-null, it is incremented if the key appears
+// among the mandatory constraints.
+// Returns true if the key was found and has a valid boolean value.
+// If the key appears multiple times as an optional constraint, appearances
+// after the first are ignored.
+// Note: Because this uses FindFirst, repeated optional constraints whose
+// first instance has an unrecognized value are not handled precisely in
+// accordance with the specification.
+bool FindConstraint(const MediaConstraintsInterface* constraints,
+ const std::string& key, bool* value,
+ size_t* mandatory_constraints) {
+ std::string string_value;
+ if (!constraints) {
+ return false;
+ }
+ if (constraints->GetMandatory().FindFirst(key, &string_value)) {
+ if (mandatory_constraints)
+ ++*mandatory_constraints;
+ return rtc::FromString(string_value, value);
+ }
+ if (constraints->GetOptional().FindFirst(key, &string_value)) {
+ return rtc::FromString(string_value, value);
+ }
+ return false;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediaconstraintsinterface.h b/talk/app/webrtc/mediaconstraintsinterface.h
new file mode 100644
index 0000000000..68ed9f7bcf
--- /dev/null
+++ b/talk/app/webrtc/mediaconstraintsinterface.h
@@ -0,0 +1,153 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the interface for MediaConstraints, corresponding to
+// the definition at
+// http://www.w3.org/TR/mediacapture-streams/#mediastreamconstraints and also
+// used in WebRTC: http://dev.w3.org/2011/webrtc/editor/webrtc.html#constraints.
+
+#ifndef TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_
+#define TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_
+
+#include <string>
+#include <vector>
+
+namespace webrtc {
+
+// MediaConstraintsInterface
+// Interface used for passing arguments about media constraints
+// to the MediaStream and PeerConnection implementation.
+class MediaConstraintsInterface {
+ public:
+ struct Constraint {
+ Constraint() {}
+ Constraint(const std::string& key, const std::string value)
+ : key(key), value(value) {
+ }
+ std::string key;
+ std::string value;
+ };
+
+ class Constraints : public std::vector<Constraint> {
+ public:
+ bool FindFirst(const std::string& key, std::string* value) const;
+ };
+
+ virtual const Constraints& GetMandatory() const = 0;
+ virtual const Constraints& GetOptional() const = 0;
+
+ // Constraint keys used by a local video source.
+ // Specified by draft-alvestrand-constraints-resolution-00b
+ static const char kMinAspectRatio[]; // minAspectRatio
+ static const char kMaxAspectRatio[]; // maxAspectRatio
+ static const char kMaxWidth[]; // maxWidth
+ static const char kMinWidth[]; // minWidth
+ static const char kMaxHeight[]; // maxHeight
+ static const char kMinHeight[]; // minHeight
+ static const char kMaxFrameRate[]; // maxFrameRate
+ static const char kMinFrameRate[]; // minFrameRate
+
+ // Constraint keys used by a local audio source.
+ static const char kEchoCancellation[]; // echoCancellation
+
+ // These keys are google specific.
+ static const char kGoogEchoCancellation[]; // googEchoCancellation
+
+ static const char kExtendedFilterEchoCancellation[]; // googEchoCancellation2
+ static const char kDAEchoCancellation[]; // googDAEchoCancellation
+ static const char kAutoGainControl[]; // googAutoGainControl
+ static const char kExperimentalAutoGainControl[]; // googAutoGainControl2
+ static const char kNoiseSuppression[]; // googNoiseSuppression
+ static const char kExperimentalNoiseSuppression[]; // googNoiseSuppression2
+ static const char kHighpassFilter[]; // googHighpassFilter
+ static const char kTypingNoiseDetection[]; // googTypingNoiseDetection
+ static const char kAudioMirroring[]; // googAudioMirroring
+ static const char kAecDump[]; // audioDebugRecording
+
+ // Google-specific constraint keys for a local video source
+ static const char kNoiseReduction[]; // googNoiseReduction
+
+ // Constraint keys for CreateOffer / CreateAnswer
+ // Specified by the W3C PeerConnection spec
+ static const char kOfferToReceiveVideo[]; // OfferToReceiveVideo
+ static const char kOfferToReceiveAudio[]; // OfferToReceiveAudio
+ static const char kVoiceActivityDetection[]; // VoiceActivityDetection
+ static const char kIceRestart[]; // IceRestart
+ // These keys are google specific.
+ static const char kUseRtpMux[]; // googUseRtpMUX
+
+ // Constraints values.
+ static const char kValueTrue[]; // true
+ static const char kValueFalse[]; // false
+
+ // PeerConnection constraint keys.
+ // Temporary pseudo-constraints used to enable DTLS-SRTP
+ static const char kEnableDtlsSrtp[]; // Enable DTLS-SRTP
+ // Temporary pseudo-constraints used to enable DataChannels
+ static const char kEnableRtpDataChannels[]; // Enable RTP DataChannels
+ // Google-specific constraint keys.
+ // Temporary pseudo-constraint for enabling DSCP through JS.
+ static const char kEnableDscp[]; // googDscp
+ // Constraint to enable IPv6 through JS.
+ static const char kEnableIPv6[]; // googIPv6
+ // Temporary constraint to enable suspend below min bitrate feature.
+ static const char kEnableVideoSuspendBelowMinBitrate[];
+ // googSuspendBelowMinBitrate
+ static const char kNumUnsignalledRecvStreams[];
+ // googNumUnsignalledRecvStreams
+ // Constraint to enable combined audio+video bandwidth estimation.
+ static const char kCombinedAudioVideoBwe[]; // googCombinedAudioVideoBwe
+ static const char kScreencastMinBitrate[]; // googScreencastMinBitrate
+ static const char kCpuOveruseDetection[]; // googCpuOveruseDetection
+ static const char kCpuUnderuseThreshold[]; // googCpuUnderuseThreshold
+ static const char kCpuOveruseThreshold[]; // googCpuOveruseThreshold
+ // Low cpu adaptation threshold for relative standard deviation of encode
+ // time.
+ static const char kCpuUnderuseEncodeRsdThreshold[];
+ // High cpu adaptation threshold for relative standard deviation of encode
+ // time.
+ static const char kCpuOveruseEncodeRsdThreshold[];
+ static const char kCpuOveruseEncodeUsage[]; // googCpuOveruseEncodeUsage
+ static const char kHighStartBitrate[]; // googHighStartBitrate
+ static const char kPayloadPadding[]; // googPayloadPadding
+
+ // The prefix of internal-only constraints whose JS set values should be
+ // stripped by Chrome before passed down to Libjingle.
+ static const char kInternalConstraintPrefix[];
+
+ protected:
+ // Dtor protected as objects shouldn't be deleted via this interface
+ virtual ~MediaConstraintsInterface() {}
+};
+
+bool FindConstraint(const MediaConstraintsInterface* constraints,
+ const std::string& key, bool* value,
+ size_t* mandatory_constraints);
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_
diff --git a/talk/app/webrtc/mediacontroller.cc b/talk/app/webrtc/mediacontroller.cc
new file mode 100644
index 0000000000..f7d85116b1
--- /dev/null
+++ b/talk/app/webrtc/mediacontroller.cc
@@ -0,0 +1,96 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediacontroller.h"
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/call.h"
+
+namespace {
+
+const int kMinBandwidthBps = 30000;
+const int kStartBandwidthBps = 300000;
+const int kMaxBandwidthBps = 2000000;
+
+class MediaController : public webrtc::MediaControllerInterface,
+ public sigslot::has_slots<> {
+ public:
+ MediaController(rtc::Thread* worker_thread,
+ cricket::ChannelManager* channel_manager)
+ : worker_thread_(worker_thread), channel_manager_(channel_manager) {
+ RTC_DCHECK(nullptr != worker_thread);
+ worker_thread_->Invoke<void>(
+ rtc::Bind(&MediaController::Construct_w, this,
+ channel_manager_->media_engine()->GetVoE()));
+ }
+ ~MediaController() override {
+ worker_thread_->Invoke<void>(
+ rtc::Bind(&MediaController::Destruct_w, this));
+ }
+
+ webrtc::Call* call_w() override {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ return call_.get();
+ }
+
+ cricket::ChannelManager* channel_manager() const override {
+ return channel_manager_;
+ }
+
+ private:
+ void Construct_w(webrtc::VoiceEngine* voice_engine) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ webrtc::Call::Config config;
+ config.voice_engine = voice_engine;
+ config.bitrate_config.min_bitrate_bps = kMinBandwidthBps;
+ config.bitrate_config.start_bitrate_bps = kStartBandwidthBps;
+ config.bitrate_config.max_bitrate_bps = kMaxBandwidthBps;
+ call_.reset(webrtc::Call::Create(config));
+ }
+ void Destruct_w() {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ call_.reset();
+ }
+
+ rtc::Thread* const worker_thread_;
+ cricket::ChannelManager* const channel_manager_;
+ rtc::scoped_ptr<webrtc::Call> call_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MediaController);
+};
+}  // namespace
+
+namespace webrtc {
+
+MediaControllerInterface* MediaControllerInterface::Create(
+ rtc::Thread* worker_thread,
+ cricket::ChannelManager* channel_manager) {
+ return new MediaController(worker_thread, channel_manager);
+}
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediacontroller.h b/talk/app/webrtc/mediacontroller.h
new file mode 100644
index 0000000000..1b51be7ca2
--- /dev/null
+++ b/talk/app/webrtc/mediacontroller.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIACONTROLLER_H_
+#define TALK_APP_WEBRTC_MEDIACONTROLLER_H_
+
+#include "webrtc/base/thread.h"
+
+namespace cricket {
+class ChannelManager;
+} // namespace cricket
+
+namespace webrtc {
+class Call;
+class VoiceEngine;
+
+// The MediaController currently owns shared state between media channels, but
+// in the future will create and own RtpSenders and RtpReceivers.
+class MediaControllerInterface {
+ public:
+ static MediaControllerInterface* Create(
+ rtc::Thread* worker_thread,
+ cricket::ChannelManager* channel_manager);
+
+ virtual ~MediaControllerInterface() {}
+ virtual webrtc::Call* call_w() = 0;
+ virtual cricket::ChannelManager* channel_manager() const = 0;
+};
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIACONTROLLER_H_
diff --git a/talk/app/webrtc/mediastream.cc b/talk/app/webrtc/mediastream.cc
new file mode 100644
index 0000000000..0d206300df
--- /dev/null
+++ b/talk/app/webrtc/mediastream.cc
@@ -0,0 +1,112 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastream.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+template <class V>
+static typename V::iterator FindTrack(V* vector,
+ const std::string& track_id) {
+ typename V::iterator it = vector->begin();
+ for (; it != vector->end(); ++it) {
+ if ((*it)->id() == track_id) {
+ break;
+ }
+ }
+ return it;
+};
+
+rtc::scoped_refptr<MediaStream> MediaStream::Create(
+ const std::string& label) {
+ rtc::RefCountedObject<MediaStream>* stream =
+ new rtc::RefCountedObject<MediaStream>(label);
+ return stream;
+}
+
+MediaStream::MediaStream(const std::string& label)
+ : label_(label) {
+}
+
+bool MediaStream::AddTrack(AudioTrackInterface* track) {
+ return AddTrack<AudioTrackVector, AudioTrackInterface>(&audio_tracks_, track);
+}
+
+bool MediaStream::AddTrack(VideoTrackInterface* track) {
+ return AddTrack<VideoTrackVector, VideoTrackInterface>(&video_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(AudioTrackInterface* track) {
+ return RemoveTrack<AudioTrackVector>(&audio_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(VideoTrackInterface* track) {
+ return RemoveTrack<VideoTrackVector>(&video_tracks_, track);
+}
+
+rtc::scoped_refptr<AudioTrackInterface>
+MediaStream::FindAudioTrack(const std::string& track_id) {
+ AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
+ if (it == audio_tracks_.end())
+ return NULL;
+ return *it;
+}
+
+rtc::scoped_refptr<VideoTrackInterface>
+MediaStream::FindVideoTrack(const std::string& track_id) {
+ VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
+ if (it == video_tracks_.end())
+ return NULL;
+ return *it;
+}
+
+template <typename TrackVector, typename Track>
+bool MediaStream::AddTrack(TrackVector* tracks, Track* track) {
+ typename TrackVector::iterator it = FindTrack(tracks, track->id());
+ if (it != tracks->end())
+ return false;
+ tracks->push_back(track);
+ FireOnChanged();
+ return true;
+}
+
+template <typename TrackVector>
+bool MediaStream::RemoveTrack(TrackVector* tracks,
+ MediaStreamTrackInterface* track) {
+ ASSERT(tracks != NULL);
+ if (!track)
+ return false;
+ typename TrackVector::iterator it = FindTrack(tracks, track->id());
+ if (it == tracks->end())
+ return false;
+ tracks->erase(it);
+ FireOnChanged();
+ return true;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediastream.h b/talk/app/webrtc/mediastream.h
new file mode 100644
index 0000000000..240512d9c9
--- /dev/null
+++ b/talk/app/webrtc/mediastream.h
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the implementation of MediaStreamInterface interface.
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAM_H_
+#define TALK_APP_WEBRTC_MEDIASTREAM_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/notifier.h"
+
+namespace webrtc {
+
+class MediaStream : public Notifier<MediaStreamInterface> {
+ public:
+ static rtc::scoped_refptr<MediaStream> Create(const std::string& label);
+
+ std::string label() const override { return label_; }
+
+ bool AddTrack(AudioTrackInterface* track) override;
+ bool AddTrack(VideoTrackInterface* track) override;
+ bool RemoveTrack(AudioTrackInterface* track) override;
+ bool RemoveTrack(VideoTrackInterface* track) override;
+ virtual rtc::scoped_refptr<AudioTrackInterface>
+ FindAudioTrack(const std::string& track_id);
+ virtual rtc::scoped_refptr<VideoTrackInterface>
+ FindVideoTrack(const std::string& track_id);
+
+ AudioTrackVector GetAudioTracks() override { return audio_tracks_; }
+ VideoTrackVector GetVideoTracks() override { return video_tracks_; }
+
+ protected:
+ explicit MediaStream(const std::string& label);
+
+ private:
+ template <typename TrackVector, typename Track>
+ bool AddTrack(TrackVector* Tracks, Track* track);
+ template <typename TrackVector>
+ bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track);
+
+ std::string label_;
+ AudioTrackVector audio_tracks_;
+ VideoTrackVector video_tracks_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAM_H_
diff --git a/talk/app/webrtc/mediastream_unittest.cc b/talk/app/webrtc/mediastream_unittest.cc
new file mode 100644
index 0000000000..2cf930c4c0
--- /dev/null
+++ b/talk/app/webrtc/mediastream_unittest.cc
@@ -0,0 +1,163 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoTrackId[] = "dummy_video_cam_1";
+static const char kAudioTrackId[] = "dummy_microphone_1";
+
+using rtc::scoped_refptr;
+using ::testing::Exactly;
+
+namespace webrtc {
+
+// Helper class to test Observer.
+class MockObserver : public ObserverInterface {
+ public:
+ MockObserver() {}
+
+ MOCK_METHOD0(OnChanged, void());
+};
+
+class MediaStreamTest: public testing::Test {
+ protected:
+ virtual void SetUp() {
+ stream_ = MediaStream::Create(kStreamLabel1);
+ ASSERT_TRUE(stream_.get() != NULL);
+
+ video_track_ = VideoTrack::Create(kVideoTrackId, NULL);
+ ASSERT_TRUE(video_track_.get() != NULL);
+ EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
+
+ audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+
+ ASSERT_TRUE(audio_track_.get() != NULL);
+ EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
+
+ EXPECT_TRUE(stream_->AddTrack(video_track_));
+ EXPECT_FALSE(stream_->AddTrack(video_track_));
+ EXPECT_TRUE(stream_->AddTrack(audio_track_));
+ EXPECT_FALSE(stream_->AddTrack(audio_track_));
+ }
+
+ void ChangeTrack(MediaStreamTrackInterface* track) {
+ MockObserver observer;
+ track->RegisterObserver(&observer);
+
+ EXPECT_CALL(observer, OnChanged())
+ .Times(Exactly(1));
+ track->set_enabled(false);
+ EXPECT_FALSE(track->enabled());
+
+ EXPECT_CALL(observer, OnChanged())
+ .Times(Exactly(1));
+ track->set_state(MediaStreamTrackInterface::kLive);
+ EXPECT_EQ(MediaStreamTrackInterface::kLive, track->state());
+ }
+
+ scoped_refptr<MediaStreamInterface> stream_;
+ scoped_refptr<AudioTrackInterface> audio_track_;
+ scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+TEST_F(MediaStreamTest, GetTrackInfo) {
+ ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+ ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+
+ // Verify the video track.
+ scoped_refptr<webrtc::MediaStreamTrackInterface> video_track(
+ stream_->GetVideoTracks()[0]);
+ EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+ EXPECT_TRUE(video_track->enabled());
+
+ ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+ EXPECT_TRUE(stream_->GetVideoTracks()[0].get() == video_track.get());
+ EXPECT_TRUE(stream_->FindVideoTrack(video_track->id()).get()
+ == video_track.get());
+ video_track = stream_->GetVideoTracks()[0];
+ EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+ EXPECT_TRUE(video_track->enabled());
+
+ // Verify the audio track.
+ scoped_refptr<webrtc::MediaStreamTrackInterface> audio_track(
+ stream_->GetAudioTracks()[0]);
+ EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+ EXPECT_TRUE(audio_track->enabled());
+ ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+ EXPECT_TRUE(stream_->GetAudioTracks()[0].get() == audio_track.get());
+ EXPECT_TRUE(stream_->FindAudioTrack(audio_track->id()).get()
+ == audio_track.get());
+ audio_track = stream_->GetAudioTracks()[0];
+ EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+ EXPECT_TRUE(audio_track->enabled());
+}
+
+TEST_F(MediaStreamTest, RemoveTrack) {
+ MockObserver observer;
+ stream_->RegisterObserver(&observer);
+
+ EXPECT_CALL(observer, OnChanged())
+ .Times(Exactly(2));
+
+ EXPECT_TRUE(stream_->RemoveTrack(audio_track_));
+ EXPECT_FALSE(stream_->RemoveTrack(audio_track_));
+ EXPECT_EQ(0u, stream_->GetAudioTracks().size());
+ EXPECT_EQ(0u, stream_->GetAudioTracks().size());
+
+ EXPECT_TRUE(stream_->RemoveTrack(video_track_));
+ EXPECT_FALSE(stream_->RemoveTrack(video_track_));
+
+ EXPECT_EQ(0u, stream_->GetVideoTracks().size());
+ EXPECT_EQ(0u, stream_->GetVideoTracks().size());
+
+ EXPECT_FALSE(stream_->RemoveTrack(static_cast<AudioTrackInterface*>(NULL)));
+ EXPECT_FALSE(stream_->RemoveTrack(static_cast<VideoTrackInterface*>(NULL)));
+}
+
+TEST_F(MediaStreamTest, ChangeVideoTrack) {
+ scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ stream_->GetVideoTracks()[0]);
+ ChangeTrack(video_track.get());
+}
+
+TEST_F(MediaStreamTest, ChangeAudioTrack) {
+ scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ stream_->GetAudioTracks()[0]);
+ ChangeTrack(audio_track.get());
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamhandler.cc b/talk/app/webrtc/mediastreamhandler.cc
new file mode 100644
index 0000000000..be493f14e7
--- /dev/null
+++ b/talk/app/webrtc/mediastreamhandler.cc
@@ -0,0 +1,29 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(deadbeef): Remove this file once Chrome build files no longer reference
+// it.
diff --git a/talk/app/webrtc/mediastreamhandler.h b/talk/app/webrtc/mediastreamhandler.h
new file mode 100644
index 0000000000..be493f14e7
--- /dev/null
+++ b/talk/app/webrtc/mediastreamhandler.h
@@ -0,0 +1,29 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(deadbeef): Remove this file once Chrome build files no longer reference
+// it.
diff --git a/talk/app/webrtc/mediastreaminterface.h b/talk/app/webrtc/mediastreaminterface.h
new file mode 100644
index 0000000000..5911e85e8e
--- /dev/null
+++ b/talk/app/webrtc/mediastreaminterface.h
@@ -0,0 +1,273 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for MediaStream, MediaTrack and MediaSource.
+// These interfaces are used for implementing MediaStream and MediaTrack as
+// defined in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These
+// interfaces must be used only with PeerConnection. PeerConnectionManager
+// interface provides the factory methods to create MediaStream and MediaTracks.
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace cricket {
+
+class AudioRenderer;
+class VideoCapturer;
+class VideoRenderer;
+class VideoFrame;
+
+} // namespace cricket
+
+namespace webrtc {
+
+// Generic observer interface.
+class ObserverInterface {
+ public:
+ virtual void OnChanged() = 0;
+
+ protected:
+ virtual ~ObserverInterface() {}
+};
+
+class NotifierInterface {
+ public:
+ virtual void RegisterObserver(ObserverInterface* observer) = 0;
+ virtual void UnregisterObserver(ObserverInterface* observer) = 0;
+
+ virtual ~NotifierInterface() {}
+};
+
+// Base class for sources. A MediaStreamTrack has an underlying source that
+// provides media. A source can be shared with multiple tracks.
+// TODO(perkj): Implement sources for local and remote audio tracks and
+// remote video tracks.
+class MediaSourceInterface : public rtc::RefCountInterface,
+ public NotifierInterface {
+ public:
+ enum SourceState {
+ kInitializing,
+ kLive,
+ kEnded,
+ kMuted
+ };
+
+ virtual SourceState state() const = 0;
+
+ protected:
+ virtual ~MediaSourceInterface() {}
+};
+
+// Information about a track.
+class MediaStreamTrackInterface : public rtc::RefCountInterface,
+ public NotifierInterface {
+ public:
+ enum TrackState {
+    kInitializing,  // Track is being negotiated.
+ kLive = 1, // Track alive
+    kEnded = 2,  // Track has ended
+ kFailed = 3, // Track negotiation failed.
+ };
+
+ virtual std::string kind() const = 0;
+ virtual std::string id() const = 0;
+ virtual bool enabled() const = 0;
+ virtual TrackState state() const = 0;
+ virtual bool set_enabled(bool enable) = 0;
+ // These methods should be called by implementation only.
+ virtual bool set_state(TrackState new_state) = 0;
+
+ protected:
+ virtual ~MediaStreamTrackInterface() {}
+};
+
+// Interface for rendering VideoFrames from a VideoTrack
+class VideoRendererInterface {
+ public:
+ // TODO(guoweis): Remove this function. Obsolete. The implementation of
+ // VideoRendererInterface should be able to handle different frame size as
+ // well as pending rotation. If it can't apply the frame rotation by itself,
+ // it should call |frame|.GetCopyWithRotationApplied() to get a frame that has
+ // the rotation applied.
+ virtual void SetSize(int width, int height) {}
+
+ // |frame| may have pending rotation. For clients which can't apply rotation,
+ // |frame|->GetCopyWithRotationApplied() will return a frame that has the
+ // rotation applied.
+ virtual void RenderFrame(const cricket::VideoFrame* frame) = 0;
+
+ protected:
+ // The destructor is protected to prevent deletion via the interface.
+ // This is so that we allow reference counted classes, where the destructor
+ // should never be public, to implement the interface.
+ virtual ~VideoRendererInterface() {}
+};
+
+class VideoSourceInterface;
+
+class VideoTrackInterface : public MediaStreamTrackInterface {
+ public:
+ // Register a renderer that will render all frames received on this track.
+ virtual void AddRenderer(VideoRendererInterface* renderer) = 0;
+ // Deregister a renderer.
+ virtual void RemoveRenderer(VideoRendererInterface* renderer) = 0;
+
+ virtual VideoSourceInterface* GetSource() const = 0;
+
+ protected:
+ virtual ~VideoTrackInterface() {}
+};
+
+// AudioSourceInterface is a reference counted source used for AudioTracks.
+// The same source can be used in multiple AudioTracks.
+class AudioSourceInterface : public MediaSourceInterface {
+ public:
+ class AudioObserver {
+ public:
+ virtual void OnSetVolume(double volume) = 0;
+
+ protected:
+ virtual ~AudioObserver() {}
+ };
+
+ // TODO(xians): Makes all the interface pure virtual after Chrome has their
+ // implementations.
+ // Sets the volume to the source. |volume| is in the range of [0, 10].
+ virtual void SetVolume(double volume) {}
+
+ // Registers/unregisters observer to the audio source.
+ virtual void RegisterAudioObserver(AudioObserver* observer) {}
+ virtual void UnregisterAudioObserver(AudioObserver* observer) {}
+};
+
+// Interface for receiving audio data from an AudioTrack.
+class AudioTrackSinkInterface {
+ public:
+ virtual void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ int number_of_channels,
+ size_t number_of_frames) = 0;
+ protected:
+ virtual ~AudioTrackSinkInterface() {}
+};
+
+// Interface of the audio processor used by the audio track to collect
+// statistics.
+class AudioProcessorInterface : public rtc::RefCountInterface {
+ public:
+ struct AudioProcessorStats {
+ AudioProcessorStats() : typing_noise_detected(false),
+ echo_return_loss(0),
+ echo_return_loss_enhancement(0),
+ echo_delay_median_ms(0),
+ aec_quality_min(0.0),
+ echo_delay_std_ms(0) {}
+ ~AudioProcessorStats() {}
+
+ bool typing_noise_detected;
+ int echo_return_loss;
+ int echo_return_loss_enhancement;
+ int echo_delay_median_ms;
+ float aec_quality_min;
+ int echo_delay_std_ms;
+ };
+
+ // Get audio processor statistics.
+ virtual void GetStats(AudioProcessorStats* stats) = 0;
+
+ protected:
+ virtual ~AudioProcessorInterface() {}
+};
+
+class AudioTrackInterface : public MediaStreamTrackInterface {
+ public:
+ // TODO(xians): Figure out if the following interface should be const or not.
+ virtual AudioSourceInterface* GetSource() const = 0;
+
+ // Add/Remove a sink that will receive the audio data from the track.
+ virtual void AddSink(AudioTrackSinkInterface* sink) = 0;
+ virtual void RemoveSink(AudioTrackSinkInterface* sink) = 0;
+
+ // Get the signal level from the audio track.
+ // Return true on success, otherwise false.
+ // TODO(xians): Change the interface to int GetSignalLevel() and pure virtual
+ // after Chrome has the correct implementation of the interface.
+ virtual bool GetSignalLevel(int* level) { return false; }
+
+ // Get the audio processor used by the audio track. Return NULL if the track
+ // does not have any processor.
+ // TODO(xians): Make the interface pure virtual.
+ virtual rtc::scoped_refptr<AudioProcessorInterface>
+ GetAudioProcessor() { return NULL; }
+
+ // Get a pointer to the audio renderer of this AudioTrack.
+ // The pointer is valid for the lifetime of this AudioTrack.
+ // TODO(xians): Remove the following interface after Chrome switches to
+ // AddSink() and RemoveSink() interfaces.
+ virtual cricket::AudioRenderer* GetRenderer() { return NULL; }
+
+ protected:
+ virtual ~AudioTrackInterface() {}
+};
+
+typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> >
+ AudioTrackVector;
+typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> >
+ VideoTrackVector;
+
+class MediaStreamInterface : public rtc::RefCountInterface,
+ public NotifierInterface {
+ public:
+ virtual std::string label() const = 0;
+
+ virtual AudioTrackVector GetAudioTracks() = 0;
+ virtual VideoTrackVector GetVideoTracks() = 0;
+ virtual rtc::scoped_refptr<AudioTrackInterface>
+ FindAudioTrack(const std::string& track_id) = 0;
+ virtual rtc::scoped_refptr<VideoTrackInterface>
+ FindVideoTrack(const std::string& track_id) = 0;
+
+ virtual bool AddTrack(AudioTrackInterface* track) = 0;
+ virtual bool AddTrack(VideoTrackInterface* track) = 0;
+ virtual bool RemoveTrack(AudioTrackInterface* track) = 0;
+ virtual bool RemoveTrack(VideoTrackInterface* track) = 0;
+
+ protected:
+ virtual ~MediaStreamInterface() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_
diff --git a/talk/app/webrtc/mediastreamprovider.h b/talk/app/webrtc/mediastreamprovider.h
new file mode 100644
index 0000000000..1c62daf9f1
--- /dev/null
+++ b/talk/app/webrtc/mediastreamprovider.h
@@ -0,0 +1,96 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
+
+#include "webrtc/base/basictypes.h"
+
+namespace cricket {
+
+class AudioRenderer;
+class VideoCapturer;
+class VideoRenderer;
+struct AudioOptions;
+struct VideoOptions;
+
+} // namespace cricket
+
+namespace webrtc {
+
+// TODO(deadbeef): Change the key from an ssrc to a "sender_id" or
+// "receiver_id" string, which will be the MSID in the short term and MID in
+// the long term.
+
+// TODO(deadbeef): These interfaces are effectively just a way for the
+// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
+// refactored away eventually, as the classes converge.
+
+// This interface is called by AudioTrackHandler classes in mediastreamhandler.h
+// to change the settings of an audio track connected to certain PeerConnection.
+class AudioProviderInterface {
+ public:
+ // Enable/disable the audio playout of a remote audio track with |ssrc|.
+ virtual void SetAudioPlayout(uint32_t ssrc, bool enable) = 0;
+ // Enable/disable sending audio on the local audio track with |ssrc|.
+ // When |enable| is true |options| should be applied to the audio track.
+ virtual void SetAudioSend(uint32_t ssrc,
+ bool enable,
+ const cricket::AudioOptions& options,
+ cricket::AudioRenderer* renderer) = 0;
+
+ // Sets the audio playout volume of a remote audio track with |ssrc|.
+ // |volume| is in the range of [0, 10].
+ virtual void SetAudioPlayoutVolume(uint32_t ssrc, double volume) = 0;
+
+ protected:
+ virtual ~AudioProviderInterface() {}
+};
+
+// This interface is called by VideoTrackHandler classes in mediastreamhandler.h
+// to change the settings of a video track connected to a certain
+// PeerConnection.
+class VideoProviderInterface {
+ public:
+ virtual bool SetCaptureDevice(uint32_t ssrc,
+ cricket::VideoCapturer* camera) = 0;
+ // Enable/disable the video playout of a remote video track with |ssrc|.
+ virtual void SetVideoPlayout(uint32_t ssrc,
+ bool enable,
+ cricket::VideoRenderer* renderer) = 0;
+ // Enable sending video on the local video track with |ssrc|.
+ virtual void SetVideoSend(uint32_t ssrc,
+ bool enable,
+ const cricket::VideoOptions* options) = 0;
+
+ protected:
+ virtual ~VideoProviderInterface() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
diff --git a/talk/app/webrtc/mediastreamproxy.h b/talk/app/webrtc/mediastreamproxy.h
new file mode 100644
index 0000000000..bde7dcfe2d
--- /dev/null
+++ b/talk/app/webrtc/mediastreamproxy.h
@@ -0,0 +1,54 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/proxy.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(MediaStream)
+ PROXY_CONSTMETHOD0(std::string, label)
+ PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
+ PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
+ PROXY_METHOD1(rtc::scoped_refptr<AudioTrackInterface>,
+ FindAudioTrack, const std::string&)
+ PROXY_METHOD1(rtc::scoped_refptr<VideoTrackInterface>,
+ FindVideoTrack, const std::string&)
+ PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*)
+ PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*)
+ PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*)
+ PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
+ PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
diff --git a/talk/app/webrtc/mediastreamsignaling.cc b/talk/app/webrtc/mediastreamsignaling.cc
new file mode 100644
index 0000000000..b405273902
--- /dev/null
+++ b/talk/app/webrtc/mediastreamsignaling.cc
@@ -0,0 +1,30 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamsignaling.h"
+
+// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
diff --git a/talk/app/webrtc/mediastreamsignaling.h b/talk/app/webrtc/mediastreamsignaling.h
new file mode 100644
index 0000000000..e8c5c110d0
--- /dev/null
+++ b/talk/app/webrtc/mediastreamsignaling.h
@@ -0,0 +1,28 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
diff --git a/talk/app/webrtc/mediastreamtrack.h b/talk/app/webrtc/mediastreamtrack.h
new file mode 100644
index 0000000000..2097d9083d
--- /dev/null
+++ b/talk/app/webrtc/mediastreamtrack.h
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/notifier.h"
+
+namespace webrtc {
+
+// MediaTrack implements the interface common to AudioTrackInterface and
+// VideoTrackInterface.
+template <typename T>
+class MediaStreamTrack : public Notifier<T> {
+ public:
+ typedef typename T::TrackState TypedTrackState;
+
+ virtual std::string id() const { return id_; }
+ virtual MediaStreamTrackInterface::TrackState state() const {
+ return state_;
+ }
+ virtual bool enabled() const { return enabled_; }
+ virtual bool set_enabled(bool enable) {
+ bool fire_on_change = (enable != enabled_);
+ enabled_ = enable;
+ if (fire_on_change) {
+ Notifier<T>::FireOnChanged();
+ }
+ return fire_on_change;
+ }
+ virtual bool set_state(MediaStreamTrackInterface::TrackState new_state) {
+ bool fire_on_change = (state_ != new_state);
+ state_ = new_state;
+ if (fire_on_change)
+ Notifier<T>::FireOnChanged();
+ return true;
+ }
+
+ protected:
+ explicit MediaStreamTrack(const std::string& id)
+ : enabled_(true),
+ id_(id),
+ state_(MediaStreamTrackInterface::kInitializing) {
+ }
+
+ private:
+ bool enabled_;
+ std::string id_;
+ MediaStreamTrackInterface::TrackState state_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_
diff --git a/talk/app/webrtc/mediastreamtrackproxy.h b/talk/app/webrtc/mediastreamtrackproxy.h
new file mode 100644
index 0000000000..d97ba3c62c
--- /dev/null
+++ b/talk/app/webrtc/mediastreamtrackproxy.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file includes proxy classes for tracks. The purpose is
+// to make sure tracks are only accessed from the signaling thread.
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/proxy.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(AudioTrack)
+ PROXY_CONSTMETHOD0(std::string, kind)
+ PROXY_CONSTMETHOD0(std::string, id)
+ PROXY_CONSTMETHOD0(TrackState, state)
+ PROXY_CONSTMETHOD0(bool, enabled)
+ PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
+ PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*)
+ PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*)
+ PROXY_METHOD1(bool, GetSignalLevel, int*)
+ PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>,
+ GetAudioProcessor)
+ PROXY_METHOD0(cricket::AudioRenderer*, GetRenderer)
+
+ PROXY_METHOD1(bool, set_enabled, bool)
+ PROXY_METHOD1(bool, set_state, TrackState)
+
+ PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+BEGIN_PROXY_MAP(VideoTrack)
+ PROXY_CONSTMETHOD0(std::string, kind)
+ PROXY_CONSTMETHOD0(std::string, id)
+ PROXY_CONSTMETHOD0(TrackState, state)
+ PROXY_CONSTMETHOD0(bool, enabled)
+ PROXY_METHOD1(bool, set_enabled, bool)
+ PROXY_METHOD1(bool, set_state, TrackState)
+
+ PROXY_METHOD1(void, AddRenderer, VideoRendererInterface*)
+ PROXY_METHOD1(void, RemoveRenderer, VideoRendererInterface*)
+ PROXY_CONSTMETHOD0(VideoSourceInterface*, GetSource)
+
+ PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
diff --git a/talk/app/webrtc/notifier.h b/talk/app/webrtc/notifier.h
new file mode 100644
index 0000000000..ecc16b9788
--- /dev/null
+++ b/talk/app/webrtc/notifier.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_NOTIFIER_H_
+#define TALK_APP_WEBRTC_NOTIFIER_H_
+
+#include <list>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/common.h"
+
+namespace webrtc {
+
+// Implement a template version of a notifier.
+template <class T>
+class Notifier : public T {
+ public:
+ Notifier() {
+ }
+
+ virtual void RegisterObserver(ObserverInterface* observer) {
+ ASSERT(observer != NULL);
+ observers_.push_back(observer);
+ }
+
+ virtual void UnregisterObserver(ObserverInterface* observer) {
+ for (std::list<ObserverInterface*>::iterator it = observers_.begin();
+ it != observers_.end(); it++) {
+ if (*it == observer) {
+ observers_.erase(it);
+ break;
+ }
+ }
+ }
+
+ void FireOnChanged() {
+ // Copy the list of observers to avoid a crash if the observer object
+ // unregisters as a result of the OnChanged() call. If the same list is used
+ // UnregisterObserver will affect the list make the iterator invalid.
+ std::list<ObserverInterface*> observers = observers_;
+ for (std::list<ObserverInterface*>::iterator it = observers.begin();
+ it != observers.end(); ++it) {
+ (*it)->OnChanged();
+ }
+ }
+
+ protected:
+ std::list<ObserverInterface*> observers_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_NOTIFIER_H_
diff --git a/talk/app/webrtc/objc/.clang-format b/talk/app/webrtc/objc/.clang-format
new file mode 100644
index 0000000000..34694e57dc
--- /dev/null
+++ b/talk/app/webrtc/objc/.clang-format
@@ -0,0 +1,10 @@
+BasedOnStyle: Chromium
+ColumnLimit: 100
+BinPackParameters: false
+AllowAllParametersOfDeclarationOnNextLine: true
+DerivePointerAlignment: false
+PointerAlignment: Right
+SpacesBeforeTrailingComments: 1
+ObjCBlockIndentWidth: 2
+ObjCSpaceAfterProperty: false
+ObjCSpaceBeforeProtocolList: true
diff --git a/talk/app/webrtc/objc/OWNERS b/talk/app/webrtc/objc/OWNERS
new file mode 100644
index 0000000000..cd06158b7f
--- /dev/null
+++ b/talk/app/webrtc/objc/OWNERS
@@ -0,0 +1 @@
+tkchin@webrtc.org
diff --git a/talk/app/webrtc/objc/README b/talk/app/webrtc/objc/README
new file mode 100644
index 0000000000..692fbbc564
--- /dev/null
+++ b/talk/app/webrtc/objc/README
@@ -0,0 +1,80 @@
+This directory contains the ObjectiveC implementation of the
+webrtc::PeerConnection API. This can be built for Mac or iOS. This
+file describes building the API, unit test, and AppRTCDemo sample app.
+
+Prerequisites:
+- Make sure gclient is checking out tools necessary to target iOS: your
+ .gclient file should contain a line like:
+ target_os = ['ios', 'mac']
+ Make sure to re-run gclient sync after adding this to download the tools.
+
+- Set up webrtc-related $GYP_DEFINES; example shell functions that set
+ up for building for iOS-device, iOS-simulator, and Mac (resp) are:
+function wrbase() {
+ cd /path/to/webrtc/trunk
+ export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 libjingle_objc=1"
+ export GYP_GENERATORS="ninja"
+}
+
+function wrios() {
+ wrbase
+ export GYP_DEFINES="$GYP_DEFINES OS=ios target_arch=armv7"
+ export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_ios"
+ export GYP_CROSSCOMPILE=1
+}
+
+function wrsim() {
+ wrbase
+ export GYP_DEFINES="$GYP_DEFINES OS=ios target_arch=ia32"
+ export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_sim"
+ export GYP_CROSSCOMPILE=1
+}
+
+function wrmac() {
+ wrbase
+ export GYP_DEFINES="$GYP_DEFINES OS=mac target_arch=x64"
+ export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_mac"
+}
+
+- Finally, run "gclient runhooks" to generate ninja files.
+
+Example of building & using the unittest & app:
+
+- To build & run the unittest (must target mac):
+ wrmac && gclient runhooks && \
+ ninja -C out_mac/Debug libjingle_peerconnection_objc_test && \
+ ./out_mac/Debug/libjingle_peerconnection_objc_test.app/Contents/MacOS/libjingle_peerconnection_objc_test
+
+- To build & launch the sample app on OSX:
+ wrmac && gclient runhooks && ninja -C out_mac/Debug AppRTCDemo && \
+ ./out_mac/Debug/AppRTCDemo.app/Contents/MacOS/AppRTCDemo
+
+- To build & launch the sample app on the iOS simulator:
+ wrsim && gclient runhooks && ninja -C out_sim/Debug iossim AppRTCDemo && \
+ ./out_sim/Debug/iossim out_sim/Debug/AppRTCDemo.app
+
+- To build & sign the sample app for an iOS device:
+ wrios && gclient runhooks && ninja -C out_ios/Debug-iphoneos AppRTCDemo
+
+- To install the sample app on an iOS device:
+ ideviceinstaller -i out_ios/Debug-iphoneos/AppRTCDemo.app
+ (if installing ideviceinstaller from brew, use --HEAD to get support
+ for .app directories)
+- Alternatively, use iPhone Configuration Utility:
+ - Open "iPhone Configuration Utility" (http://support.apple.com/kb/DL1465)
+ - Click the "Add" icon (command-o)
+ - Open the app under out_ios/Debug-iphoneos/AppRTCDemo (should be added to the Applications tab)
+ - Click the device's name in the left-hand panel and select the Applications tab
+ - Click Install on the AppRTCDemo line.
+ (If you have any problems deploying for the first time, check
+ the Info.plist file to ensure that the Bundle Identifier matches
+ your phone provisioning profile, or use a development wildcard
+ provisioning profile.)
+- Alternately, use ios-deploy:
+ ios-deploy -d -b out_ios/Debug-iphoneos/AppRTCDemo.app
+
+- Once installed:
+ - Tap AppRTCDemo on the iOS device's home screen (might have to scroll to find it).
+ - In desktop chrome, navigate to http://apprtc.appspot.com and note
+ the r=<NNN> room number in the resulting URL; enter that number
+ into the text field on the phone.
diff --git a/talk/app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h b/talk/app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h
new file mode 100644
index 0000000000..ffa6629c41
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h
@@ -0,0 +1,36 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCAVFoundationVideoSource.h"
+
+#include "avfoundationvideocapturer.h"
+
+@interface RTCAVFoundationVideoSource ()
+
+@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer* capturer;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCAVFoundationVideoSource.mm b/talk/app/webrtc/objc/RTCAVFoundationVideoSource.mm
new file mode 100644
index 0000000000..525cead7da
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCAVFoundationVideoSource.mm
@@ -0,0 +1,69 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCAVFoundationVideoSource+Internal.h"
+
+#import "RTCMediaConstraints+Internal.h"
+#import "RTCMediaSource+Internal.h"
+#import "RTCPeerConnectionFactory+Internal.h"
+#import "RTCVideoSource+Internal.h"
+
+@implementation RTCAVFoundationVideoSource
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+ constraints:(RTCMediaConstraints*)constraints {
+ NSParameterAssert(factory);
+ rtc::scoped_ptr<webrtc::AVFoundationVideoCapturer> capturer;
+ capturer.reset(new webrtc::AVFoundationVideoCapturer());
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ factory.nativeFactory->CreateVideoSource(capturer.release(),
+ constraints.constraints);
+ return [super initWithMediaSource:source];
+}
+
+- (BOOL)useBackCamera {
+ return self.capturer->GetUseBackCamera();
+}
+
+- (void)setUseBackCamera:(BOOL)useBackCamera {
+ self.capturer->SetUseBackCamera(useBackCamera);
+}
+
+- (AVCaptureSession*)captureSession {
+ return self.capturer->GetCaptureSession();
+}
+
+- (webrtc::AVFoundationVideoCapturer*)capturer {
+ cricket::VideoCapturer* capturer = self.videoSource->GetVideoCapturer();
+ // This should be safe because no one should have changed the underlying video
+ // source.
+ webrtc::AVFoundationVideoCapturer* foundationCapturer =
+ static_cast<webrtc::AVFoundationVideoCapturer*>(capturer);
+ return foundationCapturer;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCAudioTrack+Internal.h b/talk/app/webrtc/objc/RTCAudioTrack+Internal.h
new file mode 100644
index 0000000000..3d2a9830b0
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCAudioTrack+Internal.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCAudioTrack.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+@interface RTCAudioTrack (Internal)
+
+@property(nonatomic, assign, readonly)
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audioTrack;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCAudioTrack.mm b/talk/app/webrtc/objc/RTCAudioTrack.mm
new file mode 100644
index 0000000000..78635b6b8f
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCAudioTrack.mm
@@ -0,0 +1,45 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCAudioTrack+Internal.h"
+
+#import "RTCMediaStreamTrack+Internal.h"
+
+@implementation RTCAudioTrack
+@end
+
+@implementation RTCAudioTrack (Internal)
+
+- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)audioTrack {
+ return static_cast<webrtc::AudioTrackInterface*>(self.mediaTrack.get());
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCDataChannel+Internal.h b/talk/app/webrtc/objc/RTCDataChannel+Internal.h
new file mode 100644
index 0000000000..78063f4cca
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCDataChannel+Internal.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCDataChannel.h"
+
+#include "talk/app/webrtc/datachannelinterface.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+@interface RTCDataBuffer (Internal)
+
+@property(nonatomic, readonly) const webrtc::DataBuffer* dataBuffer;
+
+- (instancetype)initWithDataBuffer:(const webrtc::DataBuffer&)buffer;
+
+@end
+
+@interface RTCDataChannelInit (Internal)
+
+@property(nonatomic, readonly) const webrtc::DataChannelInit* dataChannelInit;
+
+@end
+
+@interface RTCDataChannel (Internal)
+
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel;
+
+- (instancetype)initWithDataChannel:
+ (rtc::scoped_refptr<webrtc::DataChannelInterface>)dataChannel;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCDataChannel.mm b/talk/app/webrtc/objc/RTCDataChannel.mm
new file mode 100644
index 0000000000..fdb5c99a83
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCDataChannel.mm
@@ -0,0 +1,290 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCDataChannel+Internal.h"
+
+#include "talk/app/webrtc/datachannelinterface.h"
+
+namespace webrtc {
+
+class RTCDataChannelObserver : public DataChannelObserver {
+ public:
+ RTCDataChannelObserver(RTCDataChannel* channel) { _channel = channel; }
+
+ void OnStateChange() override {
+ [_channel.delegate channelDidChangeState:_channel];
+ }
+
+ void OnBufferedAmountChange(uint64_t previousAmount) override {
+ RTCDataChannel* channel = _channel;
+ id<RTCDataChannelDelegate> delegate = channel.delegate;
+ if ([delegate
+ respondsToSelector:@selector(channel:didChangeBufferedAmount:)]) {
+ [delegate channel:channel didChangeBufferedAmount:previousAmount];
+ }
+ }
+
+ void OnMessage(const DataBuffer& buffer) override {
+ if (!_channel.delegate) {
+ return;
+ }
+ RTCDataBuffer* dataBuffer =
+ [[RTCDataBuffer alloc] initWithDataBuffer:buffer];
+ [_channel.delegate channel:_channel didReceiveMessageWithBuffer:dataBuffer];
+ }
+
+ private:
+ __weak RTCDataChannel* _channel;
+};
+}
+
+// TODO(henrika): move to shared location.
+// See https://code.google.com/p/webrtc/issues/detail?id=4773 for details.
+NSString* NSStringFromStdString(const std::string& stdString) {
+ // std::string may contain null termination character so we construct
+ // using length.
+ return [[NSString alloc] initWithBytes:stdString.data()
+ length:stdString.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+std::string StdStringFromNSString(NSString* nsString) {
+ NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+ return std::string(reinterpret_cast<const char*>([charData bytes]),
+ [charData length]);
+}
+
+@implementation RTCDataChannelInit {
+ webrtc::DataChannelInit _dataChannelInit;
+}
+
+- (BOOL)isOrdered {
+ return _dataChannelInit.ordered;
+}
+
+- (void)setIsOrdered:(BOOL)isOrdered {
+ _dataChannelInit.ordered = isOrdered;
+}
+
+- (NSInteger)maxRetransmitTime {
+ return _dataChannelInit.maxRetransmitTime;
+}
+
+- (void)setMaxRetransmitTime:(NSInteger)maxRetransmitTime {
+ _dataChannelInit.maxRetransmitTime = maxRetransmitTime;
+}
+
+- (NSInteger)maxRetransmits {
+ return _dataChannelInit.maxRetransmits;
+}
+
+- (void)setMaxRetransmits:(NSInteger)maxRetransmits {
+ _dataChannelInit.maxRetransmits = maxRetransmits;
+}
+
+- (NSString*)protocol {
+ return NSStringFromStdString(_dataChannelInit.protocol);
+}
+
+- (void)setProtocol:(NSString*)protocol {
+ _dataChannelInit.protocol = StdStringFromNSString(protocol);
+}
+
+- (BOOL)isNegotiated {
+ return _dataChannelInit.negotiated;
+}
+
+- (void)setIsNegotiated:(BOOL)isNegotiated {
+ _dataChannelInit.negotiated = isNegotiated;
+}
+
+- (NSInteger)streamId {
+ return _dataChannelInit.id;
+}
+
+- (void)setStreamId:(NSInteger)streamId {
+ _dataChannelInit.id = streamId;
+}
+
+@end
+
+@implementation RTCDataChannelInit (Internal)
+
+- (const webrtc::DataChannelInit*)dataChannelInit {
+ return &_dataChannelInit;
+}
+
+@end
+
+@implementation RTCDataBuffer {
+ rtc::scoped_ptr<webrtc::DataBuffer> _dataBuffer;
+}
+
+- (instancetype)initWithData:(NSData*)data isBinary:(BOOL)isBinary {
+ NSAssert(data, @"data cannot be nil");
+ if (self = [super init]) {
+ rtc::Buffer buffer(reinterpret_cast<const uint8_t*>([data bytes]),
+ [data length]);
+ _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
+ }
+ return self;
+}
+
+- (NSData*)data {
+ return [NSData dataWithBytes:_dataBuffer->data.data()
+ length:_dataBuffer->data.size()];
+}
+
+- (BOOL)isBinary {
+ return _dataBuffer->binary;
+}
+
+@end
+
+@implementation RTCDataBuffer (Internal)
+
+- (instancetype)initWithDataBuffer:(const webrtc::DataBuffer&)buffer {
+ if (self = [super init]) {
+ _dataBuffer.reset(new webrtc::DataBuffer(buffer));
+ }
+ return self;
+}
+
+- (const webrtc::DataBuffer*)dataBuffer {
+ return _dataBuffer.get();
+}
+
+@end
+
+@implementation RTCDataChannel {
+ rtc::scoped_refptr<webrtc::DataChannelInterface> _dataChannel;
+ rtc::scoped_ptr<webrtc::RTCDataChannelObserver> _observer;
+ BOOL _isObserverRegistered;
+}
+
+- (void)dealloc {
+ // Handles unregistering the observer properly. We need to do this because
+ // there may still be other references to the underlying data channel.
+ self.delegate = nil;
+}
+
+- (NSString*)label {
+ return NSStringFromStdString(_dataChannel->label());
+}
+
+- (BOOL)isReliable {
+ return _dataChannel->reliable();
+}
+
+- (BOOL)isOrdered {
+ return _dataChannel->ordered();
+}
+
+- (NSUInteger)maxRetransmitTimeMs {
+ return _dataChannel->maxRetransmitTime();
+}
+
+- (NSUInteger)maxRetransmits {
+ return _dataChannel->maxRetransmits();
+}
+
+- (NSString*)protocol {
+ return NSStringFromStdString(_dataChannel->protocol());
+}
+
+- (BOOL)isNegotiated {
+ return _dataChannel->negotiated();
+}
+
+- (NSInteger)streamId {
+ return _dataChannel->id();
+}
+
+- (RTCDataChannelState)state {
+ switch (_dataChannel->state()) {
+ case webrtc::DataChannelInterface::DataState::kConnecting:
+ return kRTCDataChannelStateConnecting;
+ case webrtc::DataChannelInterface::DataState::kOpen:
+ return kRTCDataChannelStateOpen;
+ case webrtc::DataChannelInterface::DataState::kClosing:
+ return kRTCDataChannelStateClosing;
+ case webrtc::DataChannelInterface::DataState::kClosed:
+ return kRTCDataChannelStateClosed;
+ }
+}
+
+- (NSUInteger)bufferedAmount {
+ return _dataChannel->buffered_amount();
+}
+
+- (void)setDelegate:(id<RTCDataChannelDelegate>)delegate {
+ if (_delegate == delegate) {
+ return;
+ }
+ if (_isObserverRegistered) {
+ _dataChannel->UnregisterObserver();
+ _isObserverRegistered = NO;
+ }
+ _delegate = delegate;
+ if (_delegate) {
+ _dataChannel->RegisterObserver(_observer.get());
+ _isObserverRegistered = YES;
+ }
+}
+
+- (void)close {
+ _dataChannel->Close();
+}
+
+- (BOOL)sendData:(RTCDataBuffer*)data {
+ return _dataChannel->Send(*data.dataBuffer);
+}
+
+@end
+
+@implementation RTCDataChannel (Internal)
+
+- (instancetype)initWithDataChannel:
+ (rtc::scoped_refptr<webrtc::DataChannelInterface>)
+ dataChannel {
+ NSAssert(dataChannel != NULL, @"dataChannel cannot be NULL");
+ if (self = [super init]) {
+ _dataChannel = dataChannel;
+ _observer.reset(new webrtc::RTCDataChannelObserver(self));
+ }
+ return self;
+}
+
+- (rtc::scoped_refptr<webrtc::DataChannelInterface>)dataChannel {
+ return _dataChannel;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCEAGLVideoView.m b/talk/app/webrtc/objc/RTCEAGLVideoView.m
new file mode 100644
index 0000000000..d19462c9d9
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCEAGLVideoView.m
@@ -0,0 +1,277 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCEAGLVideoView.h"
+
+#import <GLKit/GLKit.h>
+
+#import "RTCI420Frame.h"
+#import "RTCOpenGLVideoRenderer.h"
+
+// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
+// refreshes, which should be 30fps. We wrap the display link in order to avoid
+// a retain cycle since CADisplayLink takes a strong reference onto its target.
+// The timer is paused by default.
+@interface RTCDisplayLinkTimer : NSObject
+
+@property(nonatomic) BOOL isPaused;
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
+
+@implementation RTCDisplayLinkTimer {
+ CADisplayLink* _displayLink;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _displayLink =
+ [CADisplayLink displayLinkWithTarget:self
+ selector:@selector(displayLinkDidFire:)];
+ _displayLink.paused = YES;
+ // Set to half of screen refresh, which should be 30fps.
+ [_displayLink setFrameInterval:2];
+ [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
+ forMode:NSRunLoopCommonModes];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)isPaused {
+ return _displayLink.paused;
+}
+
+- (void)setIsPaused:(BOOL)isPaused {
+ _displayLink.paused = isPaused;
+}
+
+- (void)invalidate {
+ [_displayLink invalidate];
+}
+
+- (void)displayLinkDidFire:(CADisplayLink*)displayLink {
+ _timerHandler();
+}
+
+@end
+
+// RTCEAGLVideoView wraps a GLKView which is setup with
+// enableSetNeedsDisplay = NO for the purpose of gaining control of
+// exactly when to call -[GLKView display]. This need for extra
+// control is required to avoid triggering method calls on GLKView
+// that results in attempting to bind the underlying render buffer
+// when the drawable size would be empty which would result in the
+// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
+// the method that will trigger the binding of the render
+// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
+// is disabled for the reasons above, the RTCEAGLVideoView maintains
+// its own |isDirty| flag.
+
+@interface RTCEAGLVideoView () <GLKViewDelegate>
+// |i420Frame| is set when we receive a frame from a worker thread and is read
+// from the display link callback so atomicity is required.
+@property(atomic, strong) RTCI420Frame* i420Frame;
+@property(nonatomic, readonly) GLKView* glkView;
+@property(nonatomic, readonly) RTCOpenGLVideoRenderer* glRenderer;
+@end
+
+@implementation RTCEAGLVideoView {
+ RTCDisplayLinkTimer* _timer;
+ GLKView* _glkView;
+ RTCOpenGLVideoRenderer* _glRenderer;
+ // This flag should only be set and read on the main thread (e.g. by
+ // setNeedsDisplay)
+ BOOL _isDirty;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ [self configure];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+ if (self = [super initWithCoder:aDecoder]) {
+ [self configure];
+ }
+ return self;
+}
+
+- (void)configure {
+ EAGLContext* glContext =
+ [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!glContext) {
+ glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ _glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:glContext];
+
+ // GLKView manages a framebuffer for us.
+ _glkView = [[GLKView alloc] initWithFrame:CGRectZero
+ context:glContext];
+ _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+ _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
+ _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
+ _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
+ _glkView.delegate = self;
+ _glkView.layer.masksToBounds = YES;
+ _glkView.enableSetNeedsDisplay = NO;
+ [self addSubview:_glkView];
+
+ // Listen to application state in order to clean up OpenGL before app goes
+ // away.
+ NSNotificationCenter* notificationCenter =
+ [NSNotificationCenter defaultCenter];
+ [notificationCenter addObserver:self
+ selector:@selector(willResignActive)
+ name:UIApplicationWillResignActiveNotification
+ object:nil];
+ [notificationCenter addObserver:self
+ selector:@selector(didBecomeActive)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+
+ // Frames are received on a separate thread, so we poll for current frame
+ // using a refresh rate proportional to screen refresh frequency. This
+ // occurs on the main thread.
+ __weak RTCEAGLVideoView* weakSelf = self;
+ _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
+ RTCEAGLVideoView* strongSelf = weakSelf;
+ [strongSelf displayLinkTimerDidFire];
+ }];
+ [self setupGL];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ UIApplicationState appState =
+ [UIApplication sharedApplication].applicationState;
+ if (appState == UIApplicationStateActive) {
+ [self teardownGL];
+ }
+ [_timer invalidate];
+}
+
+#pragma mark - UIView
+
+- (void)setNeedsDisplay {
+ [super setNeedsDisplay];
+ _isDirty = YES;
+}
+
+- (void)setNeedsDisplayInRect:(CGRect)rect {
+ [super setNeedsDisplayInRect:rect];
+ _isDirty = YES;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+ _glkView.frame = self.bounds;
+}
+
+#pragma mark - GLKViewDelegate
+
+// This method is called when the GLKView's content is dirty and needs to be
+// redrawn. This occurs on main thread.
+- (void)glkView:(GLKView*)view drawInRect:(CGRect)rect {
+ // The renderer will draw the frame to the framebuffer corresponding to the
+ // one used by |view|.
+ [_glRenderer drawFrame:self.i420Frame];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ __weak RTCEAGLVideoView* weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTCEAGLVideoView* strongSelf = weakSelf;
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTCI420Frame*)frame {
+ self.i420Frame = frame;
+}
+
+#pragma mark - Private
+
+- (void)displayLinkTimerDidFire {
+ // Don't render unless video frame have changed or the view content
+ // has explicitly been marked dirty.
+ if (!_isDirty && _glRenderer.lastDrawnFrame == self.i420Frame) {
+ return;
+ }
+
+ // Always reset isDirty at this point, even if -[GLKView display]
+ // won't be called in the case the drawable size is empty.
+ _isDirty = NO;
+
+ // Only call -[GLKView display] if the drawable size is
+ // non-empty. Calling display will make the GLKView setup its
+ // render buffer if necessary, but that will fail with error
+ // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
+ if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
+ [_glkView display];
+ }
+}
+
+- (void)setupGL {
+ self.i420Frame = nil;
+ [_glRenderer setupGL];
+ _timer.isPaused = NO;
+}
+
+- (void)teardownGL {
+ self.i420Frame = nil;
+ _timer.isPaused = YES;
+ [_glkView deleteDrawable];
+ [_glRenderer teardownGL];
+}
+
+- (void)didBecomeActive {
+ [self setupGL];
+}
+
+- (void)willResignActive {
+ [self teardownGL];
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCEnumConverter.h b/talk/app/webrtc/objc/RTCEnumConverter.h
new file mode 100644
index 0000000000..eb06c18645
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCEnumConverter.h
@@ -0,0 +1,83 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(tkchin): remove this in favor of having objc headers mirror their C++ counterparts.
+// TODO(tkchin): see if we can move C++ enums into their own file so we can avoid all this
+// conversion code.
+#import "RTCTypes.h"
+
+#import "talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h"
+
+@interface RTCEnumConverter : NSObject
+
+// TODO(tkchin): rename these.
++ (RTCICEConnectionState)convertIceConnectionStateToObjC:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+
++ (RTCICEGatheringState)convertIceGatheringStateToObjC:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+
++ (RTCSignalingState)convertSignalingStateToObjC:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState;
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+ convertStatsOutputLevelToNative:(RTCStatsOutputLevel)statsOutputLevel;
+
++ (RTCSourceState)convertSourceStateToObjC:
+ (webrtc::MediaSourceInterface::SourceState)nativeState;
+
++ (webrtc::MediaStreamTrackInterface::TrackState)convertTrackStateToNative:
+ (RTCTrackState)state;
+
++ (RTCTrackState)convertTrackStateToObjC:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+
++ (RTCIceTransportsType)iceTransportsTypeForNativeEnum:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnumForIceTransportsType:
+ (RTCIceTransportsType)iceTransportsType;
+
++ (RTCBundlePolicy)bundlePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnumForBundlePolicy:
+ (RTCBundlePolicy)bundlePolicy;
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnumForRtcpMuxPolicy:
+ (RTCRtcpMuxPolicy)rtcpMuxPolicy;
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnumForTcpCandidatePolicy:
+ (RTCTcpCandidatePolicy)tcpCandidatePolicy;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCEnumConverter.mm b/talk/app/webrtc/objc/RTCEnumConverter.mm
new file mode 100644
index 0000000000..fa4608a560
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCEnumConverter.mm
@@ -0,0 +1,231 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCEnumConverter.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@implementation RTCEnumConverter
+
++ (RTCICEConnectionState)convertIceConnectionStateToObjC:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceConnectionNew:
+ return RTCICEConnectionNew;
+ case webrtc::PeerConnectionInterface::kIceConnectionChecking:
+ return RTCICEConnectionChecking;
+ case webrtc::PeerConnectionInterface::kIceConnectionConnected:
+ return RTCICEConnectionConnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
+ return RTCICEConnectionCompleted;
+ case webrtc::PeerConnectionInterface::kIceConnectionFailed:
+ return RTCICEConnectionFailed;
+ case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
+ return RTCICEConnectionDisconnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionClosed:
+ return RTCICEConnectionClosed;
+ case webrtc::PeerConnectionInterface::kIceConnectionMax:
+ NSAssert(NO, @"kIceConnectionMax not allowed");
+ return RTCICEConnectionMax;
+ }
+}
+
++ (RTCICEGatheringState)convertIceGatheringStateToObjC:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceGatheringNew:
+ return RTCICEGatheringNew;
+ case webrtc::PeerConnectionInterface::kIceGatheringGathering:
+ return RTCICEGatheringGathering;
+ case webrtc::PeerConnectionInterface::kIceGatheringComplete:
+ return RTCICEGatheringComplete;
+ }
+}
+
++ (RTCSignalingState)convertSignalingStateToObjC:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kStable:
+ return RTCSignalingStable;
+ case webrtc::PeerConnectionInterface::kHaveLocalOffer:
+ return RTCSignalingHaveLocalOffer;
+ case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
+ return RTCSignalingHaveLocalPrAnswer;
+ case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
+ return RTCSignalingHaveRemoteOffer;
+ case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
+ return RTCSignalingHaveRemotePrAnswer;
+ case webrtc::PeerConnectionInterface::kClosed:
+ return RTCSignalingClosed;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+ convertStatsOutputLevelToNative:(RTCStatsOutputLevel)statsOutputLevel {
+ switch (statsOutputLevel) {
+ case RTCStatsOutputLevelStandard:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
+ case RTCStatsOutputLevelDebug:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
+ }
+}
+
++ (RTCSourceState)convertSourceStateToObjC:
+ (webrtc::MediaSourceInterface::SourceState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaSourceInterface::kInitializing:
+ return RTCSourceStateInitializing;
+ case webrtc::MediaSourceInterface::kLive:
+ return RTCSourceStateLive;
+ case webrtc::MediaSourceInterface::kEnded:
+ return RTCSourceStateEnded;
+ case webrtc::MediaSourceInterface::kMuted:
+ return RTCSourceStateMuted;
+ }
+}
+
++ (webrtc::MediaStreamTrackInterface::TrackState)
+ convertTrackStateToNative:(RTCTrackState)state {
+ switch (state) {
+ case RTCTrackStateInitializing:
+ return webrtc::MediaStreamTrackInterface::kInitializing;
+ case RTCTrackStateLive:
+ return webrtc::MediaStreamTrackInterface::kLive;
+ case RTCTrackStateEnded:
+ return webrtc::MediaStreamTrackInterface::kEnded;
+ case RTCTrackStateFailed:
+ return webrtc::MediaStreamTrackInterface::kFailed;
+ }
+}
+
++ (RTCTrackState)convertTrackStateToObjC:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaStreamTrackInterface::kInitializing:
+ return RTCTrackStateInitializing;
+ case webrtc::MediaStreamTrackInterface::kLive:
+ return RTCTrackStateLive;
+ case webrtc::MediaStreamTrackInterface::kEnded:
+ return RTCTrackStateEnded;
+ case webrtc::MediaStreamTrackInterface::kFailed:
+ return RTCTrackStateFailed;
+ }
+}
+
++ (RTCIceTransportsType)iceTransportsTypeForNativeEnum:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kNone:
+ return kRTCIceTransportsTypeNone;
+ case webrtc::PeerConnectionInterface::kRelay:
+ return kRTCIceTransportsTypeRelay;
+ case webrtc::PeerConnectionInterface::kNoHost:
+ return kRTCIceTransportsTypeNoHost;
+ case webrtc::PeerConnectionInterface::kAll:
+ return kRTCIceTransportsTypeAll;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnumForIceTransportsType:
+ (RTCIceTransportsType)iceTransportsType {
+ switch (iceTransportsType) {
+ case kRTCIceTransportsTypeNone:
+ return webrtc::PeerConnectionInterface::kNone;
+ case kRTCIceTransportsTypeRelay:
+ return webrtc::PeerConnectionInterface::kRelay;
+ case kRTCIceTransportsTypeNoHost:
+ return webrtc::PeerConnectionInterface::kNoHost;
+ case kRTCIceTransportsTypeAll:
+ return webrtc::PeerConnectionInterface::kAll;
+ }
+}
+
++ (RTCBundlePolicy)bundlePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
+ return kRTCBundlePolicyBalanced;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
+ return kRTCBundlePolicyMaxBundle;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
+ return kRTCBundlePolicyMaxCompat;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnumForBundlePolicy:
+ (RTCBundlePolicy)bundlePolicy {
+ switch (bundlePolicy) {
+ case kRTCBundlePolicyBalanced:
+ return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
+ case kRTCBundlePolicyMaxBundle:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
+ case kRTCBundlePolicyMaxCompat:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
+ }
+}
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
+ return kRTCRtcpMuxPolicyNegotiate;
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
+ return kRTCRtcpMuxPolicyRequire;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnumForRtcpMuxPolicy:
+ (RTCRtcpMuxPolicy)rtcpMuxPolicy {
+ switch (rtcpMuxPolicy) {
+ case kRTCRtcpMuxPolicyNegotiate:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ case kRTCRtcpMuxPolicyRequire:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ }
+}
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
+ return kRTCTcpCandidatePolicyEnabled;
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
+ return kRTCTcpCandidatePolicyDisabled;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnumForTcpCandidatePolicy:
+ (RTCTcpCandidatePolicy)tcpCandidatePolicy {
+ switch (tcpCandidatePolicy) {
+ case kRTCTcpCandidatePolicyEnabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+ case kRTCTcpCandidatePolicyDisabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ }
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCFileLogger.mm b/talk/app/webrtc/objc/RTCFileLogger.mm
new file mode 100644
index 0000000000..c4e469655d
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCFileLogger.mm
@@ -0,0 +1,157 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCFileLogger.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/filerotatingstream.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/scoped_ptr.h"
+
+NSString *const kDefaultLogDirName = @"webrtc_logs";
+NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
+
+@implementation RTCFileLogger {
+ BOOL _hasStarted;
+ NSString *_dirPath;
+ NSUInteger _maxFileSize;
+ rtc::scoped_ptr<rtc::CallSessionFileRotatingLogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+
+- (instancetype)init {
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(
+ NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = [paths firstObject];
+ NSString *defaultDirPath =
+ [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
+ return [self initWithDirPath:defaultDirPath
+ maxFileSize:kDefaultMaxFileSize];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize {
+ NSParameterAssert(dirPath.length);
+ NSParameterAssert(maxFileSize);
+ if (self = [super init]) {
+ BOOL isDir = NO;
+ NSFileManager *fileManager = [NSFileManager defaultManager];
+ if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
+ if (!isDir) {
+ // Bail if something already exists there.
+ return nil;
+ }
+ } else {
+ if (![fileManager createDirectoryAtPath:dirPath
+ withIntermediateDirectories:NO
+ attributes:nil
+ error:nil]) {
+ // Bail if we failed to create a directory.
+ return nil;
+ }
+ }
+ _dirPath = dirPath;
+ _maxFileSize = maxFileSize;
+ _severity = kRTCFileLoggerSeverityInfo;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self stop];
+}
+
+- (void)start {
+ if (_hasStarted) {
+ return;
+ }
+ _logSink.reset(new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
+ _maxFileSize));
+ if (!_logSink->Init()) {
+ LOG(LS_ERROR) << "Failed to open log files at path: "
+ << _dirPath.UTF8String;
+ _logSink.reset();
+ return;
+ }
+ rtc::LogMessage::LogThreads(true);
+ rtc::LogMessage::LogTimestamps(true);
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)stop {
+ if (!_hasStarted) {
+ return;
+ }
+ RTC_DCHECK(_logSink);
+ rtc::LogMessage::RemoveLogToStream(_logSink.get());
+ _hasStarted = NO;
+ _logSink.reset();
+}
+
+- (NSData *)logData {
+ if (_hasStarted) {
+ return nil;
+ }
+ NSMutableData* logData = [NSMutableData data];
+ rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
+ new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
+ if (!stream->Open()) {
+ return logData;
+ }
+ size_t bufferSize = 0;
+ if (!stream->GetSize(&bufferSize) || bufferSize == 0) {
+ return logData;
+ }
+ size_t read = 0;
+ // Allocate memory using malloc so we can pass it direcly to NSData without
+ // copying.
+ rtc::scoped_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
+ stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
+ logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
+ length:read];
+ return logData;
+}
+
+#pragma mark - Private
+
+- (rtc::LoggingSeverity)rtcSeverity {
+ switch (_severity) {
+ case kRTCFileLoggerSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case kRTCFileLoggerSeverityInfo:
+ return rtc::LS_INFO;
+ case kRTCFileLoggerSeverityWarning:
+ return rtc::LS_WARNING;
+ case kRTCFileLoggerSeverityError:
+ return rtc::LS_ERROR;
+ }
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCI420Frame+Internal.h b/talk/app/webrtc/objc/RTCI420Frame+Internal.h
new file mode 100644
index 0000000000..b38df2087f
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCI420Frame+Internal.h
@@ -0,0 +1,36 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCI420Frame.h"
+
+#include "talk/media/base/videoframe.h"
+
+@interface RTCI420Frame (Internal)
+
+- (instancetype)initWithVideoFrame:(const cricket::VideoFrame*)videoFrame;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCI420Frame.mm b/talk/app/webrtc/objc/RTCI420Frame.mm
new file mode 100644
index 0000000000..c1a9cc0d45
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCI420Frame.mm
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCI420Frame.h"
+
+#include "talk/media/base/videoframe.h"
+#include "webrtc/base/scoped_ptr.h"
+
+@implementation RTCI420Frame {
+ rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+}
+
+- (NSUInteger)width {
+ return _videoFrame->GetWidth();
+}
+
+- (NSUInteger)height {
+ return _videoFrame->GetHeight();
+}
+
+- (NSUInteger)chromaWidth {
+ return _videoFrame->GetChromaWidth();
+}
+
+- (NSUInteger)chromaHeight {
+ return _videoFrame->GetChromaHeight();
+}
+
+- (NSUInteger)chromaSize {
+ return _videoFrame->GetChromaSize();
+}
+
+- (const uint8_t*)yPlane {
+ const cricket::VideoFrame* const_frame = _videoFrame.get();
+ return const_frame->GetYPlane();
+}
+
+- (const uint8_t*)uPlane {
+ const cricket::VideoFrame* const_frame = _videoFrame.get();
+ return const_frame->GetUPlane();
+}
+
+- (const uint8_t*)vPlane {
+ const cricket::VideoFrame* const_frame = _videoFrame.get();
+ return const_frame->GetVPlane();
+}
+
+- (NSInteger)yPitch {
+ return _videoFrame->GetYPitch();
+}
+
+- (NSInteger)uPitch {
+ return _videoFrame->GetUPitch();
+}
+
+- (NSInteger)vPitch {
+ return _videoFrame->GetVPitch();
+}
+
+- (BOOL)makeExclusive {
+ return _videoFrame->MakeExclusive();
+}
+
+@end
+
+@implementation RTCI420Frame (Internal)
+
+- (instancetype)initWithVideoFrame:(cricket::VideoFrame*)videoFrame {
+ if (self = [super init]) {
+ // Keep a shallow copy of the video frame. The underlying frame buffer is
+ // not copied.
+ _videoFrame.reset(videoFrame->Copy());
+ }
+ return self;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCICECandidate+Internal.h b/talk/app/webrtc/objc/RTCICECandidate+Internal.h
new file mode 100644
index 0000000000..7c35ceab22
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCICECandidate+Internal.h
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCICECandidate.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@interface RTCICECandidate (Internal)
+
+// A native candidate rebuilt from this object's SDP fields.
+// NOTE(review): despite |assign|, the getter in the .mm creates a new
+// object on every access and the caller takes ownership — confirm callers
+// delete it.
+@property(nonatomic, assign, readonly) const
+    webrtc::IceCandidateInterface* candidate;
+
+// Populates the Objective-C fields from an existing native candidate.
+- (id)initWithCandidate:(const webrtc::IceCandidateInterface*)candidate;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCICECandidate.mm b/talk/app/webrtc/objc/RTCICECandidate.mm
new file mode 100644
index 0000000000..1510946bac
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCICECandidate.mm
@@ -0,0 +1,87 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCICECandidate+Internal.h"
+
+@implementation RTCICECandidate
+
+@synthesize sdpMid = _sdpMid;
+@synthesize sdpMLineIndex = _sdpMLineIndex;
+@synthesize sdp = _sdp;
+
+// Designated initializer. |sdpMid| and |sdp| must be non-nil; returns nil
+// (after asserting in debug builds) otherwise.
+- (id)initWithMid:(NSString*)sdpMid
+            index:(NSInteger)sdpMLineIndex
+              sdp:(NSString*)sdp {
+  if (!sdpMid || !sdp) {
+    NSAssert(NO, @"nil arguments not allowed");
+    return nil;
+  }
+  if ((self = [super init])) {
+    // Copy the strings so later mutation by the caller cannot affect us.
+    _sdpMid = [sdpMid copy];
+    _sdpMLineIndex = sdpMLineIndex;
+    _sdp = [sdp copy];
+  }
+  return self;
+}
+
+- (NSString*)description {
+  return [NSString stringWithFormat:@"%@:%ld:%@",
+                                    self.sdpMid,
+                                    (long)self.sdpMLineIndex,
+                                    self.sdp];
+}
+
+@end
+
+@implementation RTCICECandidate (Internal)
+
+// Fills the Objective-C fields by serializing |candidate| to its SDP
+// string form; returns nil (after asserting) if serialization fails.
+- (id)initWithCandidate:(const webrtc::IceCandidateInterface*)candidate {
+  if ((self = [super init])) {
+    std::string sdp;
+    if (candidate->ToString(&sdp)) {
+      _sdpMid = @(candidate->sdp_mid().c_str());
+      _sdpMLineIndex = candidate->sdp_mline_index();
+      _sdp = @(sdp.c_str());
+    } else {
+      self = nil;
+      NSAssert(NO, @"ICECandidateInterface->ToString failed");
+    }
+  }
+  return self;
+}
+
+- (const webrtc::IceCandidateInterface*)candidate {
+  // Creates a fresh native candidate on every call; the caller is expected
+  // to take ownership of the returned object.
+  // NOTE(review): the trailing nullptr appears to be the SdpParseError
+  // out-param, so parse failures are silently ignored here — confirm.
+  return webrtc::CreateIceCandidate(
+      [self.sdpMid UTF8String], self.sdpMLineIndex, [self.sdp UTF8String],
+      nullptr);
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCICEServer+Internal.h b/talk/app/webrtc/objc/RTCICEServer+Internal.h
new file mode 100644
index 0000000000..1bbe864fe3
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCICEServer+Internal.h
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCICEServer.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@interface RTCICEServer (Internal)
+
+// Native IceServer struct built by value from this object's
+// URI/username/password on each access (see the .mm).
+@property(nonatomic,
+          assign,
+          readonly) webrtc::PeerConnectionInterface::IceServer iceServer;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCICEServer.mm b/talk/app/webrtc/objc/RTCICEServer.mm
new file mode 100644
index 0000000000..dc1eb2d090
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCICEServer.mm
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCICEServer+Internal.h"
+
+@implementation RTCICEServer
+
+@synthesize URI = _URI;
+@synthesize username = _username;
+@synthesize password = _password;
+
+// Designated initializer; all three arguments must be non-nil.
+- (id)initWithURI:(NSURL*)URI
+         username:(NSString*)username
+         password:(NSString*)password {
+  if (!URI || !username || !password) {
+    NSAssert(NO, @"nil arguments not allowed");
+    self = nil;
+    return nil;
+  }
+  if ((self = [super init])) {
+    _URI = URI;
+    // Copy the credentials so caller-side mutation cannot affect us.
+    _username = [username copy];
+    _password = [password copy];
+  }
+  return self;
+}
+
+- (NSString*)description {
+  // NOTE(review): this includes the password in the description string —
+  // confirm that logging credentials is acceptable for this API.
+  return [NSString stringWithFormat:@"RTCICEServer: [%@:%@:%@]",
+                                    [self.URI absoluteString],
+                                    self.username,
+                                    self.password];
+}
+
+@end
+
+@implementation RTCICEServer (Internal)
+
+// Builds a native IceServer struct from the stored URI and credentials.
+// Returned by value; no ownership concerns.
+- (webrtc::PeerConnectionInterface::IceServer)iceServer {
+  webrtc::PeerConnectionInterface::IceServer iceServer;
+  iceServer.uri = [[self.URI absoluteString] UTF8String];
+  iceServer.username = [self.username UTF8String];
+  iceServer.password = [self.password UTF8String];
+  return iceServer;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCLogging.mm b/talk/app/webrtc/objc/RTCLogging.mm
new file mode 100644
index 0000000000..fab358422c
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCLogging.mm
@@ -0,0 +1,64 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCLogging.h"
+
+#include "webrtc/base/logging.h"
+
+// Maps an RTCLoggingSeverity value onto the corresponding native
+// rtc::LoggingSeverity.
+rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
+  switch (severity) {
+    case kRTCLoggingSeverityVerbose:
+      return rtc::LS_VERBOSE;
+    case kRTCLoggingSeverityInfo:
+      return rtc::LS_INFO;
+    case kRTCLoggingSeverityWarning:
+      return rtc::LS_WARNING;
+    case kRTCLoggingSeverityError:
+      return rtc::LS_ERROR;
+  }
+  // Defensive fallback: without it, a |severity| value outside the enum's
+  // cases would run off the end of a non-void function (undefined behavior).
+  return rtc::LS_INFO;
+}
+
+// Logs |logString| at |severity| through the native logging macros.
+// nil or empty strings are ignored.
+void RTCLogEx(RTCLoggingSeverity severity, NSString* logString) {
+  if (logString.length) {
+    const char* utf8String = logString.UTF8String;
+    LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8String;
+  }
+}
+
+// Sets the minimum severity emitted to the native debug log.
+void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
+  rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
+}
+
+// Returns the last path component of |filePath| (e.g. "foo.mm") as an
+// NSString, for use in log prefixes.
+NSString* RTCFileName(const char* filePath) {
+  // Wrap the C string without copying or taking ownership (freeWhenDone:NO).
+  // This is safe because lastPathComponent returns a new string and
+  // nsFilePath does not outlive |filePath|.
+  NSString* nsFilePath =
+      [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(filePath)
+                                     length:strlen(filePath)
+                                   encoding:NSUTF8StringEncoding
+                               freeWhenDone:NO];
+  return nsFilePath.lastPathComponent;
+}
+
diff --git a/talk/app/webrtc/objc/RTCMediaConstraints+Internal.h b/talk/app/webrtc/objc/RTCMediaConstraints+Internal.h
new file mode 100644
index 0000000000..ac52a8fd3d
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaConstraints+Internal.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaConstraints.h"
+
+#import "RTCMediaConstraintsNative.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+@interface RTCMediaConstraints (Internal)
+
+// The native constraints object backing this wrapper.
+// Ownership is retained for the lifetime of this object.
+@property(nonatomic, assign, readonly) const
+    webrtc::RTCMediaConstraintsNative *constraints;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaConstraints.mm b/talk/app/webrtc/objc/RTCMediaConstraints.mm
new file mode 100644
index 0000000000..f5cfe33619
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaConstraints.mm
@@ -0,0 +1,76 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCMediaConstraints+Internal.h"
+
+#import "RTCPair.h"
+
+#include "webrtc/base/scoped_ptr.h"
+
+// TODO(hughv): Add accessors for mandatory and optional constraints.
+// TODO(hughv): Add description.
+
+@implementation RTCMediaConstraints {
+  rtc::scoped_ptr<webrtc::RTCMediaConstraintsNative> _constraints;
+  webrtc::MediaConstraintsInterface::Constraints _mandatory;
+  webrtc::MediaConstraintsInterface::Constraints _optional;
+}
+
+// Designated initializer. |mandatory| and |optional| are NSArrays of
+// RTCPair; each pair becomes one native (key, value) constraint.
+- (id)initWithMandatoryConstraints:(NSArray*)mandatory
+               optionalConstraints:(NSArray*)optional {
+  if ((self = [super init])) {
+    _mandatory = [[self class] constraintsFromArray:mandatory];
+    _optional = [[self class] constraintsFromArray:optional];
+    // RTCMediaConstraintsNative's constructor copies both vectors, so the
+    // _mandatory/_optional ivars are independent snapshots.
+    _constraints.reset(
+        new webrtc::RTCMediaConstraintsNative(_mandatory, _optional));
+  }
+  return self;
+}
+
+// Converts an NSArray of RTCPair into a native Constraints vector.
++ (webrtc::MediaConstraintsInterface::Constraints)constraintsFromArray:
+        (NSArray*)array {
+  webrtc::MediaConstraintsInterface::Constraints constraints;
+  for (RTCPair* pair in array) {
+    constraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
+        [pair.key UTF8String], [pair.value UTF8String]));
+  }
+  return constraints;
+}
+
+@end
+
+// Category name fixed from "(internal)" to "(Internal)" so it matches the
+// declaration in RTCMediaConstraints+Internal.h; the lowercase spelling
+// defines an unrelated category and leaves the declared one unimplemented
+// (compiler warning: property 'constraints' not implemented).
+@implementation RTCMediaConstraints (Internal)
+
+// Exposes the wrapped native constraints; ownership stays with this
+// RTCMediaConstraints instance for its lifetime.
+- (const webrtc::RTCMediaConstraintsNative*)constraints {
+  return _constraints.get();
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaConstraintsNative.cc b/talk/app/webrtc/objc/RTCMediaConstraintsNative.cc
new file mode 100644
index 0000000000..e502639521
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaConstraintsNative.cc
@@ -0,0 +1,51 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/objc/RTCMediaConstraintsNative.h"
+
+namespace webrtc {
+
+RTCMediaConstraintsNative::~RTCMediaConstraintsNative() {}
+
+RTCMediaConstraintsNative::RTCMediaConstraintsNative() {}
+
+// Copies both constraint lists; this object owns its own snapshots.
+RTCMediaConstraintsNative::RTCMediaConstraintsNative(
+    const MediaConstraintsInterface::Constraints& mandatory,
+    const MediaConstraintsInterface::Constraints& optional)
+    : mandatory_(mandatory), optional_(optional) {}
+
+// MediaConstraintsInterface implementation.
+const MediaConstraintsInterface::Constraints&
+RTCMediaConstraintsNative::GetMandatory() const {
+  return mandatory_;
+}
+
+const MediaConstraintsInterface::Constraints&
+RTCMediaConstraintsNative::GetOptional() const {
+  return optional_;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/objc/RTCMediaConstraintsNative.h b/talk/app/webrtc/objc/RTCMediaConstraintsNative.h
new file mode 100644
index 0000000000..558f2ec5c9
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaConstraintsNative.h
@@ -0,0 +1,50 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_
+#define TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+
+namespace webrtc {
+// Plain C++ holder of media constraints, bridging the Objective-C
+// RTCMediaConstraints wrapper to native MediaConstraintsInterface consumers.
+class RTCMediaConstraintsNative : public MediaConstraintsInterface {
+ public:
+  virtual ~RTCMediaConstraintsNative();
+  RTCMediaConstraintsNative();
+  // Copies both constraint lists into this object.
+  RTCMediaConstraintsNative(
+      const MediaConstraintsInterface::Constraints& mandatory,
+      const MediaConstraintsInterface::Constraints& optional);
+  // MediaConstraintsInterface implementation.
+  virtual const Constraints& GetMandatory() const;
+  virtual const Constraints& GetOptional() const;
+
+ private:
+  MediaConstraintsInterface::Constraints mandatory_;
+  MediaConstraintsInterface::Constraints optional_;
+};
+}  // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_
diff --git a/talk/app/webrtc/objc/RTCMediaSource+Internal.h b/talk/app/webrtc/objc/RTCMediaSource+Internal.h
new file mode 100644
index 0000000000..f60dc610c6
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaSource+Internal.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaSource.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+@interface RTCMediaSource (Internal)
+
+// The wrapped native media source (ref-counted via scoped_refptr).
+@property(nonatomic, assign, readonly)
+    rtc::scoped_refptr<webrtc::MediaSourceInterface> mediaSource;
+
+// Internal initializer; |mediaSource| must be non-null.
+- (id)initWithMediaSource:
+    (rtc::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaSource.mm b/talk/app/webrtc/objc/RTCMediaSource.mm
new file mode 100644
index 0000000000..8d2bb1f417
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaSource.mm
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCMediaSource+Internal.h"
+
+#import "RTCEnumConverter.h"
+
+@implementation RTCMediaSource {
+  rtc::scoped_refptr<webrtc::MediaSourceInterface> _mediaSource;
+}
+
+// Maps the native source state onto the Objective-C RTCSourceState enum.
+- (RTCSourceState)state {
+  return [RTCEnumConverter convertSourceStateToObjC:self.mediaSource->state()];
+}
+
+@end
+
+@implementation RTCMediaSource (Internal)
+
+// Internal initializer; asserts and returns nil if |mediaSource| is null.
+- (id)initWithMediaSource:
+    (rtc::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource {
+  if (!mediaSource) {
+    NSAssert(NO, @"nil arguments not allowed");
+    self = nil;
+    return nil;
+  }
+  if ((self = [super init])) {
+    // scoped_refptr assignment takes a reference on the native source.
+    _mediaSource = mediaSource;
+  }
+  return self;
+}
+
+- (rtc::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource {
+  return _mediaSource;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaStream+Internal.h b/talk/app/webrtc/objc/RTCMediaStream+Internal.h
new file mode 100644
index 0000000000..c5e2d7897d
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaStream+Internal.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaStream.h"
+
+#include "talk/app/webrtc/mediastreamtrack.h"
+
+@interface RTCMediaStream (Internal)
+
+// The wrapped native media stream (ref-counted via scoped_refptr).
+@property(nonatomic, assign, readonly)
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> mediaStream;
+
+// Internal initializer; |mediaStream| must be non-null.
+- (id)initWithMediaStream:
+    (rtc::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaStream.mm b/talk/app/webrtc/objc/RTCMediaStream.mm
new file mode 100644
index 0000000000..87f838d7df
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaStream.mm
@@ -0,0 +1,146 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCMediaStream+Internal.h"
+
+#import "RTCAudioTrack+Internal.h"
+#import "RTCMediaStreamTrack+Internal.h"
+#import "RTCVideoTrack+Internal.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+// Objective-C wrapper around a native MediaStreamInterface. Track add and
+// remove operations are applied to the native stream first and mirrored in
+// the local arrays only on success.
+@implementation RTCMediaStream {
+  NSMutableArray* _audioTracks;
+  NSMutableArray* _videoTracks;
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> _mediaStream;
+}
+
+- (NSString*)description {
+  return [NSString stringWithFormat:@"[%@:A=%lu:V=%lu]",
+                                    [self label],
+                                    (unsigned long)[self.audioTracks count],
+                                    (unsigned long)[self.videoTracks count]];
+}
+
+// Immutable snapshots of the current track wrappers.
+- (NSArray*)audioTracks {
+  return [_audioTracks copy];
+}
+
+- (NSArray*)videoTracks {
+  return [_videoTracks copy];
+}
+
+- (NSString*)label {
+  return @(self.mediaStream->label().c_str());
+}
+
+- (BOOL)addAudioTrack:(RTCAudioTrack*)track {
+  if (self.mediaStream->AddTrack(track.audioTrack)) {
+    [_audioTracks addObject:track];
+    return YES;
+  }
+  return NO;
+}
+
+- (BOOL)addVideoTrack:(RTCVideoTrack*)track {
+  if (self.mediaStream->AddTrack(track.nativeVideoTrack)) {
+    [_videoTracks addObject:track];
+    return YES;
+  }
+  return NO;
+}
+
+- (BOOL)removeAudioTrack:(RTCAudioTrack*)track {
+  NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:track];
+  NSAssert(index != NSNotFound,
+           @"|removeAudioTrack| called on unexpected RTCAudioTrack");
+  if (index != NSNotFound && self.mediaStream->RemoveTrack(track.audioTrack)) {
+    [_audioTracks removeObjectAtIndex:index];
+    return YES;
+  }
+  return NO;
+}
+
+- (BOOL)removeVideoTrack:(RTCVideoTrack*)track {
+  NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track];
+  // Assert message fixed: it previously said |removeAudioTrack| —
+  // a copy-paste error from the method above.
+  NSAssert(index != NSNotFound,
+           @"|removeVideoTrack| called on unexpected RTCVideoTrack");
+  if (index != NSNotFound &&
+      self.mediaStream->RemoveTrack(track.nativeVideoTrack)) {
+    [_videoTracks removeObjectAtIndex:index];
+    return YES;
+  }
+  return NO;
+}
+
+@end
+
+@implementation RTCMediaStream (Internal)
+
+// Wraps |mediaStream| and builds Objective-C wrappers for every audio and
+// video track already present on the native stream.
+- (id)initWithMediaStream:
+          (rtc::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
+  if (!mediaStream) {
+    NSAssert(NO, @"nil arguments not allowed");
+    self = nil;
+    return nil;
+  }
+  if ((self = [super init])) {
+    webrtc::AudioTrackVector audio_tracks = mediaStream->GetAudioTracks();
+    webrtc::VideoTrackVector video_tracks = mediaStream->GetVideoTracks();
+
+    _audioTracks = [NSMutableArray arrayWithCapacity:audio_tracks.size()];
+    _videoTracks = [NSMutableArray arrayWithCapacity:video_tracks.size()];
+    _mediaStream = mediaStream;
+
+    // Mirror each existing native track with an Objective-C wrapper.
+    for (size_t i = 0; i < audio_tracks.size(); ++i) {
+      rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+          audio_tracks[i];
+      RTCAudioTrack* audioTrack =
+          [[RTCAudioTrack alloc] initWithMediaTrack:track];
+      [_audioTracks addObject:audioTrack];
+    }
+
+    for (size_t i = 0; i < video_tracks.size(); ++i) {
+      rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+          video_tracks[i];
+      RTCVideoTrack* videoTrack =
+          [[RTCVideoTrack alloc] initWithMediaTrack:track];
+      [_videoTracks addObject:videoTrack];
+    }
+  }
+  return self;
+}
+
+- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
+  return _mediaStream;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaStreamTrack+Internal.h b/talk/app/webrtc/objc/RTCMediaStreamTrack+Internal.h
new file mode 100644
index 0000000000..e5383fe8e0
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaStreamTrack+Internal.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+@interface RTCMediaStreamTrack (Internal)
+
+@property(nonatomic, assign, readonly)
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> mediaTrack;
+
+- (id)initWithMediaTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCMediaStreamTrack.mm b/talk/app/webrtc/objc/RTCMediaStreamTrack.mm
new file mode 100644
index 0000000000..c529f5a4b2
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCMediaStreamTrack.mm
@@ -0,0 +1,127 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCEnumConverter.h"
+#import "RTCMediaStreamTrack+Internal.h"
+
+namespace webrtc {
+
+class RTCMediaStreamTrackObserver : public ObserverInterface {
+ public:
+ RTCMediaStreamTrackObserver(RTCMediaStreamTrack* track) { _track = track; }
+
+ void OnChanged() override {
+ [_track.delegate mediaStreamTrackDidChange:_track];
+ }
+
+ private:
+ __weak RTCMediaStreamTrack* _track;
+};
+}
+
+@implementation RTCMediaStreamTrack {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _mediaTrack;
+ rtc::scoped_ptr<webrtc::RTCMediaStreamTrackObserver> _observer;
+}
+
+@synthesize label;
+
+- (BOOL)isEqual:(id)other {
+ // Equality is purely based on the label just like the C++ implementation.
+ if (self == other)
+ return YES;
+ if (![other isKindOfClass:[self class]] ||
+ ![self isKindOfClass:[other class]]) {
+ return NO;
+ }
+ RTCMediaStreamTrack* otherMediaStream = (RTCMediaStreamTrack*)other;
+ return [self.label isEqual:otherMediaStream.label];
+}
+
+- (NSUInteger)hash {
+ return [self.label hash];
+}
+
+- (NSString*)kind {
+ return @(self.mediaTrack->kind().c_str());
+}
+
+- (NSString*)label {
+ return @(self.mediaTrack->id().c_str());
+}
+
+- (BOOL)isEnabled {
+ return self.mediaTrack->enabled();
+}
+
+- (BOOL)setEnabled:(BOOL)enabled {
+ return self.mediaTrack->set_enabled(enabled);
+}
+
+- (RTCTrackState)state {
+ return [RTCEnumConverter convertTrackStateToObjC:self.mediaTrack->state()];
+}
+
+- (BOOL)setState:(RTCTrackState)state {
+ return self.mediaTrack->set_state(
+ [RTCEnumConverter convertTrackStateToNative:state]);
+}
+
+@end
+
+@implementation RTCMediaStreamTrack (Internal)
+
+- (id)initWithMediaTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)
+ mediaTrack {
+ if (!mediaTrack) {
+ NSAssert(NO, @"nil arguments not allowed");
+ self = nil;
+ return nil;
+ }
+ if (self = [super init]) {
+ _mediaTrack = mediaTrack;
+ label = @(mediaTrack->id().c_str());
+ _observer.reset(new webrtc::RTCMediaStreamTrackObserver(self));
+ _mediaTrack->RegisterObserver(_observer.get());
+ }
+ return self;
+}
+
+- (void)dealloc {
+ _mediaTrack->UnregisterObserver(_observer.get());
+}
+
+- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
+ return _mediaTrack;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCNSGLVideoView.m b/talk/app/webrtc/objc/RTCNSGLVideoView.m
new file mode 100644
index 0000000000..34a866bd06
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCNSGLVideoView.m
@@ -0,0 +1,158 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCNSGLVideoView.h"
+
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+#import "RTCI420Frame.h"
+#import "RTCOpenGLVideoRenderer.h"
+
+@interface RTCNSGLVideoView ()
+// |i420Frame| is set when we receive a frame from a worker thread and is read
+// from the display link callback so atomicity is required.
+@property(atomic, strong) RTCI420Frame* i420Frame;
+@property(atomic, strong) RTCOpenGLVideoRenderer* glRenderer;
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+ const CVTimeStamp* now,
+ const CVTimeStamp* outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags* flagsOut,
+ void* displayLinkContext) {
+ RTCNSGLVideoView* view = (__bridge RTCNSGLVideoView*)displayLinkContext;
+ [view drawFrame];
+ return kCVReturnSuccess;
+}
+
+@implementation RTCNSGLVideoView {
+ CVDisplayLinkRef _displayLink;
+}
+
+- (void)dealloc {
+ [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+ [self drawFrame];
+}
+
+- (void)reshape {
+ [super reshape];
+ NSRect frame = [self frame];
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ glViewport(0, 0, frame.size.width, frame.size.height);
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+ NSOpenGLContext* context = [self openGLContext];
+ [super lockFocus];
+ if ([context view] != self) {
+ [context setView:self];
+ }
+ [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+ [super prepareOpenGL];
+ if (!self.glRenderer) {
+ self.glRenderer =
+ [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
+ }
+ [self.glRenderer setupGL];
+ [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+ [self.glRenderer teardownGL];
+ self.glRenderer = nil;
+ [super clearGLContext];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate videoView:self didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTCI420Frame*)frame {
+ self.i420Frame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+ RTCI420Frame* i420Frame = self.i420Frame;
+ if (self.glRenderer.lastDrawnFrame != i420Frame) {
+ // This method may be called from CVDisplayLink callback which isn't on the
+ // main thread so we have to lock the GL context before drawing.
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ [self.glRenderer drawFrame:i420Frame];
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+ }
+}
+
+- (void)setupDisplayLink {
+ if (_displayLink) {
+ return;
+ }
+ // Synchronize buffer swaps with vertical refresh rate.
+ GLint swapInt = 1;
+ [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+ // Create display link.
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+ CVDisplayLinkSetOutputCallback(_displayLink,
+ &OnDisplayLinkFired,
+ (__bridge void*)self);
+ // Set the display link for the current renderer.
+ CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+ CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+ CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+ _displayLink, cglContext, cglPixelFormat);
+ CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+ if (!_displayLink) {
+ return;
+ }
+ CVDisplayLinkRelease(_displayLink);
+ _displayLink = NULL;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCOpenGLVideoRenderer.mm b/talk/app/webrtc/objc/RTCOpenGLVideoRenderer.mm
new file mode 100644
index 0000000000..cfead91bca
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCOpenGLVideoRenderer.mm
@@ -0,0 +1,503 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCOpenGLVideoRenderer.h"
+
+#include <string.h>
+
+#include "webrtc/base/scoped_ptr.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "RTCI420Frame.h"
+
+// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
+// anticipation of that happening in the future.
+
+#if TARGET_OS_IPHONE
+#define RTC_PIXEL_FORMAT GL_LUMINANCE
+#define SHADER_VERSION
+#define VERTEX_SHADER_IN "attribute"
+#define VERTEX_SHADER_OUT "varying"
+#define FRAGMENT_SHADER_IN "varying"
+#define FRAGMENT_SHADER_OUT
+#define FRAGMENT_SHADER_COLOR "gl_FragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture2D"
+#else
+#define RTC_PIXEL_FORMAT GL_RED
+#define SHADER_VERSION "#version 150\n"
+#define VERTEX_SHADER_IN "in"
+#define VERTEX_SHADER_OUT "out"
+#define FRAGMENT_SHADER_IN "in"
+#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
+#define FRAGMENT_SHADER_COLOR "fragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture"
+#endif
+
+// Vertex shader doesn't do anything except pass coordinates through.
+static const char kVertexShaderSource[] =
+ SHADER_VERSION
+ VERTEX_SHADER_IN " vec2 position;\n"
+ VERTEX_SHADER_IN " vec2 texcoord;\n"
+ VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
+ "void main() {\n"
+ " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
+ " v_texcoord = texcoord;\n"
+ "}\n";
+
+// Fragment shader converts YUV values from input textures into a final RGB
+// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
+static const char kFragmentShaderSource[] =
+ SHADER_VERSION
+ "precision highp float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureU;\n"
+ "uniform lowp sampler2D s_textureV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " float y, u, v, r, g, b;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
+ " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
+ " u = u - 0.5;\n"
+ " v = v - 0.5;\n"
+ " r = y + 1.403 * v;\n"
+ " g = y - 0.344 * u - 0.714 * v;\n"
+ " b = y + 1.770 * u;\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
+ " }\n";
+
+// Compiles a shader of the given |type| with GLSL source |source| and returns
+// the shader handle or 0 on error.
+GLuint CreateShader(GLenum type, const GLchar* source) {
+ GLuint shader = glCreateShader(type);
+ if (!shader) {
+ return 0;
+ }
+ glShaderSource(shader, 1, &source, NULL);
+ glCompileShader(shader);
+ GLint compileStatus = GL_FALSE;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+ if (compileStatus == GL_FALSE) {
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+}
+
+// Links a shader program with the given vertex and fragment shaders and
+// returns the program handle or 0 on error.
+GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
+ if (vertexShader == 0 || fragmentShader == 0) {
+ return 0;
+ }
+ GLuint program = glCreateProgram();
+ if (!program) {
+ return 0;
+ }
+ glAttachShader(program, vertexShader);
+ glAttachShader(program, fragmentShader);
+ glLinkProgram(program);
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus == GL_FALSE) {
+ glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+}
+
+// When modelview and projection matrices are identity (default) the world is
+// contained in the square around origin with unit size 2. Drawing to these
+// coordinates is equivalent to drawing to the entire screen. The texture is
+// stretched over that square using texture coordinates (u, v) that range
+// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
+// here because the incoming frame has origin in upper left hand corner but
+// OpenGL expects origin in bottom left corner.
+const GLfloat gVertices[] = {
+ // X, Y, U, V.
+ -1, -1, 0, 1, // Bottom left.
+ 1, -1, 1, 1, // Bottom right.
+ 1, 1, 1, 0, // Top right.
+ -1, 1, 0, 0, // Top left.
+};
+
+// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
+// of 3 textures are used here, one for each of the Y, U and V planes. Having
+// two sets alleviates CPU blockage in the event that the GPU is asked to render
+// to a texture that is already in use.
+static const GLsizei kNumTextureSets = 2;
+static const GLsizei kNumTextures = 3 * kNumTextureSets;
+
+@implementation RTCOpenGLVideoRenderer {
+#if TARGET_OS_IPHONE
+ EAGLContext* _context;
+#else
+ NSOpenGLContext* _context;
+#endif
+ BOOL _isInitialized;
+ NSUInteger _currentTextureSet;
+ // Handles for OpenGL constructs.
+ GLuint _textures[kNumTextures];
+ GLuint _program;
+#if !TARGET_OS_IPHONE
+ GLuint _vertexArray;
+#endif
+ GLuint _vertexBuffer;
+ GLint _position;
+ GLint _texcoord;
+ GLint _ySampler;
+ GLint _uSampler;
+ GLint _vSampler;
+ // Used to create a non-padded plane for GPU upload when we receive padded
+ // frames.
+ rtc::scoped_ptr<uint8_t[]> _planeBuffer;
+}
+
++ (void)initialize {
+ // Disable dithering for performance.
+ glDisable(GL_DITHER);
+}
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext*)context {
+#else
+- (instancetype)initWithContext:(NSOpenGLContext*)context {
+#endif
+ NSAssert(context != nil, @"context cannot be nil");
+ if (self = [super init]) {
+ _context = context;
+ }
+ return self;
+}
+
+- (BOOL)drawFrame:(RTCI420Frame*)frame {
+ if (!_isInitialized) {
+ return NO;
+ }
+ if (_lastDrawnFrame == frame) {
+ return NO;
+ }
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+ if (frame) {
+ if (![self updateTextureSizesForFrame:frame] ||
+ ![self updateTextureDataForFrame:frame]) {
+ return NO;
+ }
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(_vertexArray);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ }
+#if !TARGET_OS_IPHONE
+ [_context flushBuffer];
+#endif
+ _lastDrawnFrame = frame;
+ return YES;
+}
+
+- (void)setupGL {
+ if (_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ if (![self setupProgram]) {
+ return;
+ }
+ if (![self setupTextures]) {
+ return;
+ }
+ if (![self setupVertices]) {
+ return;
+ }
+ glUseProgram(_program);
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+ _isInitialized = YES;
+}
+
+- (void)teardownGL {
+ if (!_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ glDeleteProgram(_program);
+ _program = 0;
+ glDeleteTextures(kNumTextures, _textures);
+ glDeleteBuffers(1, &_vertexBuffer);
+ _vertexBuffer = 0;
+#if !TARGET_OS_IPHONE
+ glDeleteVertexArrays(1, &_vertexArray);
+#endif
+ _isInitialized = NO;
+}
+
+#pragma mark - Private
+
+- (void)ensureGLContext {
+ NSAssert(_context, @"context shouldn't be nil");
+#if TARGET_OS_IPHONE
+ if ([EAGLContext currentContext] != _context) {
+ [EAGLContext setCurrentContext:_context];
+ }
+#else
+ if ([NSOpenGLContext currentContext] != _context) {
+ [_context makeCurrentContext];
+ }
+#endif
+}
+
+- (BOOL)setupProgram {
+ NSAssert(!_program, @"program already set up");
+ GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
+ NSAssert(vertexShader, @"failed to create vertex shader");
+ GLuint fragmentShader =
+ CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
+ NSAssert(fragmentShader, @"failed to create fragment shader");
+ _program = CreateProgram(vertexShader, fragmentShader);
+ // Shaders are created only to generate program.
+ if (vertexShader) {
+ glDeleteShader(vertexShader);
+ }
+ if (fragmentShader) {
+ glDeleteShader(fragmentShader);
+ }
+ if (!_program) {
+ return NO;
+ }
+ _position = glGetAttribLocation(_program, "position");
+ _texcoord = glGetAttribLocation(_program, "texcoord");
+ _ySampler = glGetUniformLocation(_program, "s_textureY");
+ _uSampler = glGetUniformLocation(_program, "s_textureU");
+ _vSampler = glGetUniformLocation(_program, "s_textureV");
+ if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
+ _vSampler < 0) {
+ return NO;
+ }
+ return YES;
+}
+
+- (BOOL)setupTextures {
+ glGenTextures(kNumTextures, _textures);
+ // Set parameters for each of the textures we created.
+ for (GLsizei i = 0; i < kNumTextures; i++) {
+ glActiveTexture(GL_TEXTURE0 + i);
+ glBindTexture(GL_TEXTURE_2D, _textures[i]);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ }
+ return YES;
+}
+
+- (BOOL)updateTextureSizesForFrame:(RTCI420Frame*)frame {
+ if (frame.height == _lastDrawnFrame.height &&
+ frame.width == _lastDrawnFrame.width &&
+ frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
+ frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
+ return YES;
+ }
+ GLsizei lumaWidth = frame.width;
+ GLsizei lumaHeight = frame.height;
+ GLsizei chromaWidth = frame.chromaWidth;
+ GLsizei chromaHeight = frame.chromaHeight;
+ for (GLint i = 0; i < kNumTextureSets; i++) {
+ glActiveTexture(GL_TEXTURE0 + i * 3);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ lumaWidth,
+ lumaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ chromaWidth,
+ chromaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ chromaWidth,
+ chromaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ }
+ if (frame.yPitch != frame.width || frame.uPitch != frame.chromaWidth ||
+ frame.vPitch != frame.chromaWidth) {
+ _planeBuffer.reset(new uint8_t[frame.width * frame.height]);
+ } else {
+ _planeBuffer.reset();
+ }
+ return YES;
+}
+
+- (void)uploadPlane:(const uint8_t*)plane
+ sampler:(GLint)sampler
+ offset:(NSUInteger)offset
+ width:(NSUInteger)width
+ height:(NSUInteger)height
+ stride:(NSInteger)stride {
+ glActiveTexture(GL_TEXTURE0 + offset);
+ // When setting texture sampler uniforms, the texture index is used not
+ // the texture handle.
+ glUniform1i(sampler, offset);
+#if TARGET_OS_IPHONE
+ BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
+#else
+ BOOL hasUnpackRowLength = YES;
+#endif
+ const uint8_t* uploadPlane = plane;
+ if (stride != width) {
+ if (hasUnpackRowLength) {
+ // GLES3 allows us to specify stride.
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ width,
+ height,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
+ return;
+ } else {
+ // Make an unpadded copy and upload that instead. Quick profiling showed
+ // that this is faster than uploading row by row using glTexSubImage2D.
+ uint8_t* unpaddedPlane = _planeBuffer.get();
+ for (NSUInteger y = 0; y < height; ++y) {
+ memcpy(unpaddedPlane + y * width, plane + y * stride, width);
+ }
+ uploadPlane = unpaddedPlane;
+ }
+ }
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ width,
+ height,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+}
+
+- (BOOL)updateTextureDataForFrame:(RTCI420Frame*)frame {
+ NSUInteger textureOffset = _currentTextureSet * 3;
+ NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
+
+ [self uploadPlane:frame.yPlane
+ sampler:_ySampler
+ offset:textureOffset
+ width:frame.width
+ height:frame.height
+ stride:frame.yPitch];
+
+ [self uploadPlane:frame.uPlane
+ sampler:_uSampler
+ offset:textureOffset + 1
+ width:frame.chromaWidth
+ height:frame.chromaHeight
+ stride:frame.uPitch];
+
+ [self uploadPlane:frame.vPlane
+ sampler:_vSampler
+ offset:textureOffset + 2
+ width:frame.chromaWidth
+ height:frame.chromaHeight
+ stride:frame.vPitch];
+
+ _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
+ return YES;
+}
+
+- (BOOL)setupVertices {
+#if !TARGET_OS_IPHONE
+ NSAssert(!_vertexArray, @"vertex array already set up");
+ glGenVertexArrays(1, &_vertexArray);
+ if (!_vertexArray) {
+ return NO;
+ }
+ glBindVertexArray(_vertexArray);
+#endif
+ NSAssert(!_vertexBuffer, @"vertex buffer already set up");
+ glGenBuffers(1, &_vertexBuffer);
+ if (!_vertexBuffer) {
+#if !TARGET_OS_IPHONE
+ glDeleteVertexArrays(1, &_vertexArray);
+ _vertexArray = 0;
+#endif
+ return NO;
+ }
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
+
+ // Read position attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the start of the array. The last argument indicates offset
+ // of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(
+ _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)0);
+ glEnableVertexAttribArray(_position);
+
+ // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the first texcoord in the array. The last argument indicates
+ // offset of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(_texcoord,
+ 2,
+ GL_FLOAT,
+ GL_FALSE,
+ 4 * sizeof(GLfloat),
+ (void*)(2 * sizeof(GLfloat)));
+ glEnableVertexAttribArray(_texcoord);
+
+ return YES;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPair.m b/talk/app/webrtc/objc/RTCPair.m
new file mode 100644
index 0000000000..2171c9ecd5
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPair.m
@@ -0,0 +1,47 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCPair.h"
+
+@implementation RTCPair
+
+@synthesize key = _key;
+@synthesize value = _value;
+
+- (id)initWithKey:(NSString*)key value:(NSString*)value {
+ if ((self = [super init])) {
+ _key = [key copy];
+ _value = [value copy];
+ }
+ return self;
+}
+
+- (NSString*)description {
+ return [NSString stringWithFormat:@"%@: %@", _key, _value];
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnection+Internal.h b/talk/app/webrtc/objc/RTCPeerConnection+Internal.h
new file mode 100644
index 0000000000..96d63ab412
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnection+Internal.h
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCPeerConnection.h"
+
+#import "RTCPeerConnectionDelegate.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@interface RTCPeerConnection (Internal)
+
+@property(nonatomic, assign, readonly)
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection;
+
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ iceServers:(const webrtc::PeerConnectionInterface::IceServers &)iceServers
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints;
+
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ config:(const webrtc::PeerConnectionInterface::RTCConfiguration &)config
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnection.mm b/talk/app/webrtc/objc/RTCPeerConnection.mm
new file mode 100644
index 0000000000..44d39cb090
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnection.mm
@@ -0,0 +1,303 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCPeerConnection+Internal.h"
+
+#import "RTCDataChannel+Internal.h"
+#import "RTCEnumConverter.h"
+#import "RTCICECandidate+Internal.h"
+#import "RTCICEServer+Internal.h"
+#import "RTCMediaConstraints+Internal.h"
+#import "RTCMediaStream+Internal.h"
+#import "RTCMediaStreamTrack+Internal.h"
+#import "RTCPeerConnectionObserver.h"
+#import "RTCSessionDescription+Internal.h"
+#import "RTCSessionDescription.h"
+#import "RTCSessionDescriptionDelegate.h"
+#import "RTCStatsDelegate.h"
+#import "RTCStatsReport+Internal.h"
+
+#include "talk/app/webrtc/jsep.h"
+
+// Domain and code used for NSErrors surfaced through the
+// RTCSessionDescriptionDelegate callbacks below.
+NSString* const kRTCSessionDescriptionDelegateErrorDomain = @"RTCSDPError";
+int const kRTCSessionDescriptionDelegateErrorCode = -1;
+
+namespace webrtc {
+
+// Adapts the native CreateSessionDescriptionObserver callbacks (offer/answer
+// creation results) to the ObjC id<RTCSessionDescriptionDelegate>.
+class RTCCreateSessionDescriptionObserver
+ : public CreateSessionDescriptionObserver {
+ public:
+ RTCCreateSessionDescriptionObserver(
+ id<RTCSessionDescriptionDelegate> delegate,
+ RTCPeerConnection* peerConnection) {
+ _delegate = delegate;
+ _peerConnection = peerConnection;
+ }
+
+ void OnSuccess(SessionDescriptionInterface* desc) override {
+ RTCSessionDescription* session =
+ [[RTCSessionDescription alloc] initWithSessionDescription:desc];
+ [_delegate peerConnection:_peerConnection
+ didCreateSessionDescription:session
+ error:nil];
+ // This observer takes ownership of |desc|; deleting it here assumes the
+ // RTCSessionDescription wrapper copied what it needs — TODO confirm.
+ delete desc;
+ }
+
+ void OnFailure(const std::string& error) override {
+ // Surface the native error string to the delegate as an NSError.
+ NSString* str = @(error.c_str());
+ NSError* err =
+ [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
+ code:kRTCSessionDescriptionDelegateErrorCode
+ userInfo:@{@"error" : str}];
+ [_delegate peerConnection:_peerConnection
+ didCreateSessionDescription:nil
+ error:err];
+ }
+
+ private:
+ id<RTCSessionDescriptionDelegate> _delegate;
+ RTCPeerConnection* _peerConnection;
+};
+
+// Adapts the native SetSessionDescriptionObserver callbacks (local/remote
+// description application results) to the ObjC
+// id<RTCSessionDescriptionDelegate>.
+class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
+ public:
+ RTCSetSessionDescriptionObserver(id<RTCSessionDescriptionDelegate> delegate,
+ RTCPeerConnection* peerConnection) {
+ _delegate = delegate;
+ _peerConnection = peerConnection;
+ }
+
+ void OnSuccess() override {
+ [_delegate peerConnection:_peerConnection
+ didSetSessionDescriptionWithError:nil];
+ }
+
+ void OnFailure(const std::string& error) override {
+ // Surface the native error string to the delegate as an NSError.
+ NSString* str = @(error.c_str());
+ NSError* err =
+ [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
+ code:kRTCSessionDescriptionDelegateErrorCode
+ userInfo:@{@"error" : str}];
+ [_delegate peerConnection:_peerConnection
+ didSetSessionDescriptionWithError:err];
+ }
+
+ private:
+ id<RTCSessionDescriptionDelegate> _delegate;
+ RTCPeerConnection* _peerConnection;
+};
+
+// Collects native stats reports into an NSArray of RTCStatsReport wrappers
+// and hands them to the ObjC id<RTCStatsDelegate>.
+class RTCStatsObserver : public StatsObserver {
+ public:
+ RTCStatsObserver(id<RTCStatsDelegate> delegate,
+ RTCPeerConnection* peerConnection) {
+ _delegate = delegate;
+ _peerConnection = peerConnection;
+ }
+
+ void OnComplete(const StatsReports& reports) override {
+ NSMutableArray* stats = [NSMutableArray arrayWithCapacity:reports.size()];
+ for (const auto* report : reports) {
+ RTCStatsReport* statsReport =
+ [[RTCStatsReport alloc] initWithStatsReport:*report];
+ [stats addObject:statsReport];
+ }
+ [_delegate peerConnection:_peerConnection didGetStats:stats];
+ }
+
+ private:
+ id<RTCStatsDelegate> _delegate;
+ RTCPeerConnection* _peerConnection;
+};
+}
+
+@implementation RTCPeerConnection {
+ // Streams added through addStream:, mirrored for the localStreams getter.
+ NSMutableArray* _localStreams;
+ rtc::scoped_ptr<webrtc::RTCPeerConnectionObserver> _observer;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
+}
+
+// Forwards the candidate to the native connection. The scoped_ptr takes
+// ownership of candidate.candidate — assumes that getter returns a freshly
+// allocated native object; TODO confirm in RTCICECandidate+Internal.
+- (BOOL)addICECandidate:(RTCICECandidate*)candidate {
+ rtc::scoped_ptr<const webrtc::IceCandidateInterface> iceCandidate(
+ candidate.candidate);
+ return self.peerConnection->AddIceCandidate(iceCandidate.get());
+}
+
+// Adds |stream| to the native connection; tracked locally only on success.
+- (BOOL)addStream:(RTCMediaStream*)stream {
+ BOOL ret = self.peerConnection->AddStream(stream.mediaStream);
+ if (!ret) {
+ return NO;
+ }
+ [_localStreams addObject:stream];
+ return YES;
+}
+
+// Creates a native data channel with the given label and configuration,
+// wrapped as an RTCDataChannel.
+- (RTCDataChannel*)createDataChannelWithLabel:(NSString*)label
+ config:(RTCDataChannelInit*)config {
+ std::string labelString([label UTF8String]);
+ rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
+ self.peerConnection->CreateDataChannel(labelString,
+ config.dataChannelInit);
+ return [[RTCDataChannel alloc] initWithDataChannel:dataChannel];
+}
+
+// Asynchronously creates an SDP answer; the result reaches |delegate|
+// through the RTCCreateSessionDescriptionObserver adapter.
+- (void)createAnswerWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ constraints:(RTCMediaConstraints*)constraints {
+ rtc::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
+ self.peerConnection->CreateAnswer(observer, constraints.constraints);
+}
+
+// Asynchronously creates an SDP offer; delivered like createAnswer.
+- (void)createOfferWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ constraints:(RTCMediaConstraints*)constraints {
+ rtc::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
+ self.peerConnection->CreateOffer(observer, constraints.constraints);
+}
+
+// Removes |stream| natively and drops the local reference unconditionally.
+- (void)removeStream:(RTCMediaStream*)stream {
+ self.peerConnection->RemoveStream(stream.mediaStream);
+ [_localStreams removeObject:stream];
+}
+
+// Applies |sdp| as the local description; completion is reported through the
+// RTCSetSessionDescriptionObserver adapter.
+- (void)setLocalDescriptionWithDelegate:
+ (id<RTCSessionDescriptionDelegate>)delegate
+ sessionDescription:(RTCSessionDescription*)sdp {
+ rtc::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
+ new rtc::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
+ delegate, self));
+ self.peerConnection->SetLocalDescription(observer, sdp.sessionDescription);
+}
+
+// Applies |sdp| as the remote description; reported like the local variant.
+- (void)setRemoteDescriptionWithDelegate:
+ (id<RTCSessionDescriptionDelegate>)delegate
+ sessionDescription:(RTCSessionDescription*)sdp {
+ rtc::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
+ new rtc::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
+ delegate, self));
+ self.peerConnection->SetRemoteDescription(observer, sdp.sessionDescription);
+}
+
+// Updates the native configuration; returns whether it was accepted.
+- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
+ return self.peerConnection->SetConfiguration(
+ configuration.nativeConfiguration);
+}
+
+// Current local description, or nil if none has been set yet.
+- (RTCSessionDescription*)localDescription {
+ const webrtc::SessionDescriptionInterface* sdi =
+ self.peerConnection->local_description();
+ return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
+ : nil;
+}
+
+// Snapshot copy of the streams added through addStream:.
+- (NSArray*)localStreams {
+ return [_localStreams copy];
+}
+
+// Current remote description, or nil if none has been set yet.
+- (RTCSessionDescription*)remoteDescription {
+ const webrtc::SessionDescriptionInterface* sdi =
+ self.peerConnection->remote_description();
+ return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
+ : nil;
+}
+
+// State getters: convert the native enums to their ObjC equivalents.
+- (RTCICEConnectionState)iceConnectionState {
+ return [RTCEnumConverter
+ convertIceConnectionStateToObjC:self.peerConnection
+ ->ice_connection_state()];
+}
+
+- (RTCICEGatheringState)iceGatheringState {
+ return [RTCEnumConverter
+ convertIceGatheringStateToObjC:self.peerConnection
+ ->ice_gathering_state()];
+}
+
+- (RTCSignalingState)signalingState {
+ return [RTCEnumConverter
+ convertSignalingStateToObjC:self.peerConnection->signaling_state()];
+}
+
+// Closes the native connection.
+- (void)close {
+ self.peerConnection->Close();
+}
+
+// Requests stats for |mediaStreamTrack| from the native connection; results
+// are delivered through the RTCStatsObserver adapter. Returns whether the
+// request was accepted.
+- (BOOL)getStatsWithDelegate:(id<RTCStatsDelegate>)delegate
+ mediaStreamTrack:(RTCMediaStreamTrack*)mediaStreamTrack
+ statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel {
+ rtc::scoped_refptr<webrtc::RTCStatsObserver> observer(
+ new rtc::RefCountedObject<webrtc::RTCStatsObserver>(delegate,
+ self));
+ webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
+ [RTCEnumConverter convertStatsOutputLevelToNative:statsOutputLevel];
+ return self.peerConnection->GetStats(
+ observer, mediaStreamTrack.mediaTrack, nativeOutputLevel);
+}
+
+@end
+
+@implementation RTCPeerConnection (Internal)
+
+// Legacy initializer: builds the native connection from an ICE server list.
+// The delegate is not set here; callers assign it after construction.
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface*)factory
+ iceServers:(const webrtc::PeerConnectionInterface::IceServers&)iceServers
+ constraints:(const webrtc::MediaConstraintsInterface*)constraints {
+ NSParameterAssert(factory != NULL);
+ if (self = [super init]) {
+ // The observer forwards native callbacks to self.delegate; it is owned
+ // by this wrapper and must outlive the native connection.
+ _observer.reset(new webrtc::RTCPeerConnectionObserver(self));
+ _peerConnection = factory->CreatePeerConnection(
+ iceServers, constraints, NULL, NULL, _observer.get());
+ _localStreams = [[NSMutableArray alloc] init];
+ }
+ return self;
+}
+
+// Configuration-based initializer: builds the native connection from a full
+// RTCConfiguration and wires up the delegate immediately.
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ config:(const webrtc::PeerConnectionInterface::RTCConfiguration &)config
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate {
+ NSParameterAssert(factory);
+ if (self = [super init]) {
+ _observer.reset(new webrtc::RTCPeerConnectionObserver(self));
+ _peerConnection =
+ factory->CreatePeerConnection(config, constraints, nullptr, nullptr, _observer.get());
+ _localStreams = [[NSMutableArray alloc] init];
+ _delegate = delegate;
+ }
+ return self;
+}
+
+// Exposes the wrapped native connection to other internal classes.
+- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection {
+ return _peerConnection;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionFactory+Internal.h b/talk/app/webrtc/objc/RTCPeerConnectionFactory+Internal.h
new file mode 100644
index 0000000000..5d6fa12711
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionFactory+Internal.h
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "webrtc/base/scoped_ptr.h"
+
+// Class extension exposing the native factory to internal ObjC wrappers.
+@interface RTCPeerConnectionFactory ()
+
+@property(nonatomic, assign) rtc::scoped_refptr<
+ webrtc::PeerConnectionFactoryInterface> nativeFactory;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
new file mode 100644
index 0000000000..b7b8966239
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
@@ -0,0 +1,148 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCPeerConnectionFactory+Internal.h"
+
+#include <vector>
+
+#import "RTCAudioTrack+Internal.h"
+#import "RTCICEServer+Internal.h"
+#import "RTCMediaConstraints+Internal.h"
+#import "RTCMediaSource+Internal.h"
+#import "RTCMediaStream+Internal.h"
+#import "RTCMediaStreamTrack+Internal.h"
+#import "RTCPeerConnection+Internal.h"
+#import "RTCPeerConnectionDelegate.h"
+#import "RTCPeerConnectionInterface+Internal.h"
+#import "RTCVideoCapturer+Internal.h"
+#import "RTCVideoSource+Internal.h"
+#import "RTCVideoTrack+Internal.h"
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/ssladapter.h"
+
+@implementation RTCPeerConnectionFactory {
+ // Threads owned by this factory and handed to the native factory in init.
+ rtc::scoped_ptr<rtc::Thread> _signalingThread;
+ rtc::scoped_ptr<rtc::Thread> _workerThread;
+}
+
+@synthesize nativeFactory = _nativeFactory;
+
+// Initializes the SSL stack used by WebRTC.
+// NOTE(review): NSAssert compiles out when assertions are disabled, leaving
+// the result unchecked in release builds.
++ (void)initializeSSL {
+ BOOL initialized = rtc::InitializeSSL();
+ NSAssert(initialized, @"Failed to initialize SSL library");
+}
+
+// Tears down the SSL stack; same NSAssert caveat as above.
++ (void)deinitializeSSL {
+ BOOL deinitialized = rtc::CleanupSSL();
+ NSAssert(deinitialized, @"Failed to deinitialize SSL library");
+}
+
+// Starts the signaling and worker threads and creates the native factory.
+// The nullptr arguments accept the native factory's defaults (see
+// webrtc::CreatePeerConnectionFactory).
+- (id)init {
+ if ((self = [super init])) {
+ _signalingThread.reset(new rtc::Thread());
+ BOOL result = _signalingThread->Start();
+ NSAssert(result, @"Failed to start signaling thread.");
+ _workerThread.reset(new rtc::Thread());
+ result = _workerThread->Start();
+ NSAssert(result, @"Failed to start worker thread.");
+
+ _nativeFactory = webrtc::CreatePeerConnectionFactory(
+ _signalingThread.get(), _workerThread.get(), nullptr, nullptr, nullptr);
+ NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+ // Uncomment to get sensitive logs emitted (to stderr or logcat).
+ // rtc::LogMessage::LogToDebug(rtc::LS_SENSITIVE);
+ }
+ return self;
+}
+
+// Creates a peer connection from a full configuration; the delegate is
+// wired in by the initializer.
+- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration
+ constraints:(RTCMediaConstraints *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate {
+ return [[RTCPeerConnection alloc] initWithFactory:self.nativeFactory.get()
+ config:configuration.nativeConfiguration
+ constraints:constraints.constraints
+ delegate:delegate];
+}
+
+// Legacy creation path: converts the NSArray of RTCICEServer to the native
+// list, then assigns the delegate after construction.
+- (RTCPeerConnection*)
+ peerConnectionWithICEServers:(NSArray*)servers
+ constraints:(RTCMediaConstraints*)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate {
+ webrtc::PeerConnectionInterface::IceServers iceServers;
+ for (RTCICEServer* server in servers) {
+ iceServers.push_back(server.iceServer);
+ }
+ RTCPeerConnection* pc =
+ [[RTCPeerConnection alloc] initWithFactory:self.nativeFactory.get()
+ iceServers:iceServers
+ constraints:constraints.constraints];
+ pc.delegate = delegate;
+ return pc;
+}
+
+// Wraps a new native local media stream with the given label.
+- (RTCMediaStream*)mediaStreamWithLabel:(NSString*)label {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream =
+ self.nativeFactory->CreateLocalMediaStream([label UTF8String]);
+ return [[RTCMediaStream alloc] initWithMediaStream:nativeMediaStream];
+}
+
+// Builds a video source from |capturer|; returns nil for a nil capturer.
+// takeNativeCapturer suggests ownership of the native capturer is
+// transferred here — TODO confirm in RTCVideoCapturer+Internal.
+- (RTCVideoSource*)videoSourceWithCapturer:(RTCVideoCapturer*)capturer
+ constraints:(RTCMediaConstraints*)constraints {
+ if (!capturer) {
+ return nil;
+ }
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ self.nativeFactory->CreateVideoSource([capturer takeNativeCapturer],
+ constraints.constraints);
+ return [[RTCVideoSource alloc] initWithMediaSource:source];
+}
+
+// Creates a video track bound to |source| with the given track id.
+- (RTCVideoTrack*)videoTrackWithID:(NSString*)videoId
+ source:(RTCVideoSource*)source {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+ self.nativeFactory->CreateVideoTrack([videoId UTF8String],
+ source.videoSource);
+ return [[RTCVideoTrack alloc] initWithMediaTrack:track];
+}
+
+// Creates an audio track with the given track id; passes NULL as the source.
+- (RTCAudioTrack*)audioTrackWithID:(NSString*)audioId {
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+ self.nativeFactory->CreateAudioTrack([audioId UTF8String], NULL);
+ return [[RTCAudioTrack alloc] initWithMediaTrack:track];
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h b/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h
new file mode 100644
index 0000000000..5e8dbbf604
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+// Class extension: converts the ObjC configuration object into the native
+// webrtc::PeerConnectionInterface::RTCConfiguration struct.
+@interface RTCConfiguration ()
+
+@property(nonatomic, readonly)
+ webrtc::PeerConnectionInterface::RTCConfiguration nativeConfiguration;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm b/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
new file mode 100644
index 0000000000..58d12ace4c
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
@@ -0,0 +1,93 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h"
+
+#import "talk/app/webrtc/objc/RTCEnumConverter.h"
+#import "talk/app/webrtc/objc/RTCICEServer+Internal.h"
+
+@implementation RTCConfiguration
+
+@synthesize iceTransportsType = _iceTransportsType;
+@synthesize iceServers = _iceServers;
+@synthesize bundlePolicy = _bundlePolicy;
+@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
+@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
+@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
+
+// Default-constructs a native RTCConfiguration and mirrors its defaults into
+// the ObjC properties. Note: _iceServers is left nil here.
+- (instancetype)init {
+ if (self = [super init]) {
+ // Copy defaults.
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ _iceTransportsType = [RTCEnumConverter iceTransportsTypeForNativeEnum:config.type];
+ _bundlePolicy = [RTCEnumConverter bundlePolicyForNativeEnum:config.bundle_policy];
+ _rtcpMuxPolicy = [RTCEnumConverter rtcpMuxPolicyForNativeEnum:config.rtcp_mux_policy];
+ _tcpCandidatePolicy =
+ [RTCEnumConverter tcpCandidatePolicyForNativeEnum:config.tcp_candidate_policy];
+ _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
+ _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
+ }
+ return self;
+}
+
+// Value initializer; iceServers is likewise not populated here.
+- (instancetype)initWithIceTransportsType:(RTCIceTransportsType)iceTransportsType
+ bundlePolicy:(RTCBundlePolicy)bundlePolicy
+ rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
+ tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
+ audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets
+ iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout {
+ if (self = [super init]) {
+ _iceTransportsType = iceTransportsType;
+ _bundlePolicy = bundlePolicy;
+ _rtcpMuxPolicy = rtcpMuxPolicy;
+ _tcpCandidatePolicy = tcpCandidatePolicy;
+ _audioJitterBufferMaxPackets = audioJitterBufferMaxPackets;
+ _iceConnectionReceivingTimeout = iceConnectionReceivingTimeout;
+ }
+ return self;
+}
+
+#pragma mark - Private
+
+// Builds the native configuration from the current property values.
+// NOTE(review): the loop below uses C++ range-for syntax (':') over an
+// NSArray* (_iceServers); ObjC fast enumeration is normally written with
+// 'in' — confirm this compiles as intended.
+- (webrtc::PeerConnectionInterface::RTCConfiguration)nativeConfiguration {
+ webrtc::PeerConnectionInterface::RTCConfiguration nativeConfig;
+ nativeConfig.type = [RTCEnumConverter nativeEnumForIceTransportsType:_iceTransportsType];
+ for (RTCICEServer *iceServer : _iceServers) {
+ nativeConfig.servers.push_back(iceServer.iceServer);
+ }
+ nativeConfig.bundle_policy = [RTCEnumConverter nativeEnumForBundlePolicy:_bundlePolicy];
+ nativeConfig.rtcp_mux_policy = [RTCEnumConverter nativeEnumForRtcpMuxPolicy:_rtcpMuxPolicy];
+ nativeConfig.tcp_candidate_policy =
+ [RTCEnumConverter nativeEnumForTcpCandidatePolicy:_tcpCandidatePolicy];
+ nativeConfig.audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+ nativeConfig.ice_connection_receiving_timeout =
+ _iceConnectionReceivingTimeout;
+ return nativeConfig;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionObserver.h b/talk/app/webrtc/objc/RTCPeerConnectionObserver.h
new file mode 100644
index 0000000000..9b981b9307
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionObserver.h
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+#import "RTCPeerConnection.h"
+#import "RTCPeerConnectionDelegate.h"
+
+// These objects are created by RTCPeerConnectionFactory to wrap an
+// id<RTCPeerConnectionDelegate> and call methods on that interface.
+
+namespace webrtc {
+
+// Adapts webrtc::PeerConnectionObserver callbacks to the wrapper's
+// id<RTCPeerConnectionDelegate>.
+class RTCPeerConnectionObserver : public PeerConnectionObserver {
+
+ public:
+ RTCPeerConnectionObserver(RTCPeerConnection* peerConnection);
+ virtual ~RTCPeerConnectionObserver();
+
+ // Triggered when the SignalingState changes.
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override;
+
+ // Triggered when media is received on a new stream from remote peer.
+ void OnAddStream(MediaStreamInterface* stream) override;
+
+ // Triggered when a remote peer closes a stream.
+ void OnRemoveStream(MediaStreamInterface* stream) override;
+
+ // Triggered when a remote peer opens a data channel.
+ void OnDataChannel(DataChannelInterface* data_channel) override;
+
+ // Triggered when renegotiation is needed, for example when ICE has
+ // restarted.
+ void OnRenegotiationNeeded() override;
+
+ // Called any time the ICEConnectionState changes.
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+
+ // Called any time the ICEGatheringState changes.
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override;
+
+ // A new ICE candidate has been found.
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+
+ private:
+ // Weak back-pointer: the wrapped RTCPeerConnection owns this observer via
+ // a scoped_ptr ivar, so a strong reference here would be a cycle.
+ __weak RTCPeerConnection* _peerConnection;
+};
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm b/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm
new file mode 100644
index 0000000000..411cd6cb89
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCPeerConnectionObserver.h"
+
+#import "RTCDataChannel+Internal.h"
+#import "RTCEnumConverter.h"
+#import "RTCICECandidate+Internal.h"
+#import "RTCMediaStream+Internal.h"
+
+namespace webrtc {
+
+// Each callback below converts its native argument to the ObjC wrapper type
+// and forwards to the RTCPeerConnection's delegate. No queue hop appears
+// here: the delegate runs on whatever thread the native layer invokes these
+// from.
+RTCPeerConnectionObserver::RTCPeerConnectionObserver(
+ RTCPeerConnection* peerConnection) {
+ _peerConnection = peerConnection;
+}
+
+RTCPeerConnectionObserver::~RTCPeerConnectionObserver() {
+}
+
+void RTCPeerConnectionObserver::OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ RTCSignalingState state =
+ [RTCEnumConverter convertSignalingStateToObjC:new_state];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ signalingStateChanged:state];
+}
+
+void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) {
+ RTCMediaStream* mediaStream =
+ [[RTCMediaStream alloc] initWithMediaStream:stream];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ addedStream:mediaStream];
+}
+
+void RTCPeerConnectionObserver::OnRemoveStream(MediaStreamInterface* stream) {
+ RTCMediaStream* mediaStream =
+ [[RTCMediaStream alloc] initWithMediaStream:stream];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ removedStream:mediaStream];
+}
+
+void RTCPeerConnectionObserver::OnDataChannel(
+ DataChannelInterface* data_channel) {
+ RTCDataChannel* dataChannel =
+ [[RTCDataChannel alloc] initWithDataChannel:data_channel];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ didOpenDataChannel:dataChannel];
+}
+
+void RTCPeerConnectionObserver::OnRenegotiationNeeded() {
+ id<RTCPeerConnectionDelegate> delegate = _peerConnection.delegate;
+ [delegate peerConnectionOnRenegotiationNeeded:_peerConnection];
+}
+
+void RTCPeerConnectionObserver::OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ RTCICEConnectionState state =
+ [RTCEnumConverter convertIceConnectionStateToObjC:new_state];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ iceConnectionChanged:state];
+}
+
+void RTCPeerConnectionObserver::OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ RTCICEGatheringState state =
+ [RTCEnumConverter convertIceGatheringStateToObjC:new_state];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ iceGatheringChanged:state];
+}
+
+void RTCPeerConnectionObserver::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ RTCICECandidate* iceCandidate =
+ [[RTCICECandidate alloc] initWithCandidate:candidate];
+ [_peerConnection.delegate peerConnection:_peerConnection
+ gotICECandidate:iceCandidate];
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/objc/RTCSessionDescription+Internal.h b/talk/app/webrtc/objc/RTCSessionDescription+Internal.h
new file mode 100644
index 0000000000..662c538bfb
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCSessionDescription+Internal.h
@@ -0,0 +1,41 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCSessionDescription.h"
+
+#include "talk/app/webrtc/jsep.h"
+#include "talk/app/webrtc/webrtcsession.h"
+
+@interface RTCSessionDescription (Internal)
+
+// Caller assumes ownership of this object!
+- (webrtc::SessionDescriptionInterface *)sessionDescription;
+
+- (id)initWithSessionDescription:
+ (const webrtc::SessionDescriptionInterface*)sessionDescription;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCSessionDescription.mm b/talk/app/webrtc/objc/RTCSessionDescription.mm
new file mode 100644
index 0000000000..5d62511925
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCSessionDescription.mm
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCSessionDescription+Internal.h"
+
+@implementation RTCSessionDescription
+
+@synthesize description = _description;
+@synthesize type = _type;
+
+- (id)initWithType:(NSString*)type sdp:(NSString*)sdp {
+ if (!type || !sdp) {
+ NSAssert(NO, @"nil arguments not allowed");
+ return nil;
+ }
+ if ((self = [super init])) {
+ _description = sdp;
+ _type = type;
+ }
+ return self;
+}
+
+@end
+
+@implementation RTCSessionDescription (Internal)
+
+- (id)initWithSessionDescription:
+ (const webrtc::SessionDescriptionInterface*)sessionDescription {
+ if (!sessionDescription) {
+ NSAssert(NO, @"nil arguments not allowed");
+ self = nil;
+ return nil;
+ }
+ if ((self = [super init])) {
+ const std::string& type = sessionDescription->type();
+ std::string sdp;
+ if (!sessionDescription->ToString(&sdp)) {
+ NSAssert(NO, @"Invalid SessionDescriptionInterface.");
+ self = nil;
+ } else {
+ _description = @(sdp.c_str());
+ _type = @(type.c_str());
+ }
+ }
+ return self;
+}
+
+- (webrtc::SessionDescriptionInterface*)sessionDescription {
+ return webrtc::CreateSessionDescription(
+ [self.type UTF8String], [self.description UTF8String], NULL);
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCStatsReport+Internal.h b/talk/app/webrtc/objc/RTCStatsReport+Internal.h
new file mode 100644
index 0000000000..7a4124642c
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCStatsReport+Internal.h
@@ -0,0 +1,36 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCStatsReport.h"
+
+#include "talk/app/webrtc/statstypes.h"
+
+@interface RTCStatsReport (Internal)
+
+- (instancetype)initWithStatsReport:(const webrtc::StatsReport&)statsReport;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCStatsReport.mm b/talk/app/webrtc/objc/RTCStatsReport.mm
new file mode 100644
index 0000000000..04a3d274d1
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCStatsReport.mm
@@ -0,0 +1,69 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCStatsReport+Internal.h"
+
+#import "RTCPair.h"
+
+@implementation RTCStatsReport
+
+- (NSString*)description {
+ NSString* format = @"id: %@, type: %@, timestamp: %f, values: %@";
+ return [NSString stringWithFormat:format,
+ self.reportId,
+ self.type,
+ self.timestamp,
+ self.values];
+}
+
+@end
+
+@implementation RTCStatsReport (Internal)
+
+- (instancetype)initWithStatsReport:(const webrtc::StatsReport&)statsReport {
+ if (self = [super init]) {
+ _reportId = @(statsReport.id()->ToString().c_str());
+ _type = @(statsReport.TypeToString());
+ _timestamp = statsReport.timestamp();
+ NSMutableArray* values =
+ [NSMutableArray arrayWithCapacity:statsReport.values().size()];
+ for (const auto& it : statsReport.values()) {
+ RTCPair* pair =
+ [[RTCPair alloc] initWithKey:@(it.second->display_name())
+ value:@(it.second->ToString().c_str())];
+ [values addObject:pair];
+ }
+ _values = values;
+ }
+ return self;
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h
new file mode 100644
index 0000000000..10a72e2572
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCVideoCapturer.h"
+
+#include "talk/app/webrtc/videosourceinterface.h"
+
+@interface RTCVideoCapturer (Internal)
+
+- (cricket::VideoCapturer*)takeNativeCapturer;
+
+- (id)initWithCapturer:(cricket::VideoCapturer*)capturer;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoCapturer.mm b/talk/app/webrtc/objc/RTCVideoCapturer.mm
new file mode 100644
index 0000000000..11973ee289
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoCapturer.mm
@@ -0,0 +1,74 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCVideoCapturer+Internal.h"
+
+#include "talk/media/base/videocapturer.h"
+#include "talk/media/devices/devicemanager.h"
+
+@implementation RTCVideoCapturer {
+ rtc::scoped_ptr<cricket::VideoCapturer> _capturer;
+}
+
++ (RTCVideoCapturer*)capturerWithDeviceName:(NSString*)deviceName {
+ const std::string& device_name = std::string([deviceName UTF8String]);
+ rtc::scoped_ptr<cricket::DeviceManagerInterface> device_manager(
+ cricket::DeviceManagerFactory::Create());
+ bool initialized = device_manager->Init();
+ NSAssert(initialized, @"DeviceManager::Init() failed");
+ cricket::Device device;
+ if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
+ LOG(LS_ERROR) << "GetVideoCaptureDevice failed";
+ return 0;
+ }
+ rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+ device_manager->CreateVideoCapturer(device));
+ RTCVideoCapturer* rtcCapturer =
+ [[RTCVideoCapturer alloc] initWithCapturer:capturer.release()];
+ return rtcCapturer;
+}
+
+@end
+
+@implementation RTCVideoCapturer (Internal)
+
+- (id)initWithCapturer:(cricket::VideoCapturer*)capturer {
+ if ((self = [super init])) {
+ _capturer.reset(capturer);
+ }
+ return self;
+}
+
+- (cricket::VideoCapturer*)takeNativeCapturer {
+ return _capturer.release();
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoRendererAdapter.h b/talk/app/webrtc/objc/RTCVideoRendererAdapter.h
new file mode 100644
index 0000000000..20a4cf1458
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoRendererAdapter.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCVideoRenderer.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+@interface RTCVideoRendererAdapter : NSObject
+
+@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+@property(nonatomic, readonly)
+ webrtc::VideoRendererInterface* nativeVideoRenderer;
+
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoRendererAdapter.mm b/talk/app/webrtc/objc/RTCVideoRendererAdapter.mm
new file mode 100644
index 0000000000..cefd567c94
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoRendererAdapter.mm
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCVideoRendererAdapter.h"
+#import "RTCI420Frame+Internal.h"
+
+namespace webrtc {
+
+class RTCVideoRendererNativeAdapter : public VideoRendererInterface {
+ public:
+ RTCVideoRendererNativeAdapter(RTCVideoRendererAdapter* adapter) {
+ _adapter = adapter;
+ _size = CGSizeZero;
+ }
+
+ void RenderFrame(const cricket::VideoFrame* videoFrame) override {
+ const cricket::VideoFrame* frame = videoFrame->GetCopyWithRotationApplied();
+ CGSize currentSize = CGSizeMake(frame->GetWidth(), frame->GetHeight());
+ if (!CGSizeEqualToSize(_size, currentSize)) {
+ _size = currentSize;
+ [_adapter.videoRenderer setSize:_size];
+ }
+ RTCI420Frame* i420Frame = [[RTCI420Frame alloc] initWithVideoFrame:frame];
+ [_adapter.videoRenderer renderFrame:i420Frame];
+ }
+
+ private:
+ __weak RTCVideoRendererAdapter* _adapter;
+ CGSize _size;
+};
+}
+
+@implementation RTCVideoRendererAdapter {
+ id<RTCVideoRenderer> _videoRenderer;
+ rtc::scoped_ptr<webrtc::RTCVideoRendererNativeAdapter> _adapter;
+}
+
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer {
+ NSParameterAssert(videoRenderer);
+ if (self = [super init]) {
+ _videoRenderer = videoRenderer;
+ _adapter.reset(new webrtc::RTCVideoRendererNativeAdapter(self));
+ }
+ return self;
+}
+
+- (webrtc::VideoRendererInterface*)nativeVideoRenderer {
+ return _adapter.get();
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoSource+Internal.h b/talk/app/webrtc/objc/RTCVideoSource+Internal.h
new file mode 100644
index 0000000000..c6c4a206b6
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoSource+Internal.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCVideoSource.h"
+
+#include "talk/app/webrtc/videosourceinterface.h"
+
+@interface RTCVideoSource (Internal)
+
+@property(nonatomic, assign, readonly)
+ rtc::scoped_refptr<webrtc::VideoSourceInterface>videoSource;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoSource.mm b/talk/app/webrtc/objc/RTCVideoSource.mm
new file mode 100644
index 0000000000..5a1ed7415b
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoSource.mm
@@ -0,0 +1,44 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCMediaSource+Internal.h"
+#import "RTCVideoSource+Internal.h"
+
+@implementation RTCVideoSource
+@end
+
+@implementation RTCVideoSource (Internal)
+
+- (rtc::scoped_refptr<webrtc::VideoSourceInterface>)videoSource {
+ return static_cast<webrtc::VideoSourceInterface*>(self.mediaSource.get());
+}
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoTrack+Internal.h b/talk/app/webrtc/objc/RTCVideoTrack+Internal.h
new file mode 100644
index 0000000000..c9ec382938
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoTrack+Internal.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCVideoTrack.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@class RTCVideoRenderer;
+
+@interface RTCVideoTrack (Internal)
+
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
+
+@end
diff --git a/talk/app/webrtc/objc/RTCVideoTrack.mm b/talk/app/webrtc/objc/RTCVideoTrack.mm
new file mode 100644
index 0000000000..24e5edabc8
--- /dev/null
+++ b/talk/app/webrtc/objc/RTCVideoTrack.mm
@@ -0,0 +1,122 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCVideoTrack+Internal.h"
+
+#import "RTCMediaStreamTrack+Internal.h"
+#import "RTCPeerConnectionFactory+Internal.h"
+#import "RTCVideoRendererAdapter.h"
+#import "RTCMediaSource+Internal.h"
+#import "RTCVideoSource+Internal.h"
+
+@implementation RTCVideoTrack {
+ NSMutableArray* _adapters;
+}
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+ source:(RTCVideoSource*)source
+ trackId:(NSString*)trackId {
+ NSParameterAssert(factory);
+ NSParameterAssert(source);
+ NSParameterAssert(trackId.length);
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+ factory.nativeFactory->CreateVideoTrack([trackId UTF8String],
+ source.videoSource);
+ if (self = [super initWithMediaTrack:track]) {
+ [self configure];
+ _source = source;
+ }
+ return self;
+}
+
+- (instancetype)initWithMediaTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
+ if (self = [super initWithMediaTrack:mediaTrack]) {
+ [self configure];
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ self.nativeVideoTrack->GetSource();
+ if (source) {
+ _source = [[RTCVideoSource alloc] initWithMediaSource:source.get()];
+ }
+ }
+ return self;
+}
+
+- (void)configure {
+ _adapters = [NSMutableArray array];
+}
+
+- (void)dealloc {
+ for (RTCVideoRendererAdapter *adapter in _adapters) {
+ self.nativeVideoTrack->RemoveRenderer(adapter.nativeVideoRenderer);
+ }
+}
+
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
+ // Make sure we don't have this renderer yet.
+ for (RTCVideoRendererAdapter* adapter in _adapters) {
+ NSParameterAssert(adapter.videoRenderer != renderer);
+ }
+ // Create a wrapper that provides a native pointer for us.
+ RTCVideoRendererAdapter* adapter =
+ [[RTCVideoRendererAdapter alloc] initWithVideoRenderer:renderer];
+ [_adapters addObject:adapter];
+ self.nativeVideoTrack->AddRenderer(adapter.nativeVideoRenderer);
+}
+
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+ RTCVideoRendererAdapter* adapter = nil;
+ NSUInteger indexToRemove = NSNotFound;
+ for (NSUInteger i = 0; i < _adapters.count; i++) {
+ adapter = _adapters[i];
+ if (adapter.videoRenderer == renderer) {
+ indexToRemove = i;
+ break;
+ }
+ }
+ if (indexToRemove == NSNotFound) {
+ return;
+ }
+ self.nativeVideoTrack->RemoveRenderer(adapter.nativeVideoRenderer);
+ [_adapters removeObjectAtIndex:indexToRemove];
+}
+
+@end
+
+@implementation RTCVideoTrack (Internal)
+
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
+ return static_cast<webrtc::VideoTrackInterface*>(self.mediaTrack.get());
+}
+
+@end
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.h b/talk/app/webrtc/objc/avfoundationvideocapturer.h
new file mode 100644
index 0000000000..ded80f6647
--- /dev/null
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
+#define TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
+
+#include "talk/media/base/videocapturer.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/video_frame.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+@class RTCAVFoundationVideoCapturerInternal;
+
+namespace webrtc {
+
+// cricket::VideoCapturer backed by AVFoundation. The actual AVFoundation work
+// is delegated to an Objective-C helper object (_capturer); this class adapts
+// its output to the cricket capture pipeline.
+class AVFoundationVideoCapturer : public cricket::VideoCapturer {
+ public:
+  AVFoundationVideoCapturer();
+  ~AVFoundationVideoCapturer();
+
+  cricket::CaptureState Start(const cricket::VideoFormat& format) override;
+  void Stop() override;
+  bool IsRunning() override;
+  bool IsScreencast() const override {
+    return false;
+  }
+  // Only NV12 is advertised; the camera is configured to emit NV12 buffers.
+  bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override {
+    fourccs->push_back(cricket::FOURCC_NV12);
+    return true;
+  }
+
+  // Returns the active capture session.
+  AVCaptureSession* GetCaptureSession();
+
+  // Switches the camera being used (either front or back).
+  void SetUseBackCamera(bool useBackCamera);
+  bool GetUseBackCamera() const;
+
+  // Converts the sample buffer into a cricket::CapturedFrame and signals the
+  // frame for capture.
+  void CaptureSampleBuffer(CMSampleBufferRef sampleBuffer);
+
+ private:
+  // Used to signal frame capture on the thread that capturer was started on.
+  void SignalFrameCapturedOnStartThread(const cricket::CapturedFrame* frame);
+
+  RTCAVFoundationVideoCapturerInternal* _capturer;
+  rtc::Thread* _startThread;  // Set in Start(), unset in Stop().
+  uint64_t _startTime;
+};  // AVFoundationVideoCapturer
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
new file mode 100644
index 0000000000..e1b0f88fb6
--- /dev/null
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -0,0 +1,447 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"
+
+#include "webrtc/base/bind.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+// TODO(tkchin): support other formats.
+static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
+static cricket::VideoFormat const kDefaultFormat =
+ cricket::VideoFormat(640,
+ 480,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_NV12);
+
+// This queue is used to start and stop the capturer without blocking the
+// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
+// running.
+static dispatch_queue_t kBackgroundQueue = nil;
+
+// This class is used to capture frames using AVFoundation APIs on iOS. It is
+// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
+// for this is that other webrtc objects own cricket::VideoCapturer, which is
+// not ref counted. To prevent bad behavior we do not expose this class
+// directly.
+@interface RTCAVFoundationVideoCapturerInternal : NSObject
+    <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property(nonatomic, readonly) AVCaptureSession* captureSession;
+@property(nonatomic, readonly) BOOL isRunning;
+@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
+
+// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
+// when we receive frames. This is safe because this object should be owned by
+// it.
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer;
+// Start/stop the session on a background queue so callers are not blocked by
+// -[AVCaptureSession startRunning]/stopRunning.
+- (void)startCaptureAsync;
+- (void)stopCaptureAsync;
+
+@end
+
+@implementation RTCAVFoundationVideoCapturerInternal {
+ // Keep pointers to inputs for convenience.
+ AVCaptureDeviceInput* _frontDeviceInput;
+ AVCaptureDeviceInput* _backDeviceInput;
+ AVCaptureVideoDataOutput* _videoOutput;
+ // The cricket::VideoCapturer that owns this class. Should never be NULL.
+ webrtc::AVFoundationVideoCapturer* _capturer;
+ BOOL _orientationHasChanged;
+}
+
+@synthesize captureSession = _captureSession;
+@synthesize useBackCamera = _useBackCamera;
+@synthesize isRunning = _isRunning;
+
+// Lazily creates the shared serial queue used for starting/stopping the
+// capture session off the calling thread. dispatch_once guards against the
+// multiple +initialize invocations that subclassing would cause.
++ (void)initialize {
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    kBackgroundQueue = dispatch_queue_create(
+        "com.google.webrtc.RTCAVFoundationCapturerBackground",
+        DISPATCH_QUEUE_SERIAL);
+  });
+}
+
+// Designated initializer. Returns nil if the capture session cannot be set
+// up (missing camera, unsupported preset, etc.). Registers for orientation
+// changes and for session runtime errors.
+// NOTE(review): the token returned by addObserverForName:... is discarded;
+// per NSNotificationCenter semantics, -removeObserver:self in dealloc does
+// not remove block-based observers, so this observer (and its retained block)
+// appears to be leaked — confirm and store/remove the token if so.
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
+  NSParameterAssert(capturer);
+  if (self = [super init]) {
+    _capturer = capturer;
+    if (![self setupCaptureSession]) {
+      return nil;
+    }
+    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
+    [center addObserver:self
+               selector:@selector(deviceOrientationDidChange:)
+                   name:UIDeviceOrientationDidChangeNotification
+                 object:nil];
+    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
+                        object:nil
+                         queue:nil
+                    usingBlock:^(NSNotification* notification) {
+      NSLog(@"Capture session error: %@", notification.userInfo);
+    }];
+  }
+  return self;
+}
+
+// Stops capture (no-op if not running) and unregisters selector-based
+// notification observers before the backing pointer is cleared.
+- (void)dealloc {
+  [self stopCaptureAsync];
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+  _capturer = nullptr;
+}
+
+// Custom setter: swaps the session's device input when the selected camera
+// actually changes; ignores redundant sets.
+- (void)setUseBackCamera:(BOOL)useBackCamera {
+  if (_useBackCamera == useBackCamera) {
+    return;
+  }
+  _useBackCamera = useBackCamera;
+  [self updateSessionInput];
+}
+
+// Begins capture. -startRunning blocks until the camera spins up, so it is
+// dispatched to the background queue; _isRunning is therefore set to YES
+// before the session is actually running.
+- (void)startCaptureAsync {
+  if (_isRunning) {
+    return;
+  }
+  _orientationHasChanged = NO;
+  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+  AVCaptureSession* session = _captureSession;
+  dispatch_async(kBackgroundQueue, ^{
+    [session startRunning];
+  });
+  _isRunning = YES;
+}
+
+// Ends capture. The sample-buffer delegate is cleared first so no further
+// frames are delivered while -stopRunning completes asynchronously on the
+// background queue.
+- (void)stopCaptureAsync {
+  if (!_isRunning) {
+    return;
+  }
+  [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
+  AVCaptureSession* session = _captureSession;
+  dispatch_async(kBackgroundQueue, ^{
+    [session stopRunning];
+  });
+  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+  _isRunning = NO;
+}
+
+#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
+
+// AVCaptureVideoDataOutputSampleBufferDelegate callback (runs on the queue
+// passed to setSampleBufferDelegate:, i.e. the main queue). Forwards each
+// frame to the owning C++ capturer; frames arriving after stop are dropped.
+- (void)captureOutput:(AVCaptureOutput*)captureOutput
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection*)connection {
+  NSParameterAssert(captureOutput == _videoOutput);
+  if (!_isRunning) {
+    return;
+  }
+  _capturer->CaptureSampleBuffer(sampleBuffer);
+}
+
+// Delegate callback for frames AVFoundation discarded (e.g. late delivery).
+// Logged only; dropped frames are not recoverable.
+- (void)captureOutput:(AVCaptureOutput*)captureOutput
+    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+         fromConnection:(AVCaptureConnection*)connection {
+  NSLog(@"Dropped sample buffer.");
+}
+
+#pragma mark - Private
+
+// One-time session setup: creates the AVCaptureSession, configures an NV12
+// video data output delivering to the main queue, finds both cameras, builds
+// device inputs for each, and installs the input matching _useBackCamera.
+// Returns NO (init fails) if any required piece is unavailable — note this
+// requires BOTH a front and a back camera to exist.
+- (BOOL)setupCaptureSession {
+  _captureSession = [[AVCaptureSession alloc] init];
+#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
+  NSString* version = [[UIDevice currentDevice] systemVersion];
+  if ([version integerValue] >= 7) {
+    // iOS 7+: keep the capture session off the app's shared audio session.
+    _captureSession.usesApplicationAudioSession = NO;
+  }
+#endif
+  if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
+    NSLog(@"Default video capture preset unsupported.");
+    return NO;
+  }
+  _captureSession.sessionPreset = kDefaultPreset;
+
+  // Make the capturer output NV12. Ideally we want I420 but that's not
+  // currently supported on iPhone / iPad.
+  _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
+  _videoOutput.videoSettings = @{
+    (NSString*)kCVPixelBufferPixelFormatTypeKey :
+        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+  };
+  // Keep late frames; they are forwarded rather than silently discarded.
+  _videoOutput.alwaysDiscardsLateVideoFrames = NO;
+  [_videoOutput setSampleBufferDelegate:self
+                                  queue:dispatch_get_main_queue()];
+  if (![_captureSession canAddOutput:_videoOutput]) {
+    NSLog(@"Default video capture output unsupported.");
+    return NO;
+  }
+  [_captureSession addOutput:_videoOutput];
+
+  // Find the capture devices.
+  AVCaptureDevice* frontCaptureDevice = nil;
+  AVCaptureDevice* backCaptureDevice = nil;
+  for (AVCaptureDevice* captureDevice in
+       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+    if (captureDevice.position == AVCaptureDevicePositionBack) {
+      backCaptureDevice = captureDevice;
+    }
+    if (captureDevice.position == AVCaptureDevicePositionFront) {
+      frontCaptureDevice = captureDevice;
+    }
+  }
+  if (!frontCaptureDevice || !backCaptureDevice) {
+    NSLog(@"Failed to get capture devices.");
+    return NO;
+  }
+
+  // Set up the session inputs.
+  NSError* error = nil;
+  _frontDeviceInput =
+      [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
+                                            error:&error];
+  if (!_frontDeviceInput) {
+    NSLog(@"Failed to get capture device input: %@",
+          error.localizedDescription);
+    return NO;
+  }
+  _backDeviceInput =
+      [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
+                                            error:&error];
+  if (!_backDeviceInput) {
+    NSLog(@"Failed to get capture device input: %@",
+          error.localizedDescription);
+    return NO;
+  }
+
+  // Add the inputs.
+  if (![_captureSession canAddInput:_frontDeviceInput] ||
+      ![_captureSession canAddInput:_backDeviceInput]) {
+    NSLog(@"Session does not support capture inputs.");
+    return NO;
+  }
+  [self updateSessionInput];
+
+  return YES;
+}
+
+// UIDeviceOrientationDidChangeNotification handler. Records that at least one
+// real orientation change occurred (see updateOrientation) and re-syncs the
+// connection's video orientation.
+- (void)deviceOrientationDidChange:(NSNotification*)notification {
+  _orientationHasChanged = YES;
+  [self updateOrientation];
+}
+
+// Maps the current UIDevice orientation onto the video connection. Note that
+// landscape left/right are intentionally crossed: UIDevice and
+// AVCaptureVideoOrientation define landscape in opposite senses. For face
+// up/down/unknown, the last known orientation is kept, except on first call
+// (before any change notification) where portrait is applied as a default.
+- (void)updateOrientation {
+  AVCaptureConnection* connection =
+      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
+  if (!connection.supportsVideoOrientation) {
+    // TODO(tkchin): set rotation bit on frames.
+    return;
+  }
+  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
+  switch ([UIDevice currentDevice].orientation) {
+    case UIDeviceOrientationPortrait:
+      orientation = AVCaptureVideoOrientationPortrait;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown:
+      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
+      break;
+    case UIDeviceOrientationLandscapeLeft:
+      orientation = AVCaptureVideoOrientationLandscapeRight;
+      break;
+    case UIDeviceOrientationLandscapeRight:
+      orientation = AVCaptureVideoOrientationLandscapeLeft;
+      break;
+    case UIDeviceOrientationFaceUp:
+    case UIDeviceOrientationFaceDown:
+    case UIDeviceOrientationUnknown:
+      if (!_orientationHasChanged) {
+        connection.videoOrientation = orientation;
+      }
+      return;
+  }
+  connection.videoOrientation = orientation;
+}
+
+// Swaps the session's device input to match _useBackCamera, inside a
+// begin/commitConfiguration pair so the change is applied atomically.
+- (void)updateSessionInput {
+  // Update the current session input to match what's stored in _useBackCamera.
+  [_captureSession beginConfiguration];
+  AVCaptureDeviceInput* oldInput = _backDeviceInput;
+  AVCaptureDeviceInput* newInput = _frontDeviceInput;
+  if (_useBackCamera) {
+    oldInput = _frontDeviceInput;
+    newInput = _backDeviceInput;
+  }
+  // Ok to remove this even if it's not attached. Will be no-op.
+  [_captureSession removeInput:oldInput];
+  [_captureSession addInput:newInput];
+  [self updateOrientation];
+  [_captureSession commitConfiguration];
+}
+
+@end
+
+namespace webrtc {
+
+// Advertises the single supported format (matching kDefaultPreset) and
+// creates the ObjC helper. _capturer may end up nil if camera setup fails;
+// Start() checks for that.
+AVFoundationVideoCapturer::AVFoundationVideoCapturer()
+    : _capturer(nil), _startThread(nullptr) {
+  // Set our supported formats. This matches kDefaultPreset.
+  std::vector<cricket::VideoFormat> supportedFormats;
+  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
+  SetSupportedFormats(supportedFormats);
+  _capturer =
+      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
+}
+
+// Releases the ObjC helper (under ARC, assigning nil drops the reference;
+// the helper's dealloc stops capture if still running).
+AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
+  _capturer = nil;
+}
+
+// Starts capture for |format|, which must equal kDefaultFormat (the only
+// advertised format). Records the calling thread so frames can later be
+// signaled back on it.
+// NOTE(review): the state is set to CS_RUNNING but CS_STARTING is returned —
+// the comment below suggests this is a deliberate workaround for the async
+// session start, but confirm the mismatch is intended.
+cricket::CaptureState AVFoundationVideoCapturer::Start(
+    const cricket::VideoFormat& format) {
+  if (!_capturer) {
+    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+  if (_capturer.isRunning) {
+    LOG(LS_ERROR) << "The capturer is already running.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+  if (format != kDefaultFormat) {
+    LOG(LS_ERROR) << "Unsupported format provided.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+
+  // Keep track of which thread capture started on. This is the thread that
+  // frames need to be sent to.
+  RTC_DCHECK(!_startThread);
+  _startThread = rtc::Thread::Current();
+
+  SetCaptureFormat(&format);
+  // This isn't super accurate because it takes a while for the AVCaptureSession
+  // to spin up, and this call returns async.
+  // TODO(tkchin): make this better.
+  [_capturer startCaptureAsync];
+  SetCaptureState(cricket::CaptureState::CS_RUNNING);
+
+  return cricket::CaptureState::CS_STARTING;
+}
+
+// Stops capture asynchronously and clears the recorded format and start
+// thread so a subsequent Start() can re-record them.
+void AVFoundationVideoCapturer::Stop() {
+  [_capturer stopCaptureAsync];
+  SetCaptureFormat(NULL);
+  _startThread = nullptr;
+}
+
+// Reflects the helper's isRunning flag (set synchronously in start/stop,
+// ahead of the actual async session transition).
+bool AVFoundationVideoCapturer::IsRunning() {
+  return _capturer.isRunning;
+}
+
+// Exposes the underlying AVCaptureSession (e.g. for preview layers).
+AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
+  return _capturer.captureSession;
+}
+
+// Forwards camera selection to the helper, which swaps the session input.
+void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
+  _capturer.useBackCamera = useBackCamera;
+}
+
+// Returns which camera is currently selected (back = true).
+bool AVFoundationVideoCapturer::GetUseBackCamera() const {
+  return _capturer.useBackCamera;
+}
+
+// Converts an NV12 CMSampleBuffer into a cricket::CapturedFrame and signals
+// it on the thread capture was started on. The frame's data pointer aliases
+// the locked CVPixelBuffer, so delivery must complete before the unlock at
+// the bottom — Thread::Invoke below is synchronous, which makes the
+// cross-thread path safe.
+void AVFoundationVideoCapturer::CaptureSampleBuffer(
+    CMSampleBufferRef sampleBuffer) {
+  // Reject anything but a single, valid, ready sample.
+  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
+      !CMSampleBufferIsValid(sampleBuffer) ||
+      !CMSampleBufferDataIsReady(sampleBuffer)) {
+    return;
+  }
+
+  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (imageBuffer == NULL) {
+    return;
+  }
+
+  // Base address must be unlocked to access frame data.
+  CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
+  CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
+  if (ret != kCVReturnSuccess) {
+    return;
+  }
+
+  static size_t const kYPlaneIndex = 0;
+  static size_t const kUVPlaneIndex = 1;
+  uint8_t* yPlaneAddress =
+      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
+  size_t yPlaneHeight =
+      CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
+  size_t yPlaneWidth =
+      CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
+  size_t yPlaneBytesPerRow =
+      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
+  size_t uvPlaneHeight =
+      CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
+  size_t uvPlaneBytesPerRow =
+      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
+  size_t frameSize =
+      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
+
+  // Sanity check assumption that planar bytes are contiguous.
+  uint8_t* uvPlaneAddress =
+      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
+  RTC_DCHECK(
+      uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
+
+  // Stuff data into a cricket::CapturedFrame.
+  int64_t currentTime = rtc::TimeNanos();
+  cricket::CapturedFrame frame;
+  frame.width = yPlaneWidth;
+  frame.height = yPlaneHeight;
+  frame.pixel_width = 1;
+  frame.pixel_height = 1;
+  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
+  frame.time_stamp = currentTime;
+  frame.data = yPlaneAddress;
+  frame.data_size = frameSize;
+
+  if (_startThread->IsCurrent()) {
+    SignalFrameCaptured(this, &frame);
+  } else {
+    // Synchronous cross-thread call; blocks until consumers have seen the
+    // frame, keeping the pixel buffer lock valid for the duration.
+    _startThread->Invoke<void>(
+        rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
+                  this, &frame));
+  }
+  CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
+}
+
+// Runs on _startThread only (enforced by the DCHECK); emits the frame to
+// capture-signal listeners.
+void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
+    const cricket::CapturedFrame* frame) {
+  RTC_DCHECK(_startThread->IsCurrent());
+  // This will call a superclass method that will perform the frame conversion
+  // to I420.
+  SignalFrameCaptured(this, frame);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/objc/public/RTCAVFoundationVideoSource.h b/talk/app/webrtc/objc/public/RTCAVFoundationVideoSource.h
new file mode 100644
index 0000000000..b6a686a7dc
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCAVFoundationVideoSource.h
@@ -0,0 +1,49 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCVideoSource.h"
+
+@class AVCaptureSession;
+@class RTCMediaConstraints;
+@class RTCPeerConnectionFactory;
+
+// RTCAVFoundationVideoSource is a video source that uses
+// webrtc::AVFoundationVideoCapturer. We do not currently provide a wrapper for
+// that capturer because cricket::VideoCapturer is not ref counted and we cannot
+// guarantee its lifetime. Instead, we expose its properties through the ref
+// counted video source interface.
+@interface RTCAVFoundationVideoSource : RTCVideoSource
+
+// Creates a video source backed by the AVFoundation capturer, constrained by
+// |constraints|.
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+                    constraints:(RTCMediaConstraints*)constraints;
+
+// Switches the camera being used (either front or back).
+@property(nonatomic, assign) BOOL useBackCamera;
+// Returns the active capture session.
+@property(nonatomic, readonly) AVCaptureSession* captureSession;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCAudioSource.h b/talk/app/webrtc/objc/public/RTCAudioSource.h
new file mode 100644
index 0000000000..c0d38fe7ed
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCAudioSource.h
@@ -0,0 +1,40 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaSource.h"
+
+// RTCAudioSource is an ObjectiveC wrapper for AudioSourceInterface. It is
+// used as the source for one or more RTCAudioTrack objects.
+@interface RTCAudioSource : RTCMediaSource
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCAudioTrack.h b/talk/app/webrtc/objc/public/RTCAudioTrack.h
new file mode 100644
index 0000000000..48c2783173
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCAudioTrack.h
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+// RTCAudioTrack is an ObjectiveC wrapper for AudioTrackInterface.
+@interface RTCAudioTrack : RTCMediaStreamTrack
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCDataChannel.h b/talk/app/webrtc/objc/public/RTCDataChannel.h
new file mode 100644
index 0000000000..24a46f655c
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCDataChannel.h
@@ -0,0 +1,118 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// ObjectiveC wrapper for a DataChannelInit object.
+@interface RTCDataChannelInit : NSObject
+
+// Set to YES if ordered delivery is required
+@property(nonatomic) BOOL isOrdered;
+// Max period in milliseconds in which retransmissions will be sent. After this
+// time, no more retransmissions will be sent. -1 if unset.
+@property(nonatomic) NSInteger maxRetransmitTimeMs;
+// The max number of retransmissions. -1 if unset.
+@property(nonatomic) NSInteger maxRetransmits;
+// Set to YES if the channel has been externally negotiated and we do not send
+// an in-band signalling in the form of an "open" message
+@property(nonatomic) BOOL isNegotiated;
+// The stream id, or SID, for SCTP data channels. -1 if unset.
+@property(nonatomic) NSInteger streamId;
+// Set by the application and opaque to the WebRTC implementation.
+@property(nonatomic) NSString* protocol;
+
+@end
+
+// ObjectiveC wrapper for a DataBuffer object.
+@interface RTCDataBuffer : NSObject
+
+@property(nonatomic, readonly) NSData* data;
+@property(nonatomic, readonly) BOOL isBinary;
+
+- (instancetype)initWithData:(NSData*)data isBinary:(BOOL)isBinary;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
+
+// Keep in sync with webrtc::DataChannelInterface::DataState
+typedef enum {
+ kRTCDataChannelStateConnecting,
+ kRTCDataChannelStateOpen,
+ kRTCDataChannelStateClosing,
+ kRTCDataChannelStateClosed
+} RTCDataChannelState;
+
+@class RTCDataChannel;
+// Protocol for receiving data channel state and message events.
+@protocol RTCDataChannelDelegate<NSObject>
+
+// Called when the data channel state has changed.
+- (void)channelDidChangeState:(RTCDataChannel*)channel;
+
+// Called when a data buffer was successfully received.
+- (void)channel:(RTCDataChannel*)channel
+    didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer;
+
+@optional
+
+// Called when the buffered amount has changed.
+- (void)channel:(RTCDataChannel*)channel
+    didChangeBufferedAmount:(NSUInteger)amount;
+
+@end
+
+// ObjectiveC wrapper for a DataChannel object.
+// See talk/app/webrtc/datachannelinterface.h
+@interface RTCDataChannel : NSObject
+
+@property(nonatomic, readonly) NSString* label;
+@property(nonatomic, readonly) BOOL isReliable;
+@property(nonatomic, readonly) BOOL isOrdered;
+@property(nonatomic, readonly) NSUInteger maxRetransmitTime;
+@property(nonatomic, readonly) NSUInteger maxRetransmits;
+@property(nonatomic, readonly) NSString* protocol;
+@property(nonatomic, readonly) BOOL isNegotiated;
+@property(nonatomic, readonly) NSInteger streamId;
+@property(nonatomic, readonly) RTCDataChannelState state;
+@property(nonatomic, readonly) NSUInteger bufferedAmount;
+// Weak to avoid a retain cycle between the channel and its observer.
+@property(nonatomic, weak) id<RTCDataChannelDelegate> delegate;
+
+- (void)close;
+- (BOOL)sendData:(RTCDataBuffer*)data;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+    unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCEAGLVideoView.h b/talk/app/webrtc/objc/public/RTCEAGLVideoView.h
new file mode 100644
index 0000000000..32c834acf8
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCEAGLVideoView.h
@@ -0,0 +1,46 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCVideoRenderer.h"
+
+@class RTCEAGLVideoView;
+@protocol RTCEAGLVideoViewDelegate
+
+- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size;
+
+@end
+
+// RTCEAGLVideoView is an RTCVideoRenderer which renders i420 frames in its
+// bounds using OpenGLES 2.0.
+@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
+
+@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCFileLogger.h b/talk/app/webrtc/objc/public/RTCFileLogger.h
new file mode 100644
index 0000000000..3900cb6fbe
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCFileLogger.h
@@ -0,0 +1,70 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// TODO(tkchin): Move this to a common location.
+#ifndef NS_DESIGNATED_INITIALIZER
+#define NS_DESIGNATED_INITIALIZER
+#endif
+
+typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) {
+ kRTCFileLoggerSeverityVerbose,
+ kRTCFileLoggerSeverityInfo,
+ kRTCFileLoggerSeverityWarning,
+ kRTCFileLoggerSeverityError
+};
+
+// This class intercepts WebRTC logs and saves them to a file. The file size
+// will not exceed the given maximum byte size. When the maximum byte size is
+// reached, logs from the beginning and the end are preserved while the middle
+// section is overwritten instead.
+// This class is not thread-safe.
+@interface RTCFileLogger : NSObject
+
+// The severity level to capture. The default is kRTCFileLoggerSeverityInfo.
+@property(nonatomic, assign) RTCFileLoggerSeverity severity;
+
+// Default constructor provides default settings for dir path and file size.
+- (instancetype)init;
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ NS_DESIGNATED_INITIALIZER;
+
+// Starts writing WebRTC logs to disk if not already started. Overwrites any
+// existing file(s).
+- (void)start;
+
+// Stops writing WebRTC logs to disk. This method is also called on dealloc.
+- (void)stop;
+
+// Returns the current contents of the logs, or nil if logging is still
+// active (start was called without a matching stop).
+- (NSData *)logData;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCI420Frame.h b/talk/app/webrtc/objc/public/RTCI420Frame.h
new file mode 100644
index 0000000000..be16a04c3d
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCI420Frame.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCI420Frame is an ObjectiveC version of cricket::VideoFrame.
+@interface RTCI420Frame : NSObject
+
+@property(nonatomic, readonly) NSUInteger width;
+@property(nonatomic, readonly) NSUInteger height;
+@property(nonatomic, readonly) NSUInteger chromaWidth;
+@property(nonatomic, readonly) NSUInteger chromaHeight;
+@property(nonatomic, readonly) NSUInteger chromaSize;
+// These can return NULL if the object is not backed by a buffer.
+@property(nonatomic, readonly) const uint8_t* yPlane;
+@property(nonatomic, readonly) const uint8_t* uPlane;
+@property(nonatomic, readonly) const uint8_t* vPlane;
+@property(nonatomic, readonly) NSInteger yPitch;
+@property(nonatomic, readonly) NSInteger uPitch;
+@property(nonatomic, readonly) NSInteger vPitch;
+
+- (BOOL)makeExclusive;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
+
diff --git a/talk/app/webrtc/objc/public/RTCICECandidate.h b/talk/app/webrtc/objc/public/RTCICECandidate.h
new file mode 100644
index 0000000000..25e4fe2aee
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCICECandidate.h
@@ -0,0 +1,56 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCICECandidate contains an instance of ICECandidateInterface.
+@interface RTCICECandidate : NSObject
+
+// If present, this contains the identifier of the "media stream
+// identification" as defined in [RFC 3388] for the m-line this candidate
+// is associated with.
+@property(nonatomic, copy, readonly) NSString* sdpMid;
+
+// This indicates the index (starting at zero) of the m-line in the SDP
+// this candidate is associated with.
+@property(nonatomic, assign, readonly) NSInteger sdpMLineIndex;
+
+// Creates an SDP-ized form of this candidate.
+@property(nonatomic, copy, readonly) NSString* sdp;
+
+// Creates an ICECandidateInterface based on an SDP string.
+- (id)initWithMid:(NSString*)sdpMid
+ index:(NSInteger)sdpMLineIndex
+ sdp:(NSString*)sdp;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCICEServer.h b/talk/app/webrtc/objc/public/RTCICEServer.h
new file mode 100644
index 0000000000..35acf0db5b
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCICEServer.h
@@ -0,0 +1,49 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCICEServer allows for the creation of ICEServer structs.
+@interface RTCICEServer : NSObject
+
+// The server URI, username, and password.
+@property(nonatomic, strong, readonly) NSURL* URI;
+@property(nonatomic, copy, readonly) NSString* username;
+@property(nonatomic, copy, readonly) NSString* password;
+
+// Initializer for RTCICEServer taking uri, username, and password.
+- (id)initWithURI:(NSURL*)URI
+ username:(NSString*)username
+ password:(NSString*)password;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCLogging.h b/talk/app/webrtc/objc/public/RTCLogging.h
new file mode 100644
index 0000000000..bcd160d921
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCLogging.h
@@ -0,0 +1,92 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// Subset of rtc::LoggingSeverity.
+typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
+ kRTCLoggingSeverityVerbose,
+ kRTCLoggingSeverityInfo,
+ kRTCLoggingSeverityWarning,
+ kRTCLoggingSeverityError,
+};
+
+#if defined(__cplusplus)
+extern "C" void RTCLogEx(RTCLoggingSeverity severity, NSString* logString);
+extern "C" void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+extern "C" NSString* RTCFileName(const char* filePath);
+#else
+
+// Wrapper for C++ LOG(sev) macros.
+// Logs the log string to the webrtc logstream for the given severity.
+extern void RTCLogEx(RTCLoggingSeverity severity, NSString* logString);
+
+// Wrapper for rtc::LogMessage::LogToDebug.
+// Sets the minimum severity to be logged to console.
+extern void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+
+// Returns the filename with the path prefix removed.
+extern NSString* RTCFileName(const char* filePath);
+
+#endif
+
+// Some convenience macros.
+
+#define RTCLogString(format, ...) \
+ [NSString stringWithFormat:@"(%@:%d %s): " format, \
+ RTCFileName(__FILE__), \
+ __LINE__, \
+ __FUNCTION__, \
+ ##__VA_ARGS__]
+
+#define RTCLogFormat(severity, format, ...) \
+ do { \
+ NSString *logString = RTCLogString(format, ##__VA_ARGS__); \
+ RTCLogEx(severity, logString); \
+ } while (false)
+
+#define RTCLogVerbose(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityVerbose, format, ##__VA_ARGS__) \
+
+#define RTCLogInfo(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityInfo, format, ##__VA_ARGS__) \
+
+#define RTCLogWarning(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityWarning, format, ##__VA_ARGS__) \
+
+#define RTCLogError(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityError, format, ##__VA_ARGS__) \
+
+#if !defined(NDEBUG)
+#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
+#else
+#define RTCLogDebug(format, ...) \
+ do { \
+ } while (false)
+#endif
+
+#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
diff --git a/talk/app/webrtc/objc/public/RTCMediaConstraints.h b/talk/app/webrtc/objc/public/RTCMediaConstraints.h
new file mode 100644
index 0000000000..425150a804
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCMediaConstraints.h
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCMediaConstraints contains the media constraints to be used in
+// RTCPeerConnection and RTCMediaStream.
+@interface RTCMediaConstraints : NSObject
+
+// Initializer for RTCMediaConstraints. The parameters mandatory and optional
+// contain RTCPair objects with key/value for each constraint.
+- (id)initWithMandatoryConstraints:(NSArray *)mandatory
+ optionalConstraints:(NSArray *)optional;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCMediaSource.h b/talk/app/webrtc/objc/public/RTCMediaSource.h
new file mode 100644
index 0000000000..164d85de71
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCMediaSource.h
@@ -0,0 +1,44 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCTypes.h"
+
+// RTCMediaSource is an ObjectiveC wrapper for MediaSourceInterface
+@interface RTCMediaSource : NSObject
+
+// The current state of the RTCMediaSource.
+@property(nonatomic, assign, readonly) RTCSourceState state;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCMediaStream.h b/talk/app/webrtc/objc/public/RTCMediaStream.h
new file mode 100644
index 0000000000..40a63ee378
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCMediaStream.h
@@ -0,0 +1,51 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class RTCAudioTrack;
+@class RTCVideoTrack;
+
+// RTCMediaStream is an ObjectiveC wrapper for MediaStreamInterface.
+@interface RTCMediaStream : NSObject
+
+@property(nonatomic, strong, readonly) NSArray *audioTracks;
+@property(nonatomic, strong, readonly) NSArray *videoTracks;
+@property(nonatomic, strong, readonly) NSString *label;
+
+- (BOOL)addAudioTrack:(RTCAudioTrack *)track;
+- (BOOL)addVideoTrack:(RTCVideoTrack *)track;
+- (BOOL)removeAudioTrack:(RTCAudioTrack *)track;
+- (BOOL)removeVideoTrack:(RTCVideoTrack *)track;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCMediaStreamTrack.h b/talk/app/webrtc/objc/public/RTCMediaStreamTrack.h
new file mode 100644
index 0000000000..2b098645ff
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCMediaStreamTrack.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCTypes.h"
+
+@class RTCMediaStreamTrack;
+@protocol RTCMediaStreamTrackDelegate<NSObject>
+
+- (void)mediaStreamTrackDidChange:(RTCMediaStreamTrack*)mediaStreamTrack;
+
+@end
+
+// RTCMediaStreamTrack implements the interface common to RTCAudioTrack and
+// RTCVideoTrack. Do not create an instance of this class, rather create one
+// of the derived classes.
+@interface RTCMediaStreamTrack : NSObject
+
+@property(nonatomic, readonly) NSString* kind;
+@property(nonatomic, readonly) NSString* label;
+@property(nonatomic, weak) id<RTCMediaStreamTrackDelegate> delegate;
+
+- (BOOL)isEnabled;
+- (BOOL)setEnabled:(BOOL)enabled;
+- (RTCTrackState)state;
+- (BOOL)setState:(RTCTrackState)state;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCNSGLVideoView.h b/talk/app/webrtc/objc/public/RTCNSGLVideoView.h
new file mode 100644
index 0000000000..2f07239ddd
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCNSGLVideoView.h
@@ -0,0 +1,47 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if TARGET_OS_IPHONE
+#error "This file targets OSX."
+#endif
+
+#import <AppKit/NSOpenGLView.h>
+
+#import "RTCVideoRenderer.h"
+
+@class RTCNSGLVideoView;
+@protocol RTCNSGLVideoViewDelegate
+
+- (void)videoView:(RTCNSGLVideoView*)videoView didChangeVideoSize:(CGSize)size;
+
+@end
+
+@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
+
+@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCOpenGLVideoRenderer.h b/talk/app/webrtc/objc/public/RTCOpenGLVideoRenderer.h
new file mode 100644
index 0000000000..778930488e
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCOpenGLVideoRenderer.h
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <GLKit/GLKit.h>
+#else
+#import <AppKit/NSOpenGL.h>
+#endif
+
+@class RTCI420Frame;
+
+// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
+// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
+// framebuffer creation and management should be handled elsewhere using the
+// same context used to initialize this class.
+@interface RTCOpenGLVideoRenderer : NSObject
+
+// The last successfully drawn frame. Used to avoid drawing frames unnecessarily
+// hence saving battery life by reducing load.
+@property(nonatomic, readonly) RTCI420Frame* lastDrawnFrame;
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext*)context;
+#else
+- (instancetype)initWithContext:(NSOpenGLContext*)context;
+#endif
+
+// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
+// called before this function will succeed.
+- (BOOL)drawFrame:(RTCI420Frame*)frame;
+
+// The following methods are used to manage OpenGL resources. On iOS
+// applications should release resources when placed in background for use in
+// the foreground application. In fact, attempting to call OpenGLES commands
+// while in background will result in application termination.
+
+// Sets up the OpenGL state needed for rendering.
+- (void)setupGL;
+// Tears down the OpenGL state created by |setupGL|.
+- (void)teardownGL;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCPair.h b/talk/app/webrtc/objc/public/RTCPair.h
new file mode 100644
index 0000000000..51d20e721e
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCPair.h
@@ -0,0 +1,45 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// A class to hold a key and value.
+@interface RTCPair : NSObject
+
+@property(nonatomic, strong, readonly) NSString *key;
+@property(nonatomic, strong, readonly) NSString *value;
+
+// Initialize a RTCPair object with a key and value.
+- (id)initWithKey:(NSString *)key value:(NSString *)value;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCPeerConnection.h b/talk/app/webrtc/objc/public/RTCPeerConnection.h
new file mode 100644
index 0000000000..a13ed3ecc3
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCPeerConnection.h
@@ -0,0 +1,127 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCPeerConnectionDelegate.h"
+
+@class RTCConfiguration;
+@class RTCDataChannel;
+@class RTCDataChannelInit;
+@class RTCICECandidate;
+@class RTCICEServers;
+@class RTCMediaConstraints;
+@class RTCMediaStream;
+@class RTCMediaStreamTrack;
+@class RTCSessionDescription;
+@protocol RTCSessionDescriptionDelegate;
+@protocol RTCStatsDelegate;
+
+// RTCPeerConnection is an ObjectiveC friendly wrapper around a PeerConnection
+// object. See the documentation in talk/app/webrtc/peerconnectioninterface.h
+// or http://www.webrtc.org/reference/native-apis, which in turn is inspired by
+// the JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+// http://www.w3.org/TR/mediacapture-streams/
+@interface RTCPeerConnection : NSObject
+
+@property(nonatomic, weak) id<RTCPeerConnectionDelegate> delegate;
+
+// Accessor methods to active local streams.
+@property(nonatomic, strong, readonly) NSArray *localStreams;
+
+// The local description.
+@property(nonatomic, assign, readonly) RTCSessionDescription *localDescription;
+
+// The remote description.
+@property(nonatomic, assign, readonly) RTCSessionDescription *remoteDescription;
+
+// The current signaling state.
+@property(nonatomic, assign, readonly) RTCSignalingState signalingState;
+@property(nonatomic, assign, readonly) RTCICEConnectionState iceConnectionState;
+@property(nonatomic, assign, readonly) RTCICEGatheringState iceGatheringState;
+
+// Add a new MediaStream to be sent on this PeerConnection.
+// Note that a SessionDescription negotiation is needed before the
+// remote peer can receive the stream.
+- (BOOL)addStream:(RTCMediaStream *)stream;
+
+// Remove a MediaStream from this PeerConnection.
+// Note that a SessionDescription negotiation is needed before the
+// remote peer is notified.
+- (void)removeStream:(RTCMediaStream *)stream;
+
+// Create a data channel.
+- (RTCDataChannel*)createDataChannelWithLabel:(NSString*)label
+ config:(RTCDataChannelInit*)config;
+
+// Create a new offer.
+// Success or failure will be reported via RTCSessionDescriptionDelegate.
+- (void)createOfferWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ constraints:(RTCMediaConstraints *)constraints;
+
+// Create an answer to an offer.
+// Success or failure will be reported via RTCSessionDescriptionDelegate.
+- (void)createAnswerWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ constraints:(RTCMediaConstraints *)constraints;
+
+// Sets the local session description.
+// Success or failure will be reported via RTCSessionDescriptionDelegate.
+- (void)
+ setLocalDescriptionWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ sessionDescription:(RTCSessionDescription *)sdp;
+
+// Sets the remote session description.
+// Success or failure will be reported via RTCSessionDescriptionDelegate.
+- (void)
+ setRemoteDescriptionWithDelegate:(id<RTCSessionDescriptionDelegate>)delegate
+ sessionDescription:(RTCSessionDescription *)sdp;
+
+// Sets the PeerConnection's global configuration to |configuration|.
+// Any changes to STUN/TURN servers or ICE candidate policy will affect the
+// next gathering phase, and cause the next call to createOffer to generate
+// new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
+// cannot be changed with this method.
+- (BOOL)setConfiguration:(RTCConfiguration *)configuration;
+
+// Provides a remote candidate to the ICE Agent.
+- (BOOL)addICECandidate:(RTCICECandidate *)candidate;
+
+// Terminates all media and closes the transport.
+- (void)close;
+
+// Gets statistics for the media track. If |mediaStreamTrack| is nil statistics
+// are gathered for all tracks.
+// Statistics information will be reported via RTCStatsDelegate.
+- (BOOL)getStatsWithDelegate:(id<RTCStatsDelegate>)delegate
+ mediaStreamTrack:(RTCMediaStreamTrack*)mediaStreamTrack
+ statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCPeerConnectionDelegate.h b/talk/app/webrtc/objc/public/RTCPeerConnectionDelegate.h
new file mode 100644
index 0000000000..bf0c23104a
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCPeerConnectionDelegate.h
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCTypes.h"
+
+@class RTCDataChannel;
+@class RTCICECandidate;
+@class RTCMediaStream;
+@class RTCPeerConnection;
+
+// RTCPeerConnectionDelegate is a protocol for an object that must be
+// implemented to get messages from PeerConnection.
+@protocol RTCPeerConnectionDelegate<NSObject>
+
+// Triggered when the SignalingState changed.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ signalingStateChanged:(RTCSignalingState)stateChanged;
+
+// Triggered when media is received on a new stream from remote peer.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ addedStream:(RTCMediaStream *)stream;
+
+// Triggered when a remote peer closes a stream.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ removedStream:(RTCMediaStream *)stream;
+
+// Triggered when renegotiation is needed, for example the ICE has restarted.
+- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection *)peerConnection;
+
+// Called any time the ICEConnectionState changes.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ iceConnectionChanged:(RTCICEConnectionState)newState;
+
+// Called any time the ICEGatheringState changes.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ iceGatheringChanged:(RTCICEGatheringState)newState;
+
+// A new ICE candidate has been found.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ gotICECandidate:(RTCICECandidate *)candidate;
+
+// New data channel has been opened.
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ didOpenDataChannel:(RTCDataChannel*)dataChannel;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h b/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h
new file mode 100644
index 0000000000..e1e69b4b7f
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class RTCAudioTrack;
+@class RTCConfiguration;
+@class RTCMediaConstraints;
+@class RTCMediaStream;
+@class RTCPeerConnection;
+@class RTCVideoCapturer;
+@class RTCVideoSource;
+@class RTCVideoTrack;
+@protocol RTCPeerConnectionDelegate;
+
+// RTCPeerConnectionFactory is an ObjectiveC wrapper for PeerConnectionFactory.
+// It is the main entry point to the PeerConnection API for clients.
+@interface RTCPeerConnectionFactory : NSObject
+
+// Initialize & de-initialize the SSL subsystem. Failure is fatal.
++ (void)initializeSSL;
++ (void)deinitializeSSL;
+
+// Create an RTCPeerConnection object. RTCPeerConnectionFactory will create
+// required libjingle threads, socket and network manager factory classes for
+// networking.
+- (RTCPeerConnection *)
+ peerConnectionWithICEServers:(NSArray *)servers
+ constraints:(RTCMediaConstraints *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate;
+
+// Creates a peer connection using the default port allocator factory and identity service.
+- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration
+ constraints:(RTCMediaConstraints *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate;
+
+// Create an RTCMediaStream named |label|.
+- (RTCMediaStream *)mediaStreamWithLabel:(NSString *)label;
+
+// Creates a RTCVideoSource. The new source takes ownership of |capturer|.
+// |constraints| decides video resolution and frame rate but can be NULL.
+- (RTCVideoSource *)videoSourceWithCapturer:(RTCVideoCapturer *)capturer
+ constraints:(RTCMediaConstraints *)constraints;
+
+// Creates a new local VideoTrack. The same |source| can be used in several
+// tracks.
+- (RTCVideoTrack *)videoTrackWithID:(NSString *)videoId
+ source:(RTCVideoSource *)source;
+
+// Creates a new AudioTrack.
+- (RTCAudioTrack *)audioTrackWithID:(NSString *)audioId;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h b/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
new file mode 100644
index 0000000000..b0cc72b5b7
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// See talk/app/webrtc/peerconnectioninterface.h.
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCIceTransportsType) {
+ kRTCIceTransportsTypeNone,
+ kRTCIceTransportsTypeRelay,
+ kRTCIceTransportsTypeNoHost,
+ kRTCIceTransportsTypeAll,
+};
+
+// https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-08#section-4.1.1
+typedef NS_ENUM(NSInteger, RTCBundlePolicy) {
+ kRTCBundlePolicyBalanced,
+ kRTCBundlePolicyMaxBundle,
+ kRTCBundlePolicyMaxCompat,
+};
+
+// https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-09#section-4.1.1
+typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) {
+ kRTCRtcpMuxPolicyNegotiate,
+ kRTCRtcpMuxPolicyRequire,
+};
+
+typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
+ kRTCTcpCandidatePolicyEnabled,
+ kRTCTcpCandidatePolicyDisabled,
+};
+
+// Configuration object used for creating a peer connection.
+@interface RTCConfiguration : NSObject
+
+@property(nonatomic, assign) RTCIceTransportsType iceTransportsType;
+@property(nonatomic, copy) NSArray *iceServers;
+@property(nonatomic, assign) RTCBundlePolicy bundlePolicy;
+@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy;
+@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
+@property(nonatomic, assign) int audioJitterBufferMaxPackets;
+@property(nonatomic, assign) int iceConnectionReceivingTimeout;
+
+- (instancetype)initWithIceTransportsType:(RTCIceTransportsType)iceTransportsType
+ bundlePolicy:(RTCBundlePolicy)bundlePolicy
+ rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
+ tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
+ audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets
+ iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCSessionDescription.h b/talk/app/webrtc/objc/public/RTCSessionDescription.h
new file mode 100644
index 0000000000..e3db21cb60
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCSessionDescription.h
@@ -0,0 +1,50 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// Description of an RFC 4566 Session.
+// RTCSessionDescription is an ObjectiveC wrapper for
+// SessionDescriptionInterface.
+@interface RTCSessionDescription : NSObject
+
+// The SDP description.
+@property(nonatomic, copy, readonly) NSString *description;
+
+// The session type.
+@property(nonatomic, copy, readonly) NSString *type;
+
+- (id)initWithType:(NSString *)type sdp:(NSString *)sdp;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
+
diff --git a/talk/app/webrtc/objc/public/RTCSessionDescriptionDelegate.h b/talk/app/webrtc/objc/public/RTCSessionDescriptionDelegate.h
new file mode 100644
index 0000000000..2fb0af91f0
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCSessionDescriptionDelegate.h
@@ -0,0 +1,49 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class RTCPeerConnection;
+@class RTCSessionDescription;
+
+extern NSString* const kRTCSessionDescriptionDelegateErrorDomain;
+extern int const kRTCSessionDescriptionDelegateErrorCode;
+
+// RTCSessionDescriptionDelegate is a protocol for listening to callback
+// messages when RTCSessionDescriptions are created or set.
+@protocol RTCSessionDescriptionDelegate<NSObject>
+
+// Called when creating a session.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ didCreateSessionDescription:(RTCSessionDescription *)sdp
+ error:(NSError *)error;
+
+// Called when setting a local or remote description.
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+ didSetSessionDescriptionWithError:(NSError *)error;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCStatsDelegate.h b/talk/app/webrtc/objc/public/RTCStatsDelegate.h
new file mode 100644
index 0000000000..54e8e569a1
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCStatsDelegate.h
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class RTCPeerConnection;
+
+// RTCStatsDelegate is a protocol for receiving statistics reports
+// from RTCPeerConnection.
+@protocol RTCStatsDelegate<NSObject>
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ didGetStats:(NSArray*)stats; // NSArray of RTCStatsReport*.
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCStatsReport.h b/talk/app/webrtc/objc/public/RTCStatsReport.h
new file mode 100644
index 0000000000..691c1aa600
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCStatsReport.h
@@ -0,0 +1,45 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// ObjectiveC friendly wrapper around a StatsReport object.
+// See talk/app/webrtc/statstypes.h
+@interface RTCStatsReport : NSObject
+
+@property(nonatomic, readonly) NSString* reportId;
+@property(nonatomic, readonly) NSString* type;
+@property(nonatomic, readonly) CFTimeInterval timestamp;
+@property(nonatomic, readonly) NSArray* values; // NSArray of RTCPair*.
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__((
+ unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCTypes.h b/talk/app/webrtc/objc/public/RTCTypes.h
new file mode 100644
index 0000000000..99ac192c34
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCTypes.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Enums that are common to the ObjectiveC version of the PeerConnection API.
+
+// RTCICEConnectionState corresponds to the states in webrtc::ICEConnectionState.
+typedef enum {
+ RTCICEConnectionNew,
+ RTCICEConnectionChecking,
+ RTCICEConnectionConnected,
+ RTCICEConnectionCompleted,
+ RTCICEConnectionFailed,
+ RTCICEConnectionDisconnected,
+ RTCICEConnectionClosed,
+ RTCICEConnectionMax,
+} RTCICEConnectionState;
+
+// RTCICEGatheringState corresponds to the states in webrtc::ICEGatheringState.
+typedef enum {
+ RTCICEGatheringNew,
+ RTCICEGatheringGathering,
+ RTCICEGatheringComplete,
+} RTCICEGatheringState;
+
+// RTCSignalingState corresponds to the states in webrtc::SignalingState.
+typedef enum {
+ RTCSignalingStable,
+ RTCSignalingHaveLocalOffer,
+ RTCSignalingHaveLocalPrAnswer,
+ RTCSignalingHaveRemoteOffer,
+ RTCSignalingHaveRemotePrAnswer,
+ RTCSignalingClosed,
+} RTCSignalingState;
+
+// RTCStatsOutputLevel corresponds to webrtc::StatsOutputLevel
+typedef enum {
+ RTCStatsOutputLevelStandard,
+ RTCStatsOutputLevelDebug,
+} RTCStatsOutputLevel;
+
+// RTCSourceState corresponds to the states in webrtc::SourceState.
+typedef enum {
+ RTCSourceStateInitializing,
+ RTCSourceStateLive,
+ RTCSourceStateEnded,
+ RTCSourceStateMuted,
+} RTCSourceState;
+
+// RTCTrackState corresponds to the states in webrtc::TrackState.
+typedef enum {
+ RTCTrackStateInitializing,
+ RTCTrackStateLive,
+ RTCTrackStateEnded,
+ RTCTrackStateFailed,
+} RTCTrackState;
diff --git a/talk/app/webrtc/objc/public/RTCVideoCapturer.h b/talk/app/webrtc/objc/public/RTCVideoCapturer.h
new file mode 100644
index 0000000000..d07c091fad
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCVideoCapturer.h
@@ -0,0 +1,42 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCVideoCapturer is an ObjectiveC wrapper for VideoCapturerInterface.
+@interface RTCVideoCapturer : NSObject
+
+// Create a new video capturer using the specified device.
++ (RTCVideoCapturer *)capturerWithDeviceName:(NSString *)deviceName;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCVideoRenderer.h b/talk/app/webrtc/objc/public/RTCVideoRenderer.h
new file mode 100644
index 0000000000..c837b1c5ac
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCVideoRenderer.h
@@ -0,0 +1,43 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+@class RTCI420Frame;
+
+@protocol RTCVideoRenderer<NSObject>
+
+// The size of the frame.
+- (void)setSize:(CGSize)size;
+
+// The frame to be displayed.
+- (void)renderFrame:(RTCI420Frame*)frame;
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCVideoSource.h b/talk/app/webrtc/objc/public/RTCVideoSource.h
new file mode 100644
index 0000000000..15a9262b29
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCVideoSource.h
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaSource.h"
+
+// RTCVideoSource is an ObjectiveC wrapper for VideoSourceInterface.
+@interface RTCVideoSource : RTCMediaSource
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objc/public/RTCVideoTrack.h b/talk/app/webrtc/objc/public/RTCVideoTrack.h
new file mode 100644
index 0000000000..332c3ea80d
--- /dev/null
+++ b/talk/app/webrtc/objc/public/RTCVideoTrack.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+@protocol RTCVideoRenderer;
+@class RTCPeerConnectionFactory;
+@class RTCVideoSource;
+
+// RTCVideoTrack is an ObjectiveC wrapper for VideoTrackInterface.
+@interface RTCVideoTrack : RTCMediaStreamTrack
+
+@property(nonatomic, readonly) RTCVideoSource* source;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+ source:(RTCVideoSource*)source
+ trackId:(NSString*)trackId;
+
+// Register a renderer that will render all frames received on this track.
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer;
+
+// Deregister a renderer.
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer;
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+// Disallow init and don't add to documentation
+- (id)init __attribute__(
+ (unavailable("init is not a supported initializer for this class.")));
+#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+
+@end
diff --git a/talk/app/webrtc/objctests/Info.plist b/talk/app/webrtc/objctests/Info.plist
new file mode 100644
index 0000000000..c2fb0617f3
--- /dev/null
+++ b/talk/app/webrtc/objctests/Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.Google.${PRODUCT_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+</dict>
+</plist>
diff --git a/talk/app/webrtc/objctests/OWNERS b/talk/app/webrtc/objctests/OWNERS
new file mode 100644
index 0000000000..cd06158b7f
--- /dev/null
+++ b/talk/app/webrtc/objctests/OWNERS
@@ -0,0 +1 @@
+tkchin@webrtc.org
diff --git a/talk/app/webrtc/objctests/README b/talk/app/webrtc/objctests/README
new file mode 100644
index 0000000000..ebbb2c2603
--- /dev/null
+++ b/talk/app/webrtc/objctests/README
@@ -0,0 +1 @@
+See ../objc/README for information on what this is and how to use it.
diff --git a/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.h b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.h
new file mode 100644
index 0000000000..7c49e1d768
--- /dev/null
+++ b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.h
@@ -0,0 +1,60 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCDataChannel.h"
+#import "RTCPeerConnectionDelegate.h"
+
+// Observer of PeerConnection events, used by RTCPeerConnectionTest to check
+// expectations.
+@interface RTCPeerConnectionSyncObserver
+ : NSObject<RTCPeerConnectionDelegate, RTCDataChannelDelegate>
+@property(nonatomic) RTCDataChannel* dataChannel;
+// TODO(hughv): Add support for RTCVideoRendererDelegate when Video is enabled.
+
+// Transfer received ICE candidates to the caller.
+- (NSArray*)releaseReceivedICECandidates;
+
+// Register expectations for events that this observer should see before it can
+// be considered satisfied (see below).
+- (void)expectError;
+- (void)expectSignalingChange:(RTCSignalingState)state;
+- (void)expectAddStream:(NSString *)label;
+- (void)expectRemoveStream:(NSString *)label;
+- (void)expectICECandidates:(int)count;
+- (void)expectICEConnectionChange:(RTCICEConnectionState)state;
+- (void)expectICEGatheringChange:(RTCICEGatheringState)state;
+- (void)expectDataChannel:(NSString*)label;
+- (void)expectStateChange:(RTCDataChannelState)state;
+- (void)expectMessage:(NSData*)message isBinary:(BOOL)isBinary;
+
+// Wait until all registered expectations above have been observed.
+// Return false if expectations aren't met within |timeoutSeconds|.
+- (BOOL)waitForAllExpectationsToBeSatisfiedWithTimeout:(NSTimeInterval)timeout;
+
+@end
diff --git a/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m
new file mode 100644
index 0000000000..892c461980
--- /dev/null
+++ b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m
@@ -0,0 +1,263 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCPeerConnectionSyncObserver.h"
+
+#import "RTCMediaStream.h"
+
+@implementation RTCPeerConnectionSyncObserver {
+ int _expectedErrors;
+ NSMutableArray* _expectedSignalingChanges;
+ NSMutableArray* _expectedAddStreamLabels;
+ NSMutableArray* _expectedRemoveStreamLabels;
+ int _expectedICECandidates;
+ NSMutableArray* _receivedICECandidates;
+ NSMutableArray* _expectedICEConnectionChanges;
+ NSMutableArray* _expectedICEGatheringChanges;
+ NSMutableArray* _expectedDataChannels;
+ NSMutableArray* _expectedStateChanges;
+ NSMutableArray* _expectedMessages;
+}
+
+- (id)init {
+ self = [super init];
+ if (self) {
+ _expectedSignalingChanges = [NSMutableArray array];
+ _expectedSignalingChanges = [NSMutableArray array];
+ _expectedAddStreamLabels = [NSMutableArray array];
+ _expectedRemoveStreamLabels = [NSMutableArray array];
+ _receivedICECandidates = [NSMutableArray array];
+ _expectedICEConnectionChanges = [NSMutableArray array];
+ _expectedICEGatheringChanges = [NSMutableArray array];
+ _expectedDataChannels = [NSMutableArray array];
+ _expectedMessages = [NSMutableArray array];
+ _expectedStateChanges = [NSMutableArray array];
+ }
+ return self;
+}
+
+- (int)popFirstElementAsInt:(NSMutableArray*)array {
+ NSAssert([array count] > 0, @"Empty array");
+ NSNumber* boxedState = [array objectAtIndex:0];
+ [array removeObjectAtIndex:0];
+ return [boxedState intValue];
+}
+
+- (NSString*)popFirstElementAsNSString:(NSMutableArray*)array {
+ NSAssert([array count] > 0, @"Empty expectation array");
+ NSString* string = [array objectAtIndex:0];
+ [array removeObjectAtIndex:0];
+ return string;
+}
+
+- (BOOL)areAllExpectationsSatisfied {
+ return _expectedICECandidates <= 0 && // See comment in gotICECandidate.
+ _expectedErrors == 0 && [_expectedSignalingChanges count] == 0 &&
+ [_expectedICEConnectionChanges count] == 0 &&
+ [_expectedICEGatheringChanges count] == 0 &&
+ [_expectedAddStreamLabels count] == 0 &&
+ [_expectedRemoveStreamLabels count] == 0 &&
+ [_expectedDataChannels count] == 0 &&
+ [_expectedStateChanges count] == 0 &&
+ [_expectedMessages count] == 0;
+ // TODO(hughv): Test video state here too.
+}
+
+- (NSArray*)releaseReceivedICECandidates {
+ NSArray* ret = _receivedICECandidates;
+ _receivedICECandidates = [NSMutableArray array];
+ return ret;
+}
+
+- (void)expectError {
+ ++_expectedErrors;
+}
+
+- (void)expectSignalingChange:(RTCSignalingState)state {
+ [_expectedSignalingChanges addObject:@((int)state)];
+}
+
+- (void)expectAddStream:(NSString*)label {
+ [_expectedAddStreamLabels addObject:label];
+}
+
+- (void)expectRemoveStream:(NSString*)label {
+ [_expectedRemoveStreamLabels addObject:label];
+}
+
+- (void)expectICECandidates:(int)count {
+ _expectedICECandidates += count;
+}
+
+- (void)expectICEConnectionChange:(RTCICEConnectionState)state {
+ [_expectedICEConnectionChanges addObject:@((int)state)];
+}
+
+- (void)expectICEGatheringChange:(RTCICEGatheringState)state {
+ [_expectedICEGatheringChanges addObject:@((int)state)];
+}
+
+- (void)expectDataChannel:(NSString*)label {
+ [_expectedDataChannels addObject:label];
+}
+
+- (void)expectStateChange:(RTCDataChannelState)state {
+ [_expectedStateChanges addObject:@(state)];
+}
+
+- (void)expectMessage:(NSData*)message isBinary:(BOOL)isBinary {
+ RTCDataBuffer* buffer = [[RTCDataBuffer alloc] initWithData:message
+ isBinary:isBinary];
+ [_expectedMessages addObject:buffer];
+}
+
+- (BOOL)waitForAllExpectationsToBeSatisfiedWithTimeout:(NSTimeInterval)timeout {
+ NSParameterAssert(timeout >= 0);
+ // TODO (fischman): Revisit. Keeping in sync with the Java version, but
+ // polling is not optimal.
+ // https://code.google.com/p/libjingle/source/browse/trunk/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java?line=212#212
+ NSDate *startTime = [NSDate date];
+ while (![self areAllExpectationsSatisfied]) {
+ if (startTime.timeIntervalSinceNow < -timeout) {
+ return NO;
+ }
+ [[NSRunLoop currentRunLoop]
+ runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
+ }
+ return YES;
+}
+
+#pragma mark - RTCPeerConnectionDelegate methods
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ signalingStateChanged:(RTCSignalingState)stateChanged {
+ int expectedState = [self popFirstElementAsInt:_expectedSignalingChanges];
+ NSString* message =
+ [NSString stringWithFormat:@"RTCPeerConnectionDelegate::"
+ @"onSignalingStateChange [%d] expected[%d]",
+ stateChanged,
+ expectedState];
+ NSAssert(expectedState == (int)stateChanged, message);
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ addedStream:(RTCMediaStream*)stream {
+ NSString* expectedLabel =
+ [self popFirstElementAsNSString:_expectedAddStreamLabels];
+ NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected");
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ removedStream:(RTCMediaStream*)stream {
+ NSString* expectedLabel =
+ [self popFirstElementAsNSString:_expectedRemoveStreamLabels];
+ NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected");
+}
+
+- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ gotICECandidate:(RTCICECandidate*)candidate {
+ --_expectedICECandidates;
+ // We don't assert expectedICECandidates >= 0 because it's hard to know
+ // how many to expect, in general. We only use expectICECandidates to
+ // assert a minimal count.
+ [_receivedICECandidates addObject:candidate];
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ iceGatheringChanged:(RTCICEGatheringState)newState {
+ // It's fine to get a variable number of GATHERING messages before
+ // COMPLETE fires (depending on how long the test runs) so we don't assert
+ // any particular count.
+ if (newState == RTCICEGatheringGathering) {
+ return;
+ }
+ NSAssert([_expectedICEGatheringChanges count] > 0,
+ @"Unexpected ICE gathering state change");
+ int expectedState = [self popFirstElementAsInt:_expectedICEGatheringChanges];
+ NSAssert(expectedState == (int)newState,
+ @"ICE gathering state should match expectation");
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ iceConnectionChanged:(RTCICEConnectionState)newState {
+ // See TODO(fischman) in RTCPeerConnectionTest.mm about Completed.
+ if (newState == RTCICEConnectionCompleted)
+ return;
+ NSAssert([_expectedICEConnectionChanges count] > 0,
+ @"Unexpected ICE connection state change");
+ int expectedState = [self popFirstElementAsInt:_expectedICEConnectionChanges];
+ NSAssert(expectedState == (int)newState,
+ @"ICE connection state should match expectation");
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ didOpenDataChannel:(RTCDataChannel*)dataChannel {
+ NSString* expectedLabel =
+ [self popFirstElementAsNSString:_expectedDataChannels];
+ NSAssert([expectedLabel isEqual:dataChannel.label],
+ @"Data channel not expected");
+ self.dataChannel = dataChannel;
+ dataChannel.delegate = self;
+ NSAssert(kRTCDataChannelStateConnecting == dataChannel.state,
+ @"Unexpected state");
+}
+
+#pragma mark - RTCDataChannelDelegate
+
+- (void)channelDidChangeState:(RTCDataChannel*)channel {
+ NSAssert([_expectedStateChanges count] > 0,
+ @"Unexpected state change");
+ int expectedState = [self popFirstElementAsInt:_expectedStateChanges];
+ NSAssert(expectedState == channel.state, @"Channel state should match");
+}
+
+- (void)channel:(RTCDataChannel*)channel
+ didChangeBufferedAmount:(NSUInteger)previousAmount {
+ NSAssert(channel.bufferedAmount != previousAmount,
+ @"Invalid bufferedAmount change");
+}
+
+- (void)channel:(RTCDataChannel*)channel
+ didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer {
+ NSAssert([_expectedMessages count] > 0,
+ @"Unexpected message received");
+ RTCDataBuffer* expectedBuffer = [_expectedMessages objectAtIndex:0];
+ NSAssert(expectedBuffer.isBinary == buffer.isBinary,
+ @"Buffer isBinary should match");
+ NSAssert([expectedBuffer.data isEqual:buffer.data],
+ @"Buffer data should match");
+ [_expectedMessages removeObjectAtIndex:0];
+}
+
+@end
diff --git a/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm b/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm
new file mode 100644
index 0000000000..c43a6fcf24
--- /dev/null
+++ b/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm
@@ -0,0 +1,346 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCICEServer.h"
+#import "RTCMediaConstraints.h"
+#import "RTCMediaStream.h"
+#import "RTCPair.h"
+#import "RTCPeerConnection.h"
+#import "RTCPeerConnectionFactory.h"
+#import "RTCPeerConnectionSyncObserver.h"
+#import "RTCSessionDescription.h"
+#import "RTCSessionDescriptionSyncObserver.h"
+#import "RTCVideoRenderer.h"
+#import "RTCVideoTrack.h"
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/ssladapter.h"
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+const NSTimeInterval kRTCPeerConnectionTestTimeout = 20;
+
+@interface RTCFakeRenderer : NSObject <RTCVideoRenderer>
+@end
+
+@implementation RTCFakeRenderer
+
+- (void)setSize:(CGSize)size {}
+- (void)renderFrame:(RTCI420Frame*)frame {}
+
+@end
+
+@interface RTCPeerConnectionTest : NSObject
+
+// Returns whether the two sessions are of the same type.
++ (BOOL)isSession:(RTCSessionDescription*)session1
+ ofSameTypeAsSession:(RTCSessionDescription*)session2;
+
+// Create and add tracks to pc, with the given source, label, and IDs
+- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
+ withFactory:(RTCPeerConnectionFactory*)factory
+ videoSource:(RTCVideoSource*)videoSource
+ streamLabel:(NSString*)streamLabel
+ videoTrackID:(NSString*)videoTrackID
+ audioTrackID:(NSString*)audioTrackID;
+
+- (void)testCompleteSessionWithFactory:(RTCPeerConnectionFactory*)factory;
+
+@end
+
+@implementation RTCPeerConnectionTest
+
++ (BOOL)isSession:(RTCSessionDescription*)session1
+ ofSameTypeAsSession:(RTCSessionDescription*)session2 {
+ return [session1.type isEqual:session2.type];
+}
+
+- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
+ withFactory:(RTCPeerConnectionFactory*)factory
+ videoSource:(RTCVideoSource*)videoSource
+ streamLabel:(NSString*)streamLabel
+ videoTrackID:(NSString*)videoTrackID
+ audioTrackID:(NSString*)audioTrackID {
+ RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel];
+ RTCVideoTrack* videoTrack =
+ [factory videoTrackWithID:videoTrackID source:videoSource];
+ RTCFakeRenderer* videoRenderer = [[RTCFakeRenderer alloc] init];
+ [videoTrack addRenderer:videoRenderer];
+ [localMediaStream addVideoTrack:videoTrack];
+ // Test that removal/re-add works.
+ [localMediaStream removeVideoTrack:videoTrack];
+ [localMediaStream addVideoTrack:videoTrack];
+ RTCAudioTrack* audioTrack = [factory audioTrackWithID:audioTrackID];
+ [localMediaStream addAudioTrack:audioTrack];
+ [pc addStream:localMediaStream];
+ return localMediaStream;
+}
+
+- (void)testCompleteSessionWithFactory:(RTCPeerConnectionFactory*)factory {
+ NSArray* mandatory = @[
+ [[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement" value:@"true"],
+ [[RTCPair alloc] initWithKey:@"internalSctpDataChannels" value:@"true"],
+ ];
+ RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init];
+ RTCMediaConstraints* pcConstraints =
+ [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
+ optionalConstraints:nil];
+
+ RTCPeerConnectionSyncObserver* offeringExpectations =
+ [[RTCPeerConnectionSyncObserver alloc] init];
+ RTCPeerConnection* pcOffer =
+ [factory peerConnectionWithICEServers:nil
+ constraints:pcConstraints
+ delegate:offeringExpectations];
+
+ RTCPeerConnectionSyncObserver* answeringExpectations =
+ [[RTCPeerConnectionSyncObserver alloc] init];
+
+ RTCPeerConnection* pcAnswer =
+ [factory peerConnectionWithICEServers:nil
+ constraints:pcConstraints
+ delegate:answeringExpectations];
+ // TODO(hughv): Create video capturer
+ RTCVideoCapturer* capturer = nil;
+ RTCVideoSource* videoSource =
+ [factory videoSourceWithCapturer:capturer constraints:constraints];
+
+ // Here and below, "oLMS" refers to offerer's local media stream, and "aLMS"
+ // refers to the answerer's local media stream, with suffixes of "a0" and "v0"
+ // for audio and video tracks, resp. These mirror chrome historical naming.
+ RTCMediaStream* oLMSUnused = [self addTracksToPeerConnection:pcOffer
+ withFactory:factory
+ videoSource:videoSource
+ streamLabel:@"oLMS"
+ videoTrackID:@"oLMSv0"
+ audioTrackID:@"oLMSa0"];
+
+ RTCDataChannel* offerDC =
+ [pcOffer createDataChannelWithLabel:@"offerDC"
+ config:[[RTCDataChannelInit alloc] init]];
+ EXPECT_TRUE([offerDC.label isEqual:@"offerDC"]);
+ offerDC.delegate = offeringExpectations;
+ offeringExpectations.dataChannel = offerDC;
+
+ RTCSessionDescriptionSyncObserver* sdpObserver =
+ [[RTCSessionDescriptionSyncObserver alloc] init];
+ [pcOffer createOfferWithDelegate:sdpObserver constraints:constraints];
+ [sdpObserver wait];
+ EXPECT_TRUE(sdpObserver.success);
+ RTCSessionDescription* offerSDP = sdpObserver.sessionDescription;
+ EXPECT_EQ([@"offer" compare:offerSDP.type options:NSCaseInsensitiveSearch],
+ NSOrderedSame);
+ EXPECT_GT([offerSDP.description length], 0);
+
+ sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
+ [answeringExpectations expectSignalingChange:RTCSignalingHaveRemoteOffer];
+ [answeringExpectations expectAddStream:@"oLMS"];
+ [pcAnswer setRemoteDescriptionWithDelegate:sdpObserver
+ sessionDescription:offerSDP];
+ [sdpObserver wait];
+
+ RTCMediaStream* aLMSUnused = [self addTracksToPeerConnection:pcAnswer
+ withFactory:factory
+ videoSource:videoSource
+ streamLabel:@"aLMS"
+ videoTrackID:@"aLMSv0"
+ audioTrackID:@"aLMSa0"];
+
+ sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
+ [pcAnswer createAnswerWithDelegate:sdpObserver constraints:constraints];
+ [sdpObserver wait];
+ EXPECT_TRUE(sdpObserver.success);
+ RTCSessionDescription* answerSDP = sdpObserver.sessionDescription;
+ EXPECT_EQ([@"answer" compare:answerSDP.type options:NSCaseInsensitiveSearch],
+ NSOrderedSame);
+ EXPECT_GT([answerSDP.description length], 0);
+
+ [offeringExpectations expectICECandidates:2];
+ // It's possible to only have 1 ICE candidate for the answerer, since we use
+ // BUNDLE and rtcp-mux by default, and don't provide any ICE servers in this
+ // test.
+ [answeringExpectations expectICECandidates:1];
+
+ sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
+ [answeringExpectations expectSignalingChange:RTCSignalingStable];
+ [pcAnswer setLocalDescriptionWithDelegate:sdpObserver
+ sessionDescription:answerSDP];
+ [sdpObserver wait];
+ EXPECT_TRUE(sdpObserver.sessionDescription == NULL);
+
+ sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
+ [offeringExpectations expectSignalingChange:RTCSignalingHaveLocalOffer];
+ [pcOffer setLocalDescriptionWithDelegate:sdpObserver
+ sessionDescription:offerSDP];
+ [sdpObserver wait];
+ EXPECT_TRUE(sdpObserver.sessionDescription == NULL);
+
+ [offeringExpectations expectICEConnectionChange:RTCICEConnectionChecking];
+ [offeringExpectations expectICEConnectionChange:RTCICEConnectionConnected];
+ // TODO(fischman): figure out why this is flaky and re-introduce (and remove
+ // special-casing from the observer!).
+ // [offeringExpectations expectICEConnectionChange:RTCICEConnectionCompleted];
+ [answeringExpectations expectICEConnectionChange:RTCICEConnectionChecking];
+ [answeringExpectations expectICEConnectionChange:RTCICEConnectionConnected];
+
+ [offeringExpectations expectStateChange:kRTCDataChannelStateOpen];
+ [answeringExpectations expectDataChannel:@"offerDC"];
+ [answeringExpectations expectStateChange:kRTCDataChannelStateOpen];
+
+ [offeringExpectations expectICEGatheringChange:RTCICEGatheringComplete];
+ [answeringExpectations expectICEGatheringChange:RTCICEGatheringComplete];
+
+ sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
+ [offeringExpectations expectSignalingChange:RTCSignalingStable];
+ [offeringExpectations expectAddStream:@"aLMS"];
+ [pcOffer setRemoteDescriptionWithDelegate:sdpObserver
+ sessionDescription:answerSDP];
+ [sdpObserver wait];
+ EXPECT_TRUE(sdpObserver.sessionDescription == NULL);
+
+ EXPECT_TRUE([offerSDP.type isEqual:pcOffer.localDescription.type]);
+ EXPECT_TRUE([answerSDP.type isEqual:pcOffer.remoteDescription.type]);
+ EXPECT_TRUE([offerSDP.type isEqual:pcAnswer.remoteDescription.type]);
+ EXPECT_TRUE([answerSDP.type isEqual:pcAnswer.localDescription.type]);
+
+ for (RTCICECandidate* candidate in offeringExpectations
+ .releaseReceivedICECandidates) {
+ [pcAnswer addICECandidate:candidate];
+ }
+ for (RTCICECandidate* candidate in answeringExpectations
+ .releaseReceivedICECandidates) {
+ [pcOffer addICECandidate:candidate];
+ }
+
+ EXPECT_TRUE(
+ [offeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+ EXPECT_TRUE(
+ [answeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+
+ EXPECT_EQ(pcOffer.signalingState, RTCSignalingStable);
+ EXPECT_EQ(pcAnswer.signalingState, RTCSignalingStable);
+
+ // Test send and receive UTF-8 text
+ NSString* text = @"你好";
+ NSData* textData = [text dataUsingEncoding:NSUTF8StringEncoding];
+ RTCDataBuffer* buffer =
+ [[RTCDataBuffer alloc] initWithData:textData isBinary:NO];
+ [answeringExpectations expectMessage:[textData copy] isBinary:NO];
+ EXPECT_TRUE([offeringExpectations.dataChannel sendData:buffer]);
+ EXPECT_TRUE(
+ [answeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+
+ // Test send and receive binary data
+ const size_t byteLength = 5;
+ char bytes[byteLength] = {1, 2, 3, 4, 5};
+ NSData* byteData = [NSData dataWithBytes:bytes length:byteLength];
+ buffer = [[RTCDataBuffer alloc] initWithData:byteData isBinary:YES];
+ [answeringExpectations expectMessage:[byteData copy] isBinary:YES];
+ EXPECT_TRUE([offeringExpectations.dataChannel sendData:buffer]);
+ EXPECT_TRUE(
+ [answeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+
+ [offeringExpectations expectStateChange:kRTCDataChannelStateClosing];
+ [answeringExpectations expectStateChange:kRTCDataChannelStateClosing];
+ [offeringExpectations expectStateChange:kRTCDataChannelStateClosed];
+ [answeringExpectations expectStateChange:kRTCDataChannelStateClosed];
+
+ [answeringExpectations.dataChannel close];
+ [offeringExpectations.dataChannel close];
+
+ EXPECT_TRUE(
+ [offeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+ EXPECT_TRUE(
+ [answeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+ // Don't need to listen to further state changes.
+ // TODO(tkchin): figure out why Closed->Closing without this.
+ offeringExpectations.dataChannel.delegate = nil;
+ answeringExpectations.dataChannel.delegate = nil;
+
+ // Let the audio feedback run for 2s to allow human testing and to ensure
+ // things stabilize. TODO(fischman): replace seconds with # of video frames,
+ // when we have video flowing.
+ [[NSRunLoop currentRunLoop]
+ runUntilDate:[NSDate dateWithTimeIntervalSinceNow:2]];
+
+ [offeringExpectations expectICEConnectionChange:RTCICEConnectionClosed];
+ [answeringExpectations expectICEConnectionChange:RTCICEConnectionClosed];
+ [offeringExpectations expectSignalingChange:RTCSignalingClosed];
+ [answeringExpectations expectSignalingChange:RTCSignalingClosed];
+
+ [pcOffer close];
+ [pcAnswer close];
+
+ EXPECT_TRUE(
+ [offeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+ EXPECT_TRUE(
+ [answeringExpectations waitForAllExpectationsToBeSatisfiedWithTimeout:
+ kRTCPeerConnectionTestTimeout]);
+
+ capturer = nil;
+ videoSource = nil;
+ pcOffer = nil;
+ pcAnswer = nil;
+ // TODO(fischman): be stricter about shutdown checks; ensure thread
+ // counts return to where they were before the test kicked off, and
+ // that all objects have in fact shut down.
+}
+
+@end
+
+// TODO(fischman): move {Initialize,Cleanup}SSL into alloc/dealloc of
+// RTCPeerConnectionTest and avoid the appearance of RTCPeerConnectionTest being
+// a TestBase since it's not.
+TEST(RTCPeerConnectionTest, SessionTest) {
+ @autoreleasepool {
+ rtc::InitializeSSL();
+ // Since |factory| will own the signaling & worker threads, it's important
+ // that it outlive the created PeerConnections since they self-delete on the
+ // signaling thread, and if |factory| is freed first then a last refcount on
+ // the factory will expire during this teardown, causing the signaling
+ // thread to try to Join() with itself. This is a hack to ensure that the
+ // factory outlives RTCPeerConnection:dealloc.
+ // See https://code.google.com/p/webrtc/issues/detail?id=3100.
+ RTCPeerConnectionFactory* factory = [[RTCPeerConnectionFactory alloc] init];
+ @autoreleasepool {
+ RTCPeerConnectionTest* pcTest = [[RTCPeerConnectionTest alloc] init];
+ [pcTest testCompleteSessionWithFactory:factory];
+ }
+ rtc::CleanupSSL();
+ }
+}
diff --git a/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.h b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.h
new file mode 100644
index 0000000000..bc83eaf950
--- /dev/null
+++ b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.h
@@ -0,0 +1,49 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCSessionDescriptionDelegate.h"
+
+@class RTCSessionDescription;
+
+// Observer of SDP-related events, used by RTCPeerConnectionTest to check
+// expectations.
+@interface RTCSessionDescriptionSyncObserver : NSObject<
+ RTCSessionDescriptionDelegate>
+
+// Error string. May be nil.
+@property(atomic, copy) NSString *error;
+// Created session description. May be nil.
+@property(atomic, strong) RTCSessionDescription *sessionDescription;
+// Whether an SDP-related callback reported success.
+@property(atomic, assign) BOOL success;
+
+// Wait for an SDP-related callback to fire.
+- (void)wait;
+
+@end
diff --git a/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m
new file mode 100644
index 0000000000..31b0efe0d5
--- /dev/null
+++ b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m
@@ -0,0 +1,103 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "RTCSessionDescriptionSyncObserver.h"
+
+#import "RTCSessionDescription.h"
+
+@interface RTCSessionDescriptionSyncObserver ()
+
+// CondVar used to wait for, and signal arrival of, an SDP-related callback.
+@property(nonatomic, strong) NSCondition* condition;
+// Whether an SDP-related callback has fired; cleared before wait returns.
+@property(atomic, assign) BOOL signaled;
+
+@end
+
+@implementation RTCSessionDescriptionSyncObserver
+
+@synthesize error = _error;
+@synthesize sessionDescription = _sessionDescription;
+@synthesize success = _success;
+@synthesize condition = _condition;
+@synthesize signaled = _signaled;
+
+- (id)init {
+ if ((self = [super init])) {
+ if (!(_condition = [[NSCondition alloc] init]))
+ self = nil;
+ }
+ return self;
+}
+
+- (void)signal {
+ self.signaled = YES;
+ [self.condition signal];
+}
+
+- (void)wait {
+ [self.condition lock];
+ if (!self.signaled)
+ [self.condition wait];
+ self.signaled = NO;
+ [self.condition unlock];
+}
+
+#pragma mark - RTCSessionDescriptionDelegate methods
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ didCreateSessionDescription:(RTCSessionDescription*)sdp
+ error:(NSError*)error {
+ [self.condition lock];
+ if (error) {
+ self.success = NO;
+ self.error = error.description;
+ } else {
+ self.success = YES;
+ self.sessionDescription = sdp;
+ }
+ [self signal];
+ [self.condition unlock];
+}
+
+- (void)peerConnection:(RTCPeerConnection*)peerConnection
+ didSetSessionDescriptionWithError:(NSError*)error {
+ [self.condition lock];
+ if (error) {
+ self.success = NO;
+ self.error = error.description;
+ } else {
+ self.success = YES;
+ }
+ [self signal];
+ [self.condition unlock];
+}
+
+@end
diff --git a/talk/app/webrtc/objctests/mac/main.mm b/talk/app/webrtc/objctests/mac/main.mm
new file mode 100644
index 0000000000..78ccf32afc
--- /dev/null
+++ b/talk/app/webrtc/objctests/mac/main.mm
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/base/gunit.h"
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+int main(int argc, char* argv[]) {
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
new file mode 100644
index 0000000000..0d519b280b
--- /dev/null
+++ b/talk/app/webrtc/peerconnection.cc
@@ -0,0 +1,1975 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/peerconnection.h"
+
+#include <vector>
+#include <cctype> // for isdigit
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/dtmfsender.h"
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/app/webrtc/remoteaudiosource.h"
+#include "talk/app/webrtc/remotevideocapturer.h"
+#include "talk/app/webrtc/rtpreceiver.h"
+#include "talk/app/webrtc/rtpsender.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/sctp/sctpdataengine.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+
+namespace {
+
+using webrtc::DataChannel;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::StreamCollection;
+using webrtc::StunConfigurations;
+using webrtc::TurnConfigurations;
+typedef webrtc::PortAllocatorFactoryInterface::StunConfiguration
+ StunConfiguration;
+typedef webrtc::PortAllocatorFactoryInterface::TurnConfiguration
+ TurnConfiguration;
+
+static const char kDefaultStreamLabel[] = "default";
+static const char kDefaultAudioTrackLabel[] = "defaulta0";
+static const char kDefaultVideoTrackLabel[] = "defaultv0";
+
+// The min number of tokens must present in Turn host uri.
+// e.g. user@turn.example.org
+static const size_t kTurnHostTokensNum = 2;
+// Number of tokens must be preset when TURN uri has transport param.
+static const size_t kTurnTransportTokensNum = 2;
+// The default stun port.
+static const int kDefaultStunPort = 3478;
+static const int kDefaultStunTlsPort = 5349;
+static const char kTransport[] = "transport";
+static const char kUdpTransportType[] = "udp";
+static const char kTcpTransportType[] = "tcp";
+
+// NOTE: Must be in the same order as the ServiceType enum.
+static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"};
+
+// NOTE: A loop below assumes that the first value of this enum is 0 and all
+// other values are incremental.
+enum ServiceType {
+ STUN = 0, // Indicates a STUN server.
+ STUNS, // Indicates a STUN server used with a TLS session.
+ TURN, // Indicates a TURN server
+ TURNS, // Indicates a TURN server used with a TLS session.
+ INVALID, // Unknown.
+};
+static_assert(INVALID == ARRAY_SIZE(kValidIceServiceTypes),
+ "kValidIceServiceTypes must have as many strings as ServiceType "
+ "has values.");
+
+enum {
+ MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0,
+ MSG_SET_SESSIONDESCRIPTION_FAILED,
+ MSG_CREATE_SESSIONDESCRIPTION_FAILED,
+ MSG_GETSTATS,
+};
+
+struct SetSessionDescriptionMsg : public rtc::MessageData {
+ explicit SetSessionDescriptionMsg(
+ webrtc::SetSessionDescriptionObserver* observer)
+ : observer(observer) {
+ }
+
+ rtc::scoped_refptr<webrtc::SetSessionDescriptionObserver> observer;
+ std::string error;
+};
+
+struct CreateSessionDescriptionMsg : public rtc::MessageData {
+ explicit CreateSessionDescriptionMsg(
+ webrtc::CreateSessionDescriptionObserver* observer)
+ : observer(observer) {}
+
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
+ std::string error;
+};
+
+struct GetStatsMsg : public rtc::MessageData {
+ GetStatsMsg(webrtc::StatsObserver* observer,
+ webrtc::MediaStreamTrackInterface* track)
+ : observer(observer), track(track) {
+ }
+ rtc::scoped_refptr<webrtc::StatsObserver> observer;
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track;
+};
+
+// |in_str| should be of format
+// stunURI = scheme ":" stun-host [ ":" stun-port ]
+// scheme = "stun" / "stuns"
+// stun-host = IP-literal / IPv4address / reg-name
+// stun-port = *DIGIT
+//
+// draft-petithuguenin-behave-turn-uris-01
+// turnURI = scheme ":" turn-host [ ":" turn-port ]
+// turn-host = username@IP-literal / IPv4address / reg-name
+bool GetServiceTypeAndHostnameFromUri(const std::string& in_str,
+ ServiceType* service_type,
+ std::string* hostname) {
+ const std::string::size_type colonpos = in_str.find(':');
+ if (colonpos == std::string::npos) {
+ LOG(LS_WARNING) << "Missing ':' in ICE URI: " << in_str;
+ return false;
+ }
+ if ((colonpos + 1) == in_str.length()) {
+ LOG(LS_WARNING) << "Empty hostname in ICE URI: " << in_str;
+ return false;
+ }
+ *service_type = INVALID;
+ for (size_t i = 0; i < ARRAY_SIZE(kValidIceServiceTypes); ++i) {
+ if (in_str.compare(0, colonpos, kValidIceServiceTypes[i]) == 0) {
+ *service_type = static_cast<ServiceType>(i);
+ break;
+ }
+ }
+ if (*service_type == INVALID) {
+ return false;
+ }
+ *hostname = in_str.substr(colonpos + 1, std::string::npos);
+ return true;
+}
+
+bool ParsePort(const std::string& in_str, int* port) {
+ // Make sure port only contains digits. FromString doesn't check this.
+ for (const char& c : in_str) {
+ if (!std::isdigit(c)) {
+ return false;
+ }
+ }
+ return rtc::FromString(in_str, port);
+}
+
+// This method parses IPv6 and IPv4 literal strings, along with hostnames in
+// standard hostname:port format.
+// Consider following formats as correct.
+// |hostname:port|, |[IPV6 address]:port|, |IPv4 address|:port,
+// |hostname|, |[IPv6 address]|, |IPv4 address|.
+bool ParseHostnameAndPortFromString(const std::string& in_str,
+ std::string* host,
+ int* port) {
+ RTC_DCHECK(host->empty());
+ if (in_str.at(0) == '[') {
+ std::string::size_type closebracket = in_str.rfind(']');
+ if (closebracket != std::string::npos) {
+ std::string::size_type colonpos = in_str.find(':', closebracket);
+ if (std::string::npos != colonpos) {
+ if (!ParsePort(in_str.substr(closebracket + 2, std::string::npos),
+ port)) {
+ return false;
+ }
+ }
+ *host = in_str.substr(1, closebracket - 1);
+ } else {
+ return false;
+ }
+ } else {
+ std::string::size_type colonpos = in_str.find(':');
+ if (std::string::npos != colonpos) {
+ if (!ParsePort(in_str.substr(colonpos + 1, std::string::npos), port)) {
+ return false;
+ }
+ *host = in_str.substr(0, colonpos);
+ } else {
+ *host = in_str;
+ }
+ }
+ return !host->empty();
+}
+
+// Adds a StunConfiguration or TurnConfiguration to the appropriate list,
+// by parsing |url| and using the username/password in |server|.
+bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
+ const std::string& url,
+ StunConfigurations* stun_config,
+ TurnConfigurations* turn_config) {
+ // draft-nandakumar-rtcweb-stun-uri-01
+ // stunURI = scheme ":" stun-host [ ":" stun-port ]
+ // scheme = "stun" / "stuns"
+ // stun-host = IP-literal / IPv4address / reg-name
+ // stun-port = *DIGIT
+
+ // draft-petithuguenin-behave-turn-uris-01
+ // turnURI = scheme ":" turn-host [ ":" turn-port ]
+ // [ "?transport=" transport ]
+ // scheme = "turn" / "turns"
+ // transport = "udp" / "tcp" / transport-ext
+ // transport-ext = 1*unreserved
+ // turn-host = IP-literal / IPv4address / reg-name
+ // turn-port = *DIGIT
+ RTC_DCHECK(stun_config != nullptr);
+ RTC_DCHECK(turn_config != nullptr);
+ std::vector<std::string> tokens;
+ std::string turn_transport_type = kUdpTransportType;
+ RTC_DCHECK(!url.empty());
+ rtc::tokenize(url, '?', &tokens);
+ std::string uri_without_transport = tokens[0];
+ // Let's look into transport= param, if it exists.
+ if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present.
+ std::string uri_transport_param = tokens[1];
+ rtc::tokenize(uri_transport_param, '=', &tokens);
+ if (tokens[0] == kTransport) {
+ // As per above grammar transport param will be consist of lower case
+ // letters.
+ if (tokens[1] != kUdpTransportType && tokens[1] != kTcpTransportType) {
+ LOG(LS_WARNING) << "Transport param should always be udp or tcp.";
+ return false;
+ }
+ turn_transport_type = tokens[1];
+ }
+ }
+
+ std::string hoststring;
+ ServiceType service_type;
+ if (!GetServiceTypeAndHostnameFromUri(uri_without_transport,
+ &service_type,
+ &hoststring)) {
+ LOG(LS_WARNING) << "Invalid transport parameter in ICE URI: " << url;
+ return false;
+ }
+
+ // GetServiceTypeAndHostnameFromUri should never give an empty hoststring
+ RTC_DCHECK(!hoststring.empty());
+
+ // Let's break hostname.
+ tokens.clear();
+ rtc::tokenize_with_empty_tokens(hoststring, '@', &tokens);
+
+ std::string username(server.username);
+ if (tokens.size() > kTurnHostTokensNum) {
+ LOG(LS_WARNING) << "Invalid user@hostname format: " << hoststring;
+ return false;
+ }
+ if (tokens.size() == kTurnHostTokensNum) {
+ if (tokens[0].empty() || tokens[1].empty()) {
+ LOG(LS_WARNING) << "Invalid user@hostname format: " << hoststring;
+ return false;
+ }
+ username.assign(rtc::s_url_decode(tokens[0]));
+ hoststring = tokens[1];
+ } else {
+ hoststring = tokens[0];
+ }
+
+ int port = kDefaultStunPort;
+ if (service_type == TURNS) {
+ port = kDefaultStunTlsPort;
+ turn_transport_type = kTcpTransportType;
+ }
+
+ std::string address;
+ if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
+ LOG(WARNING) << "Invalid hostname format: " << uri_without_transport;
+ return false;
+ }
+
+ if (port <= 0 || port > 0xffff) {
+ LOG(WARNING) << "Invalid port: " << port;
+ return false;
+ }
+
+ switch (service_type) {
+ case STUN:
+ case STUNS:
+ stun_config->push_back(StunConfiguration(address, port));
+ break;
+ case TURN:
+ case TURNS: {
+ bool secure = (service_type == TURNS);
+ turn_config->push_back(TurnConfiguration(address, port,
+ username,
+ server.password,
+ turn_transport_type,
+ secure));
+ break;
+ }
+ case INVALID:
+ default:
+ LOG(WARNING) << "Configuration not supported: " << url;
+ return false;
+ }
+ return true;
+}
+
+// Check if we can send |new_stream| on a PeerConnection.
+bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
+ webrtc::MediaStreamInterface* new_stream) {
+ if (!new_stream || !current_streams) {
+ return false;
+ }
+ if (current_streams->find(new_stream->label()) != nullptr) {
+ LOG(LS_ERROR) << "MediaStream with label " << new_stream->label()
+ << " is already added.";
+ return false;
+ }
+ return true;
+}
+
+bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) {
+ return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV;
+}
+
+// If the direction is "recvonly" or "inactive", treat the description
+// as containing no streams.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+std::vector<cricket::StreamParams> GetActiveStreams(
+ const cricket::MediaContentDescription* desc) {
+ return MediaContentDirectionHasSend(desc->direction())
+ ? desc->streams()
+ : std::vector<cricket::StreamParams>();
+}
+
+bool IsValidOfferToReceiveMedia(int value) {
+ typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
+ return (value >= Options::kUndefined) &&
+ (value <= Options::kMaxOfferToReceiveMedia);
+}
+
+// Add the stream and RTP data channel info to |session_options|.
+void SetStreams(cricket::MediaSessionOptions* session_options,
+ rtc::scoped_refptr<StreamCollection> streams,
+ const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
+ rtp_data_channels) {
+ session_options->streams.clear();
+ if (streams != nullptr) {
+ for (size_t i = 0; i < streams->count(); ++i) {
+ MediaStreamInterface* stream = streams->at(i);
+ // For each audio track in the stream, add it to the MediaSessionOptions.
+ for (const auto& track : stream->GetAudioTracks()) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
+ stream->label());
+ }
+ // For each video track in the stream, add it to the MediaSessionOptions.
+ for (const auto& track : stream->GetVideoTracks()) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
+ stream->label());
+ }
+ }
+ }
+
+ // Check for data channels.
+ for (const auto& kv : rtp_data_channels) {
+ const DataChannel* channel = kv.second;
+ if (channel->state() == DataChannel::kConnecting ||
+ channel->state() == DataChannel::kOpen) {
+ // |streamid| and |sync_label| are both set to the DataChannel label
+ // here so they can be signaled the same way as MediaStreams and Tracks.
+ // For MediaStreams, the sync_label is the MediaStream label and the
+ // track label is the same as |streamid|.
+ const std::string& streamid = channel->label();
+ const std::string& sync_label = channel->label();
+ session_options->AddSendStream(cricket::MEDIA_TYPE_DATA, streamid,
+ sync_label);
+ }
+ }
+}
+
+} // namespace
+
+namespace webrtc {
+
+// Factory class for creating remote MediaStreams and MediaStreamTracks.
+class RemoteMediaStreamFactory {
+ public:
+ explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager)
+ : signaling_thread_(signaling_thread),
+ channel_manager_(channel_manager) {}
+
+ rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
+ const std::string& stream_label) {
+ return MediaStreamProxy::Create(signaling_thread_,
+ MediaStream::Create(stream_label));
+ }
+
+ AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
+ const std::string& track_id) {
+ return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
+ stream, track_id, RemoteAudioSource::Create().get());
+ }
+
+ VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
+ const std::string& track_id) {
+ return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
+ stream, track_id,
+ VideoSource::Create(channel_manager_, new RemoteVideoCapturer(),
+ nullptr)
+ .get());
+ }
+
+ private:
+ template <typename TI, typename T, typename TP, typename S>
+ TI* AddTrack(MediaStreamInterface* stream,
+ const std::string& track_id,
+ S* source) {
+ rtc::scoped_refptr<TI> track(
+ TP::Create(signaling_thread_, T::Create(track_id, source)));
+ track->set_state(webrtc::MediaStreamTrackInterface::kLive);
+ if (stream->AddTrack(track)) {
+ return track;
+ }
+ return nullptr;
+ }
+
+ rtc::Thread* signaling_thread_;
+ cricket::ChannelManager* channel_manager_;
+};
+
+bool ConvertRtcOptionsForOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+ cricket::MediaSessionOptions* session_options) {
+ typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+ if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) ||
+ !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) {
+ return false;
+ }
+
+ if (rtc_options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) {
+ session_options->recv_audio = (rtc_options.offer_to_receive_audio > 0);
+ }
+ if (rtc_options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) {
+ session_options->recv_video = (rtc_options.offer_to_receive_video > 0);
+ }
+
+ session_options->vad_enabled = rtc_options.voice_activity_detection;
+ session_options->transport_options.ice_restart = rtc_options.ice_restart;
+ session_options->bundle_enabled = rtc_options.use_rtp_mux;
+
+ return true;
+}
+
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+ cricket::MediaSessionOptions* session_options) {
+ bool value = false;
+ size_t mandatory_constraints_satisfied = 0;
+
+ // kOfferToReceiveAudio defaults to true according to spec.
+ if (!FindConstraint(constraints,
+ MediaConstraintsInterface::kOfferToReceiveAudio, &value,
+ &mandatory_constraints_satisfied) ||
+ value) {
+ session_options->recv_audio = true;
+ }
+
+ // kOfferToReceiveVideo defaults to false according to spec. But
+ // if it is an answer and video is offered, we should still accept video
+ // per default.
+ value = false;
+ if (!FindConstraint(constraints,
+ MediaConstraintsInterface::kOfferToReceiveVideo, &value,
+ &mandatory_constraints_satisfied) ||
+ value) {
+ session_options->recv_video = true;
+ }
+
+ if (FindConstraint(constraints,
+ MediaConstraintsInterface::kVoiceActivityDetection, &value,
+ &mandatory_constraints_satisfied)) {
+ session_options->vad_enabled = value;
+ }
+
+ if (FindConstraint(constraints, MediaConstraintsInterface::kUseRtpMux, &value,
+ &mandatory_constraints_satisfied)) {
+ session_options->bundle_enabled = value;
+ } else {
+ // kUseRtpMux defaults to true according to spec.
+ session_options->bundle_enabled = true;
+ }
+
+ if (FindConstraint(constraints, MediaConstraintsInterface::kIceRestart,
+ &value, &mandatory_constraints_satisfied)) {
+ session_options->transport_options.ice_restart = value;
+ } else {
+ // kIceRestart defaults to false according to spec.
+ session_options->transport_options.ice_restart = false;
+ }
+
+ if (!constraints) {
+ return true;
+ }
+ return mandatory_constraints_satisfied == constraints->GetMandatory().size();
+}
+
+bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+ StunConfigurations* stun_config,
+ TurnConfigurations* turn_config) {
+ for (const webrtc::PeerConnectionInterface::IceServer& server : servers) {
+ if (!server.urls.empty()) {
+ for (const std::string& url : server.urls) {
+ if (url.empty()) {
+ LOG(LS_ERROR) << "Empty uri.";
+ return false;
+ }
+ if (!ParseIceServerUrl(server, url, stun_config, turn_config)) {
+ return false;
+ }
+ }
+ } else if (!server.uri.empty()) {
+ // Fallback to old .uri if new .urls isn't present.
+ if (!ParseIceServerUrl(server, server.uri, stun_config, turn_config)) {
+ return false;
+ }
+ } else {
+ LOG(LS_ERROR) << "Empty uri.";
+ return false;
+ }
+ }
+ return true;
+}
+
+PeerConnection::PeerConnection(PeerConnectionFactory* factory)
+ : factory_(factory),
+ observer_(NULL),
+ uma_observer_(NULL),
+ signaling_state_(kStable),
+ ice_state_(kIceNew),
+ ice_connection_state_(kIceConnectionNew),
+ ice_gathering_state_(kIceGatheringNew),
+ local_streams_(StreamCollection::Create()),
+ remote_streams_(StreamCollection::Create()) {}
+
+PeerConnection::~PeerConnection() {
+ RTC_DCHECK(signaling_thread()->IsCurrent());
+ // Need to detach RTP senders/receivers from WebRtcSession,
+ // since it's about to be destroyed.
+ for (const auto& sender : senders_) {
+ sender->Stop();
+ }
+ for (const auto& receiver : receivers_) {
+ receiver->Stop();
+ }
+}
+
+bool PeerConnection::Initialize(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) {
+ RTC_DCHECK(observer != nullptr);
+ if (!observer) {
+ return false;
+ }
+ observer_ = observer;
+
+ std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_config;
+ std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_config;
+ if (!ParseIceServers(configuration.servers, &stun_config, &turn_config)) {
+ return false;
+ }
+ port_allocator_.reset(
+ allocator_factory->CreatePortAllocator(stun_config, turn_config));
+
+ // To handle both internal and externally created port allocator, we will
+ // enable BUNDLE here.
+ int portallocator_flags = port_allocator_->flags();
+ portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
+ cricket::PORTALLOCATOR_ENABLE_IPV6;
+ bool value;
+ // If IPv6 flag was specified, we'll not override it by experiment.
+ if (FindConstraint(constraints, MediaConstraintsInterface::kEnableIPv6,
+ &value, nullptr)) {
+ if (!value) {
+ portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+ }
+ } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") ==
+ "Disabled") {
+ portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+ }
+
+ if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) {
+ portallocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
+ LOG(LS_INFO) << "TCP candidates are disabled.";
+ }
+
+ port_allocator_->set_flags(portallocator_flags);
+ // No step delay is used while allocating ports.
+ port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
+
+ media_controller_.reset(factory_->CreateMediaController());
+
+ remote_stream_factory_.reset(new RemoteMediaStreamFactory(
+ factory_->signaling_thread(), media_controller_->channel_manager()));
+
+ session_.reset(
+ new WebRtcSession(media_controller_.get(), factory_->signaling_thread(),
+ factory_->worker_thread(), port_allocator_.get()));
+ stats_.reset(new StatsCollector(this));
+
+ // Initialize the WebRtcSession. It creates transport channels etc.
+ if (!session_->Initialize(factory_->options(), constraints,
+ dtls_identity_store.Pass(), configuration)) {
+ return false;
+ }
+
+ // Register PeerConnection as receiver of local ice candidates.
+ // All the callbacks will be posted to the application from PeerConnection.
+ session_->RegisterIceObserver(this);
+ session_->SignalState.connect(this, &PeerConnection::OnSessionStateChange);
+ session_->SignalVoiceChannelDestroyed.connect(
+ this, &PeerConnection::OnVoiceChannelDestroyed);
+ session_->SignalVideoChannelDestroyed.connect(
+ this, &PeerConnection::OnVideoChannelDestroyed);
+ session_->SignalDataChannelCreated.connect(
+ this, &PeerConnection::OnDataChannelCreated);
+ session_->SignalDataChannelDestroyed.connect(
+ this, &PeerConnection::OnDataChannelDestroyed);
+ session_->SignalDataChannelOpenMessage.connect(
+ this, &PeerConnection::OnDataChannelOpenMessage);
+ return true;
+}
+
+rtc::scoped_refptr<StreamCollectionInterface>
+PeerConnection::local_streams() {
+ return local_streams_;
+}
+
+rtc::scoped_refptr<StreamCollectionInterface>
+PeerConnection::remote_streams() {
+ return remote_streams_;
+}
+
// TODO(deadbeef): Create RtpSenders immediately here, even if local
// description hasn't yet been set.
// Adds |local_stream| to the set of streams to be sent. Returns false if the
// connection is closed or the stream cannot be added (e.g. duplicate).
// Fires OnRenegotiationNeeded on success.
bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
  if (IsClosed()) {
    return false;
  }
  if (!CanAddLocalMediaStream(local_streams_, local_stream)) {
    return false;
  }

  local_streams_->AddStream(local_stream);

  // Find tracks that have already been configured in SDP. This can occur if a
  // local session description that contains the MSID of these tracks is set
  // before AddLocalStream is called. It can also occur if the local session
  // description is not changed and RemoveLocalStream is called and later
  // AddLocalStream is called again with the same stream.
  for (const auto& track : local_stream->GetAudioTracks()) {
    const TrackInfo* track_info =
        FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
    if (track_info) {
      // Track already negotiated; attach a sender using the known SSRC.
      CreateAudioSender(local_stream, track.get(), track_info->ssrc);
    }
  }
  for (const auto& track : local_stream->GetVideoTracks()) {
    const TrackInfo* track_info =
        FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
    if (track_info) {
      CreateVideoSender(local_stream, track.get(), track_info->ssrc);
    }
  }

  stats_->AddStream(local_stream);
  observer_->OnRenegotiationNeeded();
  return true;
}
+
// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
// indefinitely.
// Removes |local_stream| from the set of sent streams, destroying any
// RtpSenders that were created for its tracks. Fires OnRenegotiationNeeded
// unless the connection is already closed.
void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
  for (const auto& track : local_stream->GetAudioTracks()) {
    const TrackInfo* track_info =
        FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
    if (track_info) {
      DestroyAudioSender(local_stream, track.get(), track_info->ssrc);
    }
  }
  for (const auto& track : local_stream->GetVideoTracks()) {
    const TrackInfo* track_info =
        FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
    if (track_info) {
      DestroyVideoSender(local_stream, track.get());
    }
  }

  local_streams_->RemoveStream(local_stream);

  // No renegotiation signal once closed; the removal above still happens so
  // local bookkeeping stays consistent.
  if (IsClosed()) {
    return;
  }
  observer_->OnRenegotiationNeeded();
}
+
+rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
+ AudioTrackInterface* track) {
+ if (!track) {
+ LOG(LS_ERROR) << "CreateDtmfSender - track is NULL.";
+ return NULL;
+ }
+ if (!local_streams_->FindAudioTrack(track->id())) {
+ LOG(LS_ERROR) << "CreateDtmfSender is called with a non local audio track.";
+ return NULL;
+ }
+
+ rtc::scoped_refptr<DtmfSenderInterface> sender(
+ DtmfSender::Create(track, signaling_thread(), session_.get()));
+ if (!sender.get()) {
+ LOG(LS_ERROR) << "CreateDtmfSender failed on DtmfSender::Create.";
+ return NULL;
+ }
+ return DtmfSenderProxy::Create(signaling_thread(), sender.get());
+}
+
+std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
+ const {
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
+ for (const auto& sender : senders_) {
+ senders.push_back(RtpSenderProxy::Create(signaling_thread(), sender.get()));
+ }
+ return senders;
+}
+
+std::vector<rtc::scoped_refptr<RtpReceiverInterface>>
+PeerConnection::GetReceivers() const {
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+ for (const auto& receiver : receivers_) {
+ receivers.push_back(
+ RtpReceiverProxy::Create(signaling_thread(), receiver.get()));
+ }
+ return receivers;
+}
+
+bool PeerConnection::GetStats(StatsObserver* observer,
+ MediaStreamTrackInterface* track,
+ StatsOutputLevel level) {
+ RTC_DCHECK(signaling_thread()->IsCurrent());
+ if (!VERIFY(observer != NULL)) {
+ LOG(LS_ERROR) << "GetStats - observer is NULL.";
+ return false;
+ }
+
+ stats_->UpdateStats(level);
+ signaling_thread()->Post(this, MSG_GETSTATS,
+ new GetStatsMsg(observer, track));
+ return true;
+}
+
// Returns the current signaling state (stable/have-local-offer/...).
PeerConnectionInterface::SignalingState PeerConnection::signaling_state() {
  return signaling_state_;
}
+
// Returns the (legacy) aggregate ICE state.
PeerConnectionInterface::IceState PeerConnection::ice_state() {
  return ice_state_;
}
+
// Returns the current ICE connection state (new/checking/connected/...).
PeerConnectionInterface::IceConnectionState
PeerConnection::ice_connection_state() {
  return ice_connection_state_;
}
+
// Returns the current ICE candidate gathering state.
PeerConnectionInterface::IceGatheringState
PeerConnection::ice_gathering_state() {
  return ice_gathering_state_;
}
+
// Creates a data channel named |label| with optional |config|. Returns a
// signaling-thread proxy, or nullptr if creation fails. May fire
// OnRenegotiationNeeded (see comment below).
rtc::scoped_refptr<DataChannelInterface>
PeerConnection::CreateDataChannel(
    const std::string& label,
    const DataChannelInit* config) {
  // Must be captured before creation so we can tell whether this is the
  // first channel.
  bool first_datachannel = !HasDataChannels();

  rtc::scoped_ptr<InternalDataChannelInit> internal_config;
  if (config) {
    internal_config.reset(new InternalDataChannelInit(*config));
  }
  rtc::scoped_refptr<DataChannelInterface> channel(
      InternalCreateDataChannel(label, internal_config.get()));
  if (!channel.get()) {
    return nullptr;
  }

  // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or
  // the first SCTP DataChannel.
  if (session_->data_channel_type() == cricket::DCT_RTP || first_datachannel) {
    observer_->OnRenegotiationNeeded();
  }

  return DataChannelProxy::Create(signaling_thread(), channel.get());
}
+
// Legacy constraints-based CreateOffer: translates the recognized
// MediaConstraints keys into RTCOfferAnswerOptions and delegates to the
// options-based overload. Unrecognized constraints are ignored here.
void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
                                 const MediaConstraintsInterface* constraints) {
  if (!VERIFY(observer != nullptr)) {
    LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
    return;
  }
  RTCOfferAnswerOptions options;

  bool value;
  size_t mandatory_constraints = 0;

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveAudio,
                     &value,
                     &mandatory_constraints)) {
    options.offer_to_receive_audio =
        value ? RTCOfferAnswerOptions::kOfferToReceiveMediaTrue : 0;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveVideo,
                     &value,
                     &mandatory_constraints)) {
    options.offer_to_receive_video =
        value ? RTCOfferAnswerOptions::kOfferToReceiveMediaTrue : 0;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kVoiceActivityDetection,
                     &value,
                     &mandatory_constraints)) {
    options.voice_activity_detection = value;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kIceRestart,
                     &value,
                     &mandatory_constraints)) {
    options.ice_restart = value;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kUseRtpMux,
                     &value,
                     &mandatory_constraints)) {
    options.use_rtp_mux = value;
  }

  CreateOffer(observer, options);
}
+
// Creates an SDP offer per |options|. On invalid options the failure is
// reported asynchronously via the observer; otherwise the actual offer
// creation is delegated to the session.
void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
                                 const RTCOfferAnswerOptions& options) {
  if (!VERIFY(observer != nullptr)) {
    LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
    return;
  }

  cricket::MediaSessionOptions session_options;
  if (!GetOptionsForOffer(options, &session_options)) {
    std::string error = "CreateOffer called with invalid options.";
    LOG(LS_ERROR) << error;
    // Failure is delivered on the signaling thread, never synchronously.
    PostCreateSessionDescriptionFailure(observer, error);
    return;
  }

  session_->CreateOffer(observer, options, session_options);
}
+
// Creates an SDP answer honoring |constraints|. On invalid constraints the
// failure is reported asynchronously via the observer; otherwise creation is
// delegated to the session.
void PeerConnection::CreateAnswer(
    CreateSessionDescriptionObserver* observer,
    const MediaConstraintsInterface* constraints) {
  if (!VERIFY(observer != nullptr)) {
    LOG(LS_ERROR) << "CreateAnswer - observer is NULL.";
    return;
  }

  cricket::MediaSessionOptions session_options;
  if (!GetOptionsForAnswer(constraints, &session_options)) {
    std::string error = "CreateAnswer called with invalid constraints.";
    LOG(LS_ERROR) << error;
    PostCreateSessionDescriptionFailure(observer, error);
    return;
  }

  session_->CreateAnswer(observer, constraints, session_options);
}
+
// Applies |desc| as the local description. On success, updates local track
// bookkeeping (senders), allocates SCTP sids if the SSL role just became
// known, posts the success callback, and only then starts ICE gathering
// (ordering matters — see comment at the bottom). Takes ownership of |desc|
// via session_->SetLocalDescription.
void PeerConnection::SetLocalDescription(
    SetSessionDescriptionObserver* observer,
    SessionDescriptionInterface* desc) {
  if (!VERIFY(observer != nullptr)) {
    LOG(LS_ERROR) << "SetLocalDescription - observer is NULL.";
    return;
  }
  if (!desc) {
    PostSetSessionDescriptionFailure(observer, "SessionDescription is NULL.");
    return;
  }
  // Update stats here so that we have the most recent stats for tracks and
  // streams that might be removed by updating the session description.
  stats_->UpdateStats(kStatsOutputLevelStandard);
  std::string error;
  if (!session_->SetLocalDescription(desc, &error)) {
    PostSetSessionDescriptionFailure(observer, error);
    return;
  }

  // If setting the description decided our SSL role, allocate any necessary
  // SCTP sids.
  rtc::SSLRole role;
  if (session_->data_channel_type() == cricket::DCT_SCTP &&
      session_->GetSslRole(&role)) {
    AllocateSctpSids(role);
  }

  // Update state and SSRC of local MediaStreams and DataChannels based on the
  // local session description.
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(desc->description());
  if (audio_content) {
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
  }

  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(desc->description());
  if (video_content) {
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateLocalTracks(video_desc->streams(), video_desc->type());
  }

  const cricket::ContentInfo* data_content =
      GetFirstDataContent(desc->description());
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    // Only RTP data channels are signaled in SDP; SCTP channels are not.
    if (rtc::starts_with(data_desc->protocol().data(),
                         cricket::kMediaProtocolRtpPrefix)) {
      UpdateLocalRtpDataChannels(data_desc->streams());
    }
  }

  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);

  // MaybeStartGathering needs to be called after posting
  // MSG_SET_SESSIONDESCRIPTION_SUCCESS, so that we don't signal any candidates
  // before signaling that SetLocalDescription completed.
  session_->MaybeStartGathering();
}
+
// Applies |desc| as the remote description. On success, creates/removes
// remote streams, tracks and receivers to match the description, handles
// the "default stream" fallback for peers that don't support MSID, and
// posts the success callback. Takes ownership of |desc| via
// session_->SetRemoteDescription.
void PeerConnection::SetRemoteDescription(
    SetSessionDescriptionObserver* observer,
    SessionDescriptionInterface* desc) {
  if (!VERIFY(observer != nullptr)) {
    LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL.";
    return;
  }
  if (!desc) {
    PostSetSessionDescriptionFailure(observer, "SessionDescription is NULL.");
    return;
  }
  // Update stats here so that we have the most recent stats for tracks and
  // streams that might be removed by updating the session description.
  stats_->UpdateStats(kStatsOutputLevelStandard);
  std::string error;
  if (!session_->SetRemoteDescription(desc, &error)) {
    PostSetSessionDescriptionFailure(observer, error);
    return;
  }

  // If setting the description decided our SSL role, allocate any necessary
  // SCTP sids.
  rtc::SSLRole role;
  if (session_->data_channel_type() == cricket::DCT_SCTP &&
      session_->GetSslRole(&role)) {
    AllocateSctpSids(role);
  }

  const cricket::SessionDescription* remote_desc = desc->description();

  // We wait to signal new streams until we finish processing the description,
  // since only at that point will new streams have all their tracks.
  rtc::scoped_refptr<StreamCollection> new_streams(StreamCollection::Create());

  // Find all audio rtp streams and create corresponding remote AudioTracks
  // and MediaStreams.
  // Note: the inner |desc| locals below shadow the parameter |desc|.
  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
  if (audio_content) {
    const cricket::AudioContentDescription* desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
    // A "default" track is needed when the peer sends media but doesn't
    // support MSID (no stream/track ids in the SDP).
    remote_info_.default_audio_track_needed =
        !remote_desc->msid_supported() && desc->streams().empty() &&
        MediaContentDirectionHasSend(desc->direction());
  }

  // Find all video rtp streams and create corresponding remote VideoTracks
  // and MediaStreams.
  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
  if (video_content) {
    const cricket::VideoContentDescription* desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
    remote_info_.default_video_track_needed =
        !remote_desc->msid_supported() && desc->streams().empty() &&
        MediaContentDirectionHasSend(desc->direction());
  }

  // Update the DataChannels with the information from the remote peer.
  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
  if (data_content) {
    const cricket::DataContentDescription* desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (rtc::starts_with(desc->protocol().data(),
                         cricket::kMediaProtocolRtpPrefix)) {
      UpdateRemoteRtpDataChannels(GetActiveStreams(desc));
    }
  }

  // Iterate new_streams and notify the observer about new MediaStreams.
  for (size_t i = 0; i < new_streams->count(); ++i) {
    MediaStreamInterface* new_stream = new_streams->at(i);
    stats_->AddStream(new_stream);
    observer_->OnAddStream(new_stream);
  }

  // Find removed MediaStreams.
  if (remote_info_.IsDefaultMediaStreamNeeded() &&
      remote_streams_->find(kDefaultStreamLabel) != nullptr) {
    // The default media stream already exists. No need to do anything.
  } else {
    UpdateEndedRemoteMediaStreams();
    remote_info_.msid_supported |= remote_streams_->count() > 0;
  }
  MaybeCreateDefaultStream();

  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
}
+
// Re-applies ICE-related configuration: parses the STUN/TURN server list and
// updates the session's ICE config and transport policy. Returns false if
// the server list cannot be parsed or the transport type is rejected.
bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
  if (port_allocator_) {
    std::vector<PortAllocatorFactoryInterface::StunConfiguration> stuns;
    std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turns;
    if (!ParseIceServers(config.servers, &stuns, &turns)) {
      return false;
    }

    std::vector<rtc::SocketAddress> stun_hosts;
    typedef std::vector<StunConfiguration>::const_iterator StunIt;
    for (StunIt stun_it = stuns.begin(); stun_it != stuns.end(); ++stun_it) {
      stun_hosts.push_back(stun_it->server);
    }

    // Only the first STUN server is used (logged below).
    rtc::SocketAddress stun_addr;
    if (!stun_hosts.empty()) {
      stun_addr = stun_hosts.front();
      LOG(LS_INFO) << "SetConfiguration: StunServer Address: "
                   << stun_addr.ToString();
    }

    for (size_t i = 0; i < turns.size(); ++i) {
      cricket::RelayCredentials credentials(turns[i].username,
                                            turns[i].password);
      // NOTE(review): relay_server is fully populated here but never passed
      // to port_allocator_ within this function — confirm the TURN config is
      // actually applied somewhere (otherwise this loop only logs).
      cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
      cricket::ProtocolType protocol;
      if (cricket::StringToProto(turns[i].transport_type.c_str(), &protocol)) {
        relay_server.ports.push_back(cricket::ProtocolAddress(
            turns[i].server, protocol, turns[i].secure));
        relay_server.credentials = credentials;
        LOG(LS_INFO) << "SetConfiguration: TurnServer Address: "
                     << turns[i].server.ToString();
      } else {
        LOG(LS_WARNING) << "Ignoring TURN server " << turns[i].server << ". "
                        << "Reason= Incorrect " << turns[i].transport_type
                        << " transport parameter.";
      }
    }
  }
  session_->SetIceConfig(session_->ParseIceConfig(config));
  return session_->SetIceTransports(config.type);
}
+
// Hands a remote ICE candidate to the session. Returns false if the
// candidate could not be processed.
bool PeerConnection::AddIceCandidate(
    const IceCandidateInterface* ice_candidate) {
  return session_->ProcessIceMessage(ice_candidate);
}
+
// Registers |observer| for UMA metrics, forwards it to the session, and
// immediately reports whether this connection runs with IPv6 enabled.
void PeerConnection::RegisterUMAObserver(UMAObserver* observer) {
  uma_observer_ = observer;

  if (session_) {
    session_->set_metrics_observer(uma_observer_);
  }

  // Send information about IPv4/IPv6 status.
  if (uma_observer_ && port_allocator_) {
    if (port_allocator_->flags() & cricket::PORTALLOCATOR_ENABLE_IPV6) {
      uma_observer_->IncrementEnumCounter(
          kEnumCounterAddressFamily, kPeerConnection_IPv6,
          kPeerConnectionAddressFamilyCounter_Max);
    } else {
      uma_observer_->IncrementEnumCounter(
          kEnumCounterAddressFamily, kPeerConnection_IPv4,
          kPeerConnectionAddressFamilyCounter_Max);
    }
  }
}
+
// Returns the currently applied local description (may be null), owned by
// the session.
const SessionDescriptionInterface* PeerConnection::local_description() const {
  return session_->local_description();
}
+
// Returns the currently applied remote description (may be null), owned by
// the session.
const SessionDescriptionInterface* PeerConnection::remote_description() const {
  return session_->remote_description();
}
+
// Closes the connection by closing the session; the signaling-state change
// to kClosed is driven by the resulting OnSessionStateChange callback.
void PeerConnection::Close() {
  // Update stats here so that we have the most recent stats for tracks and
  // streams before the channels are closed.
  stats_->UpdateStats(kStatsOutputLevelStandard);

  session_->Close();
}
+
// Maps WebRtcSession states onto the public PeerConnection signaling states
// and fires the corresponding state-change notifications. Unknown states are
// ignored.
void PeerConnection::OnSessionStateChange(WebRtcSession* /*session*/,
                                          WebRtcSession::State state) {
  switch (state) {
    case WebRtcSession::STATE_INIT:
      ChangeSignalingState(PeerConnectionInterface::kStable);
      break;
    case WebRtcSession::STATE_SENTOFFER:
      ChangeSignalingState(PeerConnectionInterface::kHaveLocalOffer);
      break;
    case WebRtcSession::STATE_SENTPRANSWER:
      ChangeSignalingState(PeerConnectionInterface::kHaveLocalPrAnswer);
      break;
    case WebRtcSession::STATE_RECEIVEDOFFER:
      ChangeSignalingState(PeerConnectionInterface::kHaveRemoteOffer);
      break;
    case WebRtcSession::STATE_RECEIVEDPRANSWER:
      ChangeSignalingState(PeerConnectionInterface::kHaveRemotePrAnswer);
      break;
    case WebRtcSession::STATE_INPROGRESS:
      // Negotiation completed; back to stable.
      ChangeSignalingState(PeerConnectionInterface::kStable);
      break;
    case WebRtcSession::STATE_CLOSED:
      ChangeSignalingState(PeerConnectionInterface::kClosed);
      break;
    default:
      break;
  }
}
+
// Dispatches messages posted to the signaling thread. Each message carries a
// heap-allocated payload that is owned (and deleted) here after delivery to
// the corresponding observer.
void PeerConnection::OnMessage(rtc::Message* msg) {
  switch (msg->message_id) {
    case MSG_SET_SESSIONDESCRIPTION_SUCCESS: {
      SetSessionDescriptionMsg* param =
          static_cast<SetSessionDescriptionMsg*>(msg->pdata);
      param->observer->OnSuccess();
      delete param;
      break;
    }
    case MSG_SET_SESSIONDESCRIPTION_FAILED: {
      SetSessionDescriptionMsg* param =
          static_cast<SetSessionDescriptionMsg*>(msg->pdata);
      param->observer->OnFailure(param->error);
      delete param;
      break;
    }
    case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
      CreateSessionDescriptionMsg* param =
          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
      param->observer->OnFailure(param->error);
      delete param;
      break;
    }
    case MSG_GETSTATS: {
      // Stats were refreshed in GetStats(); this just reads the cached
      // reports and completes the callback.
      GetStatsMsg* param = static_cast<GetStatsMsg*>(msg->pdata);
      StatsReports reports;
      stats_->GetStats(param->track, &reports);
      param->observer->OnComplete(reports);
      delete param;
      break;
    }
    default:
      RTC_DCHECK(false && "Not implemented");
      break;
  }
}
+
// Creates an RtpReceiver for a remote audio track and tracks it in
// receivers_. |stream| is currently unused here.
void PeerConnection::CreateAudioReceiver(MediaStreamInterface* stream,
                                         AudioTrackInterface* audio_track,
                                         uint32_t ssrc) {
  receivers_.push_back(new AudioRtpReceiver(audio_track, ssrc, session_.get()));
}
+
// Creates an RtpReceiver for a remote video track and tracks it in
// receivers_. |stream| is currently unused here.
void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
                                         VideoTrackInterface* video_track,
                                         uint32_t ssrc) {
  receivers_.push_back(new VideoRtpReceiver(video_track, ssrc, session_.get()));
}
+
// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
// description.
// Stops and removes the RtpReceiver for |audio_track|; logs a warning if no
// such receiver exists. |stream| is currently unused here.
void PeerConnection::DestroyAudioReceiver(MediaStreamInterface* stream,
                                          AudioTrackInterface* audio_track) {
  auto it = FindReceiverForTrack(audio_track);
  if (it == receivers_.end()) {
    LOG(LS_WARNING) << "RtpReceiver for track with id " << audio_track->id()
                    << " doesn't exist.";
  } else {
    (*it)->Stop();
    receivers_.erase(it);
  }
}
+
// Stops and removes the RtpReceiver for |video_track|; logs a warning if no
// such receiver exists. |stream| is currently unused here.
void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
                                          VideoTrackInterface* video_track) {
  auto it = FindReceiverForTrack(video_track);
  if (it == receivers_.end()) {
    LOG(LS_WARNING) << "RtpReceiver for track with id " << video_track->id()
                    << " doesn't exist.";
  } else {
    (*it)->Stop();
    receivers_.erase(it);
  }
}
+
// Creates an RtpSender for a local audio track, tracks it in senders_, and
// registers the track with the stats module. |stream| is currently unused.
void PeerConnection::CreateAudioSender(MediaStreamInterface* stream,
                                       AudioTrackInterface* audio_track,
                                       uint32_t ssrc) {
  senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
  stats_->AddLocalAudioTrack(audio_track, ssrc);
}
+
// Creates an RtpSender for a local video track and tracks it in senders_.
// |stream| is currently unused here.
void PeerConnection::CreateVideoSender(MediaStreamInterface* stream,
                                       VideoTrackInterface* video_track,
                                       uint32_t ssrc) {
  senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
}
+
+// TODO(deadbeef): Keep RtpSenders around even if track goes away in local
+// description.
+void PeerConnection::DestroyAudioSender(MediaStreamInterface* stream,
+ AudioTrackInterface* audio_track,
+ uint32_t ssrc) {
+ auto it = FindSenderForTrack(audio_track);
+ if (it == senders_.end()) {
+ LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
+ << " doesn't exist.";
+ return;
+ } else {
+ (*it)->Stop();
+ senders_.erase(it);
+ }
+ stats_->RemoveLocalAudioTrack(audio_track, ssrc);
+}
+
+void PeerConnection::DestroyVideoSender(MediaStreamInterface* stream,
+ VideoTrackInterface* video_track) {
+ auto it = FindSenderForTrack(video_track);
+ if (it == senders_.end()) {
+ LOG(LS_WARNING) << "RtpSender for track with id " << video_track->id()
+ << " doesn't exist.";
+ return;
+ } else {
+ (*it)->Stop();
+ senders_.erase(it);
+ }
+}
+
// Records the new ICE connection state and forwards it to the application
// observer. Must run on the signaling thread.
void PeerConnection::OnIceConnectionChange(
    PeerConnectionInterface::IceConnectionState new_state) {
  RTC_DCHECK(signaling_thread()->IsCurrent());
  // After transitioning to "closed", ignore any additional states from
  // WebRtcSession (such as "disconnected").
  if (IsClosed()) {
    return;
  }
  ice_connection_state_ = new_state;
  observer_->OnIceConnectionChange(ice_connection_state_);
}
+
// Records the new ICE gathering state and forwards it to the application
// observer; ignored once the connection is closed. Signaling thread only.
void PeerConnection::OnIceGatheringChange(
    PeerConnectionInterface::IceGatheringState new_state) {
  RTC_DCHECK(signaling_thread()->IsCurrent());
  if (IsClosed()) {
    return;
  }
  ice_gathering_state_ = new_state;
  observer_->OnIceGatheringChange(ice_gathering_state_);
}
+
// Forwards a locally gathered ICE candidate to the application observer.
// Signaling thread only.
void PeerConnection::OnIceCandidate(const IceCandidateInterface* candidate) {
  RTC_DCHECK(signaling_thread()->IsCurrent());
  observer_->OnIceCandidate(candidate);
}
+
// Notifies the application observer that ICE candidate gathering finished.
// Signaling thread only.
void PeerConnection::OnIceComplete() {
  RTC_DCHECK(signaling_thread()->IsCurrent());
  observer_->OnIceComplete();
}
+
// Forwards the "is receiving media" flag change to the application observer.
// Signaling thread only.
void PeerConnection::OnIceConnectionReceivingChange(bool receiving) {
  RTC_DCHECK(signaling_thread()->IsCurrent());
  observer_->OnIceConnectionReceivingChange(receiving);
}
+
// Updates signaling_state_ and notifies the observer. Transitioning to
// kClosed also forces the ICE connection and gathering states to their
// terminal values so the application sees a consistent shutdown.
void PeerConnection::ChangeSignalingState(
    PeerConnectionInterface::SignalingState signaling_state) {
  signaling_state_ = signaling_state;
  if (signaling_state == kClosed) {
    ice_connection_state_ = kIceConnectionClosed;
    observer_->OnIceConnectionChange(ice_connection_state_);
    if (ice_gathering_state_ != kIceGatheringComplete) {
      ice_gathering_state_ = kIceGatheringComplete;
      observer_->OnIceGatheringChange(ice_gathering_state_);
    }
  }
  observer_->OnSignalingChange(signaling_state_);
  // Legacy notification kept alongside OnSignalingChange.
  observer_->OnStateChange(PeerConnectionObserver::kSignalingState);
}
+
// Delivers a SetSessionDescription failure asynchronously so the observer is
// never called re-entrantly. The message payload is deleted in OnMessage.
void PeerConnection::PostSetSessionDescriptionFailure(
    SetSessionDescriptionObserver* observer,
    const std::string& error) {
  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
  msg->error = error;
  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_FAILED, msg);
}
+
// Delivers a CreateOffer/CreateAnswer failure asynchronously so the observer
// is never called re-entrantly. The message payload is deleted in OnMessage.
void PeerConnection::PostCreateSessionDescriptionFailure(
    CreateSessionDescriptionObserver* observer,
    const std::string& error) {
  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
  msg->error = error;
  signaling_thread()->Post(this, MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
}
+
// Translates public RTCOfferAnswerOptions into cricket session options for
// an offer, filling in receive directions, BUNDLE, and the data channel
// type. Returns false if |rtc_options| are invalid.
bool PeerConnection::GetOptionsForOffer(
    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
    cricket::MediaSessionOptions* session_options) {
  if (!ConvertRtcOptionsForOffer(rtc_options, session_options)) {
    return false;
  }

  SetStreams(session_options, local_streams_, rtp_data_channels_);
  // Offer to receive audio/video if the constraint is not set and there are
  // send streams, or we're currently receiving.
  if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
    session_options->recv_audio =
        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO) ||
        !remote_audio_tracks_.empty();
  }
  if (rtc_options.offer_to_receive_video == RTCOfferAnswerOptions::kUndefined) {
    session_options->recv_video =
        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO) ||
        !remote_video_tracks_.empty();
  }
  // BUNDLE only makes sense when there is at least one media section.
  session_options->bundle_enabled =
      session_options->bundle_enabled &&
      (session_options->has_audio() || session_options->has_video() ||
       session_options->has_data());

  if (session_->data_channel_type() == cricket::DCT_SCTP && HasDataChannels()) {
    session_options->data_channel_type = cricket::DCT_SCTP;
  }
  return true;
}
+
// Translates legacy MediaConstraints into cricket session options for an
// answer. Receive directions default to false and are enabled only by the
// constraints. Returns false if the constraints are invalid.
bool PeerConnection::GetOptionsForAnswer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* session_options) {
  session_options->recv_audio = false;
  session_options->recv_video = false;
  if (!ParseConstraintsForAnswer(constraints, session_options)) {
    return false;
  }

  SetStreams(session_options, local_streams_, rtp_data_channels_);
  session_options->bundle_enabled =
      session_options->bundle_enabled &&
      (session_options->has_audio() || session_options->has_video() ||
       session_options->has_data());

  // RTP data channel is handled in MediaSessionOptions::AddStream. SCTP streams
  // are not signaled in the SDP so does not go through that path and must be
  // handled here.
  if (session_->data_channel_type() == cricket::DCT_SCTP) {
    session_options->data_channel_type = cricket::DCT_SCTP;
  }
  return true;
}
+
// Reconciles the tracked remote tracks of |media_type| against |streams|
// from a newly applied remote description: removes tracks that disappeared
// or changed identity, and creates streams/tracks for new entries. Streams
// created here are also appended to |new_streams| so the caller can signal
// them once fully populated.
void PeerConnection::UpdateRemoteStreamsList(
    const cricket::StreamParamsVec& streams,
    cricket::MediaType media_type,
    StreamCollection* new_streams) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);

  // Find removed tracks, i.e., tracks where the track id or ssrc don't match
  // the new StreamParam.
  auto track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    const TrackInfo& info = *track_it;
    const cricket::StreamParams* params =
        cricket::GetStreamBySsrc(streams, info.ssrc);
    if (!params || params->id != info.track_id) {
      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
      track_it = current_tracks->erase(track_it);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (const cricket::StreamParams& params : streams) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = params.sync_label;
    const std::string& track_id = params.id;
    uint32_t ssrc = params.first_ssrc();

    rtc::scoped_refptr<MediaStreamInterface> stream =
        remote_streams_->find(stream_label);
    if (!stream) {
      // This is a new MediaStream. Create a new remote MediaStream.
      stream = remote_stream_factory_->CreateMediaStream(stream_label);
      remote_streams_->AddStream(stream);
      new_streams->AddStream(stream);
    }

    const TrackInfo* track_info =
        FindTrackInfo(*current_tracks, stream_label, track_id);
    if (!track_info) {
      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
      OnRemoteTrackSeen(stream_label, track_id, ssrc, media_type);
    }
  }
}
+
// Creates a remote track of |media_type| in the stream labeled
// |stream_label| (which must already exist in remote_streams_) and attaches
// an RtpReceiver for |ssrc|.
void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
                                       const std::string& track_id,
                                       uint32_t ssrc,
                                       cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track =
        remote_stream_factory_->AddAudioTrack(stream, track_id);
    CreateAudioReceiver(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track =
        remote_stream_factory_->AddVideoTrack(stream, track_id);
    CreateVideoReceiver(stream, video_track, ssrc);
  } else {
    RTC_DCHECK(false && "Invalid media type");
  }
}
+
+void PeerConnection::OnRemoteTrackRemoved(const std::string& stream_label,
+ const std::string& track_id,
+ cricket::MediaType media_type) {
+ MediaStreamInterface* stream = remote_streams_->find(stream_label);
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ rtc::scoped_refptr<AudioTrackInterface> audio_track =
+ stream->FindAudioTrack(track_id);
+ if (audio_track) {
+ audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+ stream->RemoveTrack(audio_track);
+ DestroyAudioReceiver(stream, audio_track);
+ }
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ rtc::scoped_refptr<VideoTrackInterface> video_track =
+ stream->FindVideoTrack(track_id);
+ if (video_track) {
+ video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+ stream->RemoveTrack(video_track);
+ DestroyVideoReceiver(stream, video_track);
+ }
+ } else {
+ ASSERT(false && "Invalid media type");
+ }
+}
+
// Removes remote MediaStreams that have no tracks left and notifies the
// observer. Collection happens first so remote_streams_ is not mutated
// while being iterated.
void PeerConnection::UpdateEndedRemoteMediaStreams() {
  std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_to_remove;
  for (size_t i = 0; i < remote_streams_->count(); ++i) {
    MediaStreamInterface* stream = remote_streams_->at(i);
    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
      streams_to_remove.push_back(stream);
    }
  }

  for (const auto& stream : streams_to_remove) {
    remote_streams_->RemoveStream(stream);
    observer_->OnRemoveStream(stream);
  }
}
+
// Creates the synthetic "default" remote stream (and default audio/video
// tracks) for peers that send media without MSID support. No-op if no
// default stream is needed. The stream is only signaled to the observer
// when newly created.
void PeerConnection::MaybeCreateDefaultStream() {
  if (!remote_info_.IsDefaultMediaStreamNeeded()) {
    return;
  }

  bool default_created = false;

  rtc::scoped_refptr<MediaStreamInterface> default_remote_stream =
      remote_streams_->find(kDefaultStreamLabel);
  if (default_remote_stream == nullptr) {
    default_created = true;
    default_remote_stream =
        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
    remote_streams_->AddStream(default_remote_stream);
  }
  // SSRC 0 marks the default tracks (no SSRC was signaled in the SDP).
  if (remote_info_.default_audio_track_needed &&
      default_remote_stream->GetAudioTracks().size() == 0) {
    remote_audio_tracks_.push_back(
        TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0));
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
                      cricket::MEDIA_TYPE_AUDIO);
  }
  if (remote_info_.default_video_track_needed &&
      default_remote_stream->GetVideoTracks().size() == 0) {
    remote_video_tracks_.push_back(
        TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0));
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
                      cricket::MEDIA_TYPE_VIDEO);
  }
  if (default_created) {
    stats_->AddStream(default_remote_stream);
    observer_->OnAddStream(default_remote_stream);
  }
}
+
+void PeerConnection::EndRemoteTracks(cricket::MediaType media_type) {
+ TrackInfos* current_tracks = GetRemoteTracks(media_type);
+ for (TrackInfos::iterator track_it = current_tracks->begin();
+ track_it != current_tracks->end(); ++track_it) {
+ const TrackInfo& info = *track_it;
+ MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
+ // There's no guarantee the track is still available, e.g. the track may
+ // have been removed from the stream by javascript.
+ if (track) {
+ track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+ }
+ }
+ if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
+ // There's no guarantee the track is still available, e.g. the track may
+ // have been removed from the stream by javascript.
+ if (track) {
+ track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+ }
+ }
+ }
+}
+
// Reconciles the tracked local tracks of |media_type| against |streams| from
// a newly applied local description: destroys senders for tracks that
// disappeared or changed identity, and creates senders for new entries.
void PeerConnection::UpdateLocalTracks(
    const std::vector<cricket::StreamParams>& streams,
    cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetLocalTracks(media_type);

  // Find removed tracks. I.e., tracks where the track id, stream label or ssrc
  // don't match the new StreamParam.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    const TrackInfo& info = *track_it;
    const cricket::StreamParams* params =
        cricket::GetStreamBySsrc(streams, info.ssrc);
    if (!params || params->id != info.track_id ||
        params->sync_label != info.stream_label) {
      OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc,
                          media_type);
      track_it = current_tracks->erase(track_it);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (const cricket::StreamParams& params : streams) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = params.sync_label;
    const std::string& track_id = params.id;
    uint32_t ssrc = params.first_ssrc();
    const TrackInfo* track_info =
        FindTrackInfo(*current_tracks, stream_label, track_id);
    if (!track_info) {
      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
      OnLocalTrackSeen(stream_label, track_id, params.first_ssrc(), media_type);
    }
  }
}
+
+void PeerConnection::OnLocalTrackSeen(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type) {
+ MediaStreamInterface* stream = local_streams_->find(stream_label);
+ if (!stream) {
+ LOG(LS_WARNING) << "An unknown local MediaStream with label "
+ << stream_label << " has been configured.";
+ return;
+ }
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
+ if (!audio_track) {
+ LOG(LS_WARNING) << "An unknown local AudioTrack with id , " << track_id
+ << " has been configured.";
+ return;
+ }
+ CreateAudioSender(stream, audio_track, ssrc);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
+ if (!video_track) {
+ LOG(LS_WARNING) << "An unknown local VideoTrack with id , " << track_id
+ << " has been configured.";
+ return;
+ }
+ CreateVideoSender(stream, video_track, ssrc);
+ } else {
+ RTC_DCHECK(false && "Invalid media type");
+ }
+}
+
+void PeerConnection::OnLocalTrackRemoved(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type) {
+ MediaStreamInterface* stream = local_streams_->find(stream_label);
+ if (!stream) {
+ // This is the normal case. I.e., RemoveLocalStream has been called and the
+ // SessionDescriptions has been renegotiated.
+ return;
+ }
+ // A track has been removed from the SessionDescription but the MediaStream
+ // is still associated with PeerConnection. This only occurs if the SDP
+ // doesn't match with the calls to AddLocalStream and RemoveLocalStream.
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
+ if (!audio_track) {
+ return;
+ }
+ DestroyAudioSender(stream, audio_track, ssrc);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
+ if (!video_track) {
+ return;
+ }
+ DestroyVideoSender(stream, video_track);
+ } else {
+ RTC_DCHECK(false && "Invalid media type.");
+ }
+}
+
+void PeerConnection::UpdateLocalRtpDataChannels(
+ const cricket::StreamParamsVec& streams) {
+ std::vector<std::string> existing_channels;
+
+ // Find new and active data channels.
+ for (const cricket::StreamParams& params : streams) {
+ // |it->sync_label| is actually the data channel label. The reason is that
+ // we use the same naming of data channels as we do for
+ // MediaStreams and Tracks.
+ // For MediaStreams, the sync_label is the MediaStream label and the
+ // track label is the same as |streamid|.
+ const std::string& channel_label = params.sync_label;
+ auto data_channel_it = rtp_data_channels_.find(channel_label);
+ if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
+ continue;
+ }
+ // Set the SSRC the data channel should use for sending.
+ data_channel_it->second->SetSendSsrc(params.first_ssrc());
+ existing_channels.push_back(data_channel_it->first);
+ }
+
+ UpdateClosingRtpDataChannels(existing_channels, true);
+}
+
+void PeerConnection::UpdateRemoteRtpDataChannels(
+ const cricket::StreamParamsVec& streams) {
+ std::vector<std::string> existing_channels;
+
+ // Find new and active data channels.
+ for (const cricket::StreamParams& params : streams) {
+ // The data channel label is either the mslabel or the SSRC if the mslabel
+ // does not exist. Ex a=ssrc:444330170 mslabel:test1.
+ std::string label = params.sync_label.empty()
+ ? rtc::ToString(params.first_ssrc())
+ : params.sync_label;
+ auto data_channel_it = rtp_data_channels_.find(label);
+ if (data_channel_it == rtp_data_channels_.end()) {
+ // This is a new data channel.
+ CreateRemoteRtpDataChannel(label, params.first_ssrc());
+ } else {
+ data_channel_it->second->SetReceiveSsrc(params.first_ssrc());
+ }
+ existing_channels.push_back(label);
+ }
+
+ UpdateClosingRtpDataChannels(existing_channels, false);
+}
+
+void PeerConnection::UpdateClosingRtpDataChannels(
+ const std::vector<std::string>& active_channels,
+ bool is_local_update) {
+ auto it = rtp_data_channels_.begin();
+ while (it != rtp_data_channels_.end()) {
+ DataChannel* data_channel = it->second;
+ if (std::find(active_channels.begin(), active_channels.end(),
+ data_channel->label()) != active_channels.end()) {
+ ++it;
+ continue;
+ }
+
+ if (is_local_update) {
+ data_channel->SetSendSsrc(0);
+ } else {
+ data_channel->RemotePeerRequestClose();
+ }
+
+ if (data_channel->state() == DataChannel::kClosed) {
+ rtp_data_channels_.erase(it);
+ it = rtp_data_channels_.begin();
+ } else {
+ ++it;
+ }
+ }
+}
+
+void PeerConnection::CreateRemoteRtpDataChannel(const std::string& label,
+ uint32_t remote_ssrc) {
+ rtc::scoped_refptr<DataChannel> channel(
+ InternalCreateDataChannel(label, nullptr));
+ if (!channel.get()) {
+ LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
+ << "CreateDataChannel failed.";
+ return;
+ }
+ channel->SetReceiveSsrc(remote_ssrc);
+ observer_->OnDataChannel(
+ DataChannelProxy::Create(signaling_thread(), channel));
+}
+
+rtc::scoped_refptr<DataChannel> PeerConnection::InternalCreateDataChannel(
+ const std::string& label,
+ const InternalDataChannelInit* config) {
+ if (IsClosed()) {
+ return nullptr;
+ }
+ if (session_->data_channel_type() == cricket::DCT_NONE) {
+ LOG(LS_ERROR)
+ << "InternalCreateDataChannel: Data is not supported in this call.";
+ return nullptr;
+ }
+ InternalDataChannelInit new_config =
+ config ? (*config) : InternalDataChannelInit();
+ if (session_->data_channel_type() == cricket::DCT_SCTP) {
+ if (new_config.id < 0) {
+ rtc::SSLRole role;
+ if (session_->GetSslRole(&role) &&
+ !sid_allocator_.AllocateSid(role, &new_config.id)) {
+ LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
+ return nullptr;
+ }
+ } else if (!sid_allocator_.ReserveSid(new_config.id)) {
+ LOG(LS_ERROR) << "Failed to create a SCTP data channel "
+ << "because the id is already in use or out of range.";
+ return nullptr;
+ }
+ }
+
+ rtc::scoped_refptr<DataChannel> channel(DataChannel::Create(
+ session_.get(), session_->data_channel_type(), label, new_config));
+ if (!channel) {
+ sid_allocator_.ReleaseSid(new_config.id);
+ return nullptr;
+ }
+
+ if (channel->data_channel_type() == cricket::DCT_RTP) {
+ if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
+ LOG(LS_ERROR) << "DataChannel with label " << channel->label()
+ << " already exists.";
+ return nullptr;
+ }
+ rtp_data_channels_[channel->label()] = channel;
+ } else {
+ RTC_DCHECK(channel->data_channel_type() == cricket::DCT_SCTP);
+ sctp_data_channels_.push_back(channel);
+ channel->SignalClosed.connect(this,
+ &PeerConnection::OnSctpDataChannelClosed);
+ }
+
+ return channel;
+}
+
+bool PeerConnection::HasDataChannels() const {
+ return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
+}
+
+void PeerConnection::AllocateSctpSids(rtc::SSLRole role) {
+ for (const auto& channel : sctp_data_channels_) {
+ if (channel->id() < 0) {
+ int sid;
+ if (!sid_allocator_.AllocateSid(role, &sid)) {
+ LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
+ continue;
+ }
+ channel->SetSctpSid(sid);
+ }
+ }
+}
+
+void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
+ for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end();
+ ++it) {
+ if (it->get() == channel) {
+ if (channel->id() >= 0) {
+ sid_allocator_.ReleaseSid(channel->id());
+ }
+ sctp_data_channels_.erase(it);
+ return;
+ }
+ }
+}
+
+void PeerConnection::OnVoiceChannelDestroyed() {
+ EndRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
+}
+
+void PeerConnection::OnVideoChannelDestroyed() {
+ EndRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
+}
+
+void PeerConnection::OnDataChannelCreated() {
+ for (const auto& channel : sctp_data_channels_) {
+ channel->OnTransportChannelCreated();
+ }
+}
+
+void PeerConnection::OnDataChannelDestroyed() {
+ // Use a temporary copy of the RTP/SCTP DataChannel list because the
+ // DataChannel may callback to us and try to modify the list.
+ std::map<std::string, rtc::scoped_refptr<DataChannel>> temp_rtp_dcs;
+ temp_rtp_dcs.swap(rtp_data_channels_);
+ for (const auto& kv : temp_rtp_dcs) {
+ kv.second->OnTransportChannelDestroyed();
+ }
+
+ std::vector<rtc::scoped_refptr<DataChannel>> temp_sctp_dcs;
+ temp_sctp_dcs.swap(sctp_data_channels_);
+ for (const auto& channel : temp_sctp_dcs) {
+ channel->OnTransportChannelDestroyed();
+ }
+}
+
+void PeerConnection::OnDataChannelOpenMessage(
+ const std::string& label,
+ const InternalDataChannelInit& config) {
+ rtc::scoped_refptr<DataChannel> channel(
+ InternalCreateDataChannel(label, &config));
+ if (!channel.get()) {
+ LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
+ return;
+ }
+
+ observer_->OnDataChannel(
+ DataChannelProxy::Create(signaling_thread(), channel));
+}
+
+std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
+PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
+ return std::find_if(
+ senders_.begin(), senders_.end(),
+ [track](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
+ return sender->track() == track;
+ });
+}
+
+std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
+PeerConnection::FindReceiverForTrack(MediaStreamTrackInterface* track) {
+ return std::find_if(
+ receivers_.begin(), receivers_.end(),
+ [track](const rtc::scoped_refptr<RtpReceiverInterface>& receiver) {
+ return receiver->track() == track;
+ });
+}
+
+PeerConnection::TrackInfos* PeerConnection::GetRemoteTracks(
+ cricket::MediaType media_type) {
+ RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+ media_type == cricket::MEDIA_TYPE_VIDEO);
+ return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &remote_audio_tracks_
+ : &remote_video_tracks_;
+}
+
+PeerConnection::TrackInfos* PeerConnection::GetLocalTracks(
+ cricket::MediaType media_type) {
+ RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+ media_type == cricket::MEDIA_TYPE_VIDEO);
+ return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_tracks_
+ : &local_video_tracks_;
+}
+
+const PeerConnection::TrackInfo* PeerConnection::FindTrackInfo(
+ const PeerConnection::TrackInfos& infos,
+ const std::string& stream_label,
+ const std::string track_id) const {
+ for (const TrackInfo& track_info : infos) {
+ if (track_info.stream_label == stream_label &&
+ track_info.track_id == track_id) {
+ return &track_info;
+ }
+ }
+ return nullptr;
+}
+
+DataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
+ for (const auto& channel : sctp_data_channels_) {
+ if (channel->id() == sid) {
+ return channel;
+ }
+ }
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/peerconnection.h b/talk/app/webrtc/peerconnection.h
new file mode 100644
index 0000000000..2d388ae9f9
--- /dev/null
+++ b/talk/app/webrtc/peerconnection.h
@@ -0,0 +1,396 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTION_H_
+#define TALK_APP_WEBRTC_PEERCONNECTION_H_
+
+#include <string>
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/statscollector.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/webrtcsession.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+class RemoteMediaStreamFactory;
+
+typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
+ StunConfigurations;
+typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
+ TurnConfigurations;
+
+// Populates |session_options| from |rtc_options|, and returns true if options
+// are valid.
+bool ConvertRtcOptionsForOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+ cricket::MediaSessionOptions* session_options);
+
+// Populates |session_options| from |constraints|, and returns true if all
+// mandatory constraints are satisfied.
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+ cricket::MediaSessionOptions* session_options);
+
+// Parses the URLs for each server in |servers| to build |stun_config| and
+// |turn_config|.
+bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+ StunConfigurations* stun_config,
+ TurnConfigurations* turn_config);
+
+// PeerConnection implements the PeerConnectionInterface interface.
+// It uses WebRtcSession to implement the PeerConnection functionality.
+class PeerConnection : public PeerConnectionInterface,
+ public IceObserver,
+ public rtc::MessageHandler,
+ public sigslot::has_slots<> {
+ public:
+ explicit PeerConnection(PeerConnectionFactory* factory);
+
+ bool Initialize(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer);
+ rtc::scoped_refptr<StreamCollectionInterface> local_streams() override;
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override;
+ bool AddStream(MediaStreamInterface* local_stream) override;
+ void RemoveStream(MediaStreamInterface* local_stream) override;
+
+ virtual WebRtcSession* session() { return session_.get(); }
+
+ rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
+ AudioTrackInterface* track) override;
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+ const override;
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+ const override;
+
+ rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const DataChannelInit* config) override;
+ bool GetStats(StatsObserver* observer,
+ webrtc::MediaStreamTrackInterface* track,
+ StatsOutputLevel level) override;
+
+ SignalingState signaling_state() override;
+
+ // TODO(bemasc): Remove ice_state() when callers are removed.
+ IceState ice_state() override;
+ IceConnectionState ice_connection_state() override;
+ IceGatheringState ice_gathering_state() override;
+
+ const SessionDescriptionInterface* local_description() const override;
+ const SessionDescriptionInterface* remote_description() const override;
+
+ // JSEP01
+ void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints) override;
+ void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) override;
+ void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints) override;
+ void SetLocalDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override;
+ void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override;
+ bool SetConfiguration(
+ const PeerConnectionInterface::RTCConfiguration& config) override;
+ bool AddIceCandidate(const IceCandidateInterface* candidate) override;
+
+ void RegisterUMAObserver(UMAObserver* observer) override;
+
+ void Close() override;
+
+ // Virtual for unit tests.
+ virtual const std::vector<rtc::scoped_refptr<DataChannel>>&
+ sctp_data_channels() const {
+ return sctp_data_channels_;
+ };
+
+ protected:
+ ~PeerConnection() override;
+
+ private:
+ struct TrackInfo {
+ TrackInfo() : ssrc(0) {}
+ TrackInfo(const std::string& stream_label,
+ const std::string track_id,
+ uint32_t ssrc)
+ : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
+ std::string stream_label;
+ std::string track_id;
+ uint32_t ssrc;
+ };
+ typedef std::vector<TrackInfo> TrackInfos;
+
+ struct RemotePeerInfo {
+ RemotePeerInfo()
+ : msid_supported(false),
+ default_audio_track_needed(false),
+ default_video_track_needed(false) {}
+ // True if it has been discovered that the remote peer support MSID.
+ bool msid_supported;
+ // The remote peer indicates in the session description that audio will be
+ // sent but no MSID is given.
+ bool default_audio_track_needed;
+ // The remote peer indicates in the session description that video will be
+ // sent but no MSID is given.
+ bool default_video_track_needed;
+
+ bool IsDefaultMediaStreamNeeded() {
+ return !msid_supported &&
+ (default_audio_track_needed || default_video_track_needed);
+ }
+ };
+
+ // Implements MessageHandler.
+ void OnMessage(rtc::Message* msg) override;
+
+ void CreateAudioReceiver(MediaStreamInterface* stream,
+ AudioTrackInterface* audio_track,
+ uint32_t ssrc);
+ void CreateVideoReceiver(MediaStreamInterface* stream,
+ VideoTrackInterface* video_track,
+ uint32_t ssrc);
+ void DestroyAudioReceiver(MediaStreamInterface* stream,
+ AudioTrackInterface* audio_track);
+ void DestroyVideoReceiver(MediaStreamInterface* stream,
+ VideoTrackInterface* video_track);
+ void CreateAudioSender(MediaStreamInterface* stream,
+ AudioTrackInterface* audio_track,
+ uint32_t ssrc);
+ void CreateVideoSender(MediaStreamInterface* stream,
+ VideoTrackInterface* video_track,
+ uint32_t ssrc);
+ void DestroyAudioSender(MediaStreamInterface* stream,
+ AudioTrackInterface* audio_track,
+ uint32_t ssrc);
+ void DestroyVideoSender(MediaStreamInterface* stream,
+ VideoTrackInterface* video_track);
+
+ // Implements IceObserver
+ void OnIceConnectionChange(IceConnectionState new_state) override;
+ void OnIceGatheringChange(IceGatheringState new_state) override;
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+ void OnIceComplete() override;
+ void OnIceConnectionReceivingChange(bool receiving) override;
+
+ // Signals from WebRtcSession.
+ void OnSessionStateChange(WebRtcSession* session, WebRtcSession::State state);
+ void ChangeSignalingState(SignalingState signaling_state);
+
+ rtc::Thread* signaling_thread() const {
+ return factory_->signaling_thread();
+ }
+
+ void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer,
+ const std::string& error);
+ void PostCreateSessionDescriptionFailure(
+ CreateSessionDescriptionObserver* observer,
+ const std::string& error);
+
+ bool IsClosed() const {
+ return signaling_state_ == PeerConnectionInterface::kClosed;
+ }
+
+ // Returns a MediaSessionOptions struct with options decided by |options|,
+ // the local MediaStreams and DataChannels.
+ virtual bool GetOptionsForOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+ cricket::MediaSessionOptions* session_options);
+
+ // Returns a MediaSessionOptions struct with options decided by
+ // |constraints|, the local MediaStreams and DataChannels.
+ virtual bool GetOptionsForAnswer(
+ const MediaConstraintsInterface* constraints,
+ cricket::MediaSessionOptions* session_options);
+
+ // Makes sure a MediaStream Track is created for each StreamParam in
+ // |streams|. |media_type| is the type of the |streams| and can be either
+ // audio or video.
+ // If a new MediaStream is created it is added to |new_streams|.
+ void UpdateRemoteStreamsList(
+ const std::vector<cricket::StreamParams>& streams,
+ cricket::MediaType media_type,
+ StreamCollection* new_streams);
+
+ // Triggered when a remote track has been seen for the first time in a remote
+ // session description. It creates a remote MediaStreamTrackInterface
+ // implementation and triggers CreateAudioReceiver or CreateVideoReceiver.
+ void OnRemoteTrackSeen(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type);
+
+ // Triggered when a remote track has been removed from a remote session
+ // description. It removes the remote track with id |track_id| from a remote
+ // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver.
+ void OnRemoteTrackRemoved(const std::string& stream_label,
+ const std::string& track_id,
+ cricket::MediaType media_type);
+
+ // Finds remote MediaStreams without any tracks and removes them from
+ // |remote_streams_| and notifies the observer that the MediaStreams no longer
+ // exist.
+ void UpdateEndedRemoteMediaStreams();
+
+ void MaybeCreateDefaultStream();
+
+ // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
+ // tracks of type |media_type|.
+ void EndRemoteTracks(cricket::MediaType media_type);
+
+ // Loops through the vector of |streams| and finds added and removed
+ // StreamParams since last time this method was called.
+ // For each new or removed StreamParam, OnLocalTrackSeen or
+ // OnLocalTrackRemoved is invoked.
+ void UpdateLocalTracks(const std::vector<cricket::StreamParams>& streams,
+ cricket::MediaType media_type);
+
+ // Triggered when a local track has been seen for the first time in a local
+ // session description.
+ // This method triggers CreateAudioSender or CreateVideoSender if the rtp
+ // streams in the local SessionDescription can be mapped to a MediaStreamTrack
+ // in a MediaStream in |local_streams_|
+ void OnLocalTrackSeen(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type);
+
+ // Triggered when a local track has been removed from a local session
+ // description.
+ // This method triggers DestroyAudioSender or DestroyVideoSender if a stream
+ // has been removed from the local SessionDescription and the stream can be
+ // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|.
+ void OnLocalTrackRemoved(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type);
+
+ void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
+ void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
+ void UpdateClosingRtpDataChannels(
+ const std::vector<std::string>& active_channels,
+ bool is_local_update);
+ void CreateRemoteRtpDataChannel(const std::string& label,
+ uint32_t remote_ssrc);
+
+ // Creates channel and adds it to the collection of DataChannels that will
+ // be offered in a SessionDescription.
+ rtc::scoped_refptr<DataChannel> InternalCreateDataChannel(
+ const std::string& label,
+ const InternalDataChannelInit* config);
+
+ // Checks if any data channel has been added.
+ bool HasDataChannels() const;
+
+ void AllocateSctpSids(rtc::SSLRole role);
+ void OnSctpDataChannelClosed(DataChannel* channel);
+
+ // Notifications from WebRtcSession relating to BaseChannels.
+ void OnVoiceChannelDestroyed();
+ void OnVideoChannelDestroyed();
+ void OnDataChannelCreated();
+ void OnDataChannelDestroyed();
+ // Called when the cricket::DataChannel receives a message indicating that a
+ // webrtc::DataChannel should be opened.
+ void OnDataChannelOpenMessage(const std::string& label,
+ const InternalDataChannelInit& config);
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
+ FindSenderForTrack(MediaStreamTrackInterface* track);
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
+ FindReceiverForTrack(MediaStreamTrackInterface* track);
+
+ TrackInfos* GetRemoteTracks(cricket::MediaType media_type);
+ TrackInfos* GetLocalTracks(cricket::MediaType media_type);
+ const TrackInfo* FindTrackInfo(const TrackInfos& infos,
+ const std::string& stream_label,
+ const std::string track_id) const;
+
+ // Returns the specified SCTP DataChannel in sctp_data_channels_,
+ // or nullptr if not found.
+ DataChannel* FindDataChannelBySid(int sid) const;
+
+ // Storing the factory as a scoped reference pointer ensures that the memory
+ // in the PeerConnectionFactoryImpl remains available as long as the
+ // PeerConnection is running. It is passed to PeerConnection as a raw pointer.
+ // However, since the reference counting is done in the
+ // PeerConnectionFactoryInterface all instances created using the raw pointer
+ // will refer to the same reference count.
+ rtc::scoped_refptr<PeerConnectionFactory> factory_;
+ PeerConnectionObserver* observer_;
+ UMAObserver* uma_observer_;
+ SignalingState signaling_state_;
+ // TODO(bemasc): Remove ice_state_.
+ IceState ice_state_;
+ IceConnectionState ice_connection_state_;
+ IceGatheringState ice_gathering_state_;
+
+ rtc::scoped_ptr<cricket::PortAllocator> port_allocator_;
+ rtc::scoped_ptr<MediaControllerInterface> media_controller_;
+
+ // Streams added via AddStream.
+ rtc::scoped_refptr<StreamCollection> local_streams_;
+ // Streams created as a result of SetRemoteDescription.
+ rtc::scoped_refptr<StreamCollection> remote_streams_;
+
+ // These lists store track info seen in local/remote descriptions.
+ TrackInfos remote_audio_tracks_;
+ TrackInfos remote_video_tracks_;
+ TrackInfos local_audio_tracks_;
+ TrackInfos local_video_tracks_;
+
+ SctpSidAllocator sid_allocator_;
+ // label -> DataChannel
+ std::map<std::string, rtc::scoped_refptr<DataChannel>> rtp_data_channels_;
+ std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_;
+
+ RemotePeerInfo remote_info_;
+ rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;
+
+ // The session_ scoped_ptr is declared at the bottom of PeerConnection
+ // because its destruction fires signals (such as VoiceChannelDestroyed)
+ // which will trigger some final actions in PeerConnection...
+ rtc::scoped_ptr<WebRtcSession> session_;
+ // ... But stats_ depends on session_ so it should be destroyed even earlier.
+ rtc::scoped_ptr<StatsCollector> stats_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PEERCONNECTION_H_
diff --git a/talk/app/webrtc/peerconnection_unittest.cc b/talk/app/webrtc/peerconnection_unittest.cc
new file mode 100644
index 0000000000..3cf66d64d8
--- /dev/null
+++ b/talk/app/webrtc/peerconnection_unittest.cc
@@ -0,0 +1,1752 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <list>
+#include <map>
+#include <vector>
+
+#include "talk/app/webrtc/dtmfsender.h"
+#include "talk/app/webrtc/fakemetricsobserver.h"
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/app/webrtc/localaudiosource.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/test/fakeaudiocapturemodule.h"
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
+#include "talk/app/webrtc/test/fakeperiodicvideocapturer.h"
+#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
+#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/media/webrtc/fakewebrtcvideoengine.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/virtualsocketserver.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/sessiondescription.h"
+
+#define MAYBE_SKIP_TEST(feature) \
+ if (!(feature())) { \
+ LOG(LS_INFO) << "Feature disabled... skipping"; \
+ return; \
+ }
+
+using cricket::ContentInfo;
+using cricket::FakeWebRtcVideoDecoder;
+using cricket::FakeWebRtcVideoDecoderFactory;
+using cricket::FakeWebRtcVideoEncoder;
+using cricket::FakeWebRtcVideoEncoderFactory;
+using cricket::MediaContentDescription;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInterface;
+using webrtc::DtmfSender;
+using webrtc::DtmfSenderInterface;
+using webrtc::DtmfSenderObserverInterface;
+using webrtc::FakeConstraints;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockCreateSessionDescriptionObserver;
+using webrtc::MockDataChannelObserver;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::MockStatsObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionFactory;
+using webrtc::SessionDescriptionInterface;
+using webrtc::StreamCollectionInterface;
+
+static const int kMaxWaitMs = 10000;
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+// This declaration is also #ifdef'd as it causes uninitialized-variable
+// warnings.
+#if !defined(THREAD_SANITIZER)
+static const int kMaxWaitForStatsMs = 3000;
+#endif
+static const int kMaxWaitForFramesMs = 10000;
+static const int kEndAudioFrameCount = 3;
+static const int kEndVideoFrameCount = 3;
+
+static const char kStreamLabelBase[] = "stream_label";
+static const char kVideoTrackLabelBase[] = "video_track";
+static const char kAudioTrackLabelBase[] = "audio_track";
+static const char kDataChannelLabel[] = "data_channel";
+
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+// This declaration is also #ifdef'd as it causes unused-variable errors.
+#if !defined(THREAD_SANITIZER)
+// SRTP cipher name negotiated by the tests. This must be updated if the
+// default changes.
+static const char kDefaultSrtpCipher[] = "AES_CM_128_HMAC_SHA1_32";
+#endif
+
+static void RemoveLinesFromSdp(const std::string& line_start,
+ std::string* sdp) {
+ const char kSdpLineEnd[] = "\r\n";
+ size_t ssrc_pos = 0;
+ while ((ssrc_pos = sdp->find(line_start, ssrc_pos)) !=
+ std::string::npos) {
+ size_t end_ssrc = sdp->find(kSdpLineEnd, ssrc_pos);
+ sdp->erase(ssrc_pos, end_ssrc - ssrc_pos + strlen(kSdpLineEnd));
+ }
+}
+
+class SignalingMessageReceiver {
+ public:
+ virtual void ReceiveSdpMessage(const std::string& type,
+ std::string& msg) = 0;
+ virtual void ReceiveIceMessage(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) = 0;
+
+ protected:
+ SignalingMessageReceiver() {}
+ virtual ~SignalingMessageReceiver() {}
+};
+
+class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
+ public SignalingMessageReceiver {
+ public:
+ static PeerConnectionTestClient* CreateClient(
+ const std::string& id,
+ const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options) {
+ PeerConnectionTestClient* client(new PeerConnectionTestClient(id));
+ if (!client->Init(constraints, options)) {
+ delete client;
+ return nullptr;
+ }
+ return client;
+ }
+
+ ~PeerConnectionTestClient() {
+ while (!fake_video_renderers_.empty()) {
+ RenderMap::iterator it = fake_video_renderers_.begin();
+ delete it->second;
+ fake_video_renderers_.erase(it);
+ }
+ }
+
+ void Negotiate() { Negotiate(true, true); }
+
+ void Negotiate(bool audio, bool video) {
+ rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use()));
+
+ if (offer->description()->GetContentByName("audio")) {
+ offer->description()->GetContentByName("audio")->rejected = !audio;
+ }
+ if (offer->description()->GetContentByName("video")) {
+ offer->description()->GetContentByName("video")->rejected = !video;
+ }
+
+ std::string sdp;
+ EXPECT_TRUE(offer->ToString(&sdp));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
+ signaling_message_receiver_->ReceiveSdpMessage(
+ webrtc::SessionDescriptionInterface::kOffer, sdp);
+ }
+
+ // SignalingMessageReceiver callback.
+ void ReceiveSdpMessage(const std::string& type, std::string& msg) override {
+ FilterIncomingSdpMessage(&msg);
+ if (type == webrtc::SessionDescriptionInterface::kOffer) {
+ HandleIncomingOffer(msg);
+ } else {
+ HandleIncomingAnswer(msg);
+ }
+ }
+
+ // SignalingMessageReceiver callback.
+ void ReceiveIceMessage(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) override {
+ LOG(INFO) << id_ << "ReceiveIceMessage";
+ rtc::scoped_ptr<webrtc::IceCandidateInterface> candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, msg, nullptr));
+ EXPECT_TRUE(pc()->AddIceCandidate(candidate.get()));
+ }
+
+ // PeerConnectionObserver callbacks.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override {
+ EXPECT_EQ(pc()->signaling_state(), new_state);
+ }
+ void OnAddStream(webrtc::MediaStreamInterface* media_stream) override {
+ for (size_t i = 0; i < media_stream->GetVideoTracks().size(); ++i) {
+ const std::string id = media_stream->GetVideoTracks()[i]->id();
+ ASSERT_TRUE(fake_video_renderers_.find(id) ==
+ fake_video_renderers_.end());
+ fake_video_renderers_[id] =
+ new webrtc::FakeVideoTrackRenderer(media_stream->GetVideoTracks()[i]);
+ }
+ }
+ void OnRemoveStream(webrtc::MediaStreamInterface* media_stream) override {}
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
+ EXPECT_EQ(pc()->ice_connection_state(), new_state);
+ }
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {
+ EXPECT_EQ(pc()->ice_gathering_state(), new_state);
+ }
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+ LOG(INFO) << id_ << "OnIceCandidate";
+
+ std::string ice_sdp;
+ EXPECT_TRUE(candidate->ToString(&ice_sdp));
+ if (signaling_message_receiver_ == nullptr) {
+ // Remote party may be deleted.
+ return;
+ }
+ signaling_message_receiver_->ReceiveIceMessage(
+ candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
+ }
+
+ void SetVideoConstraints(const webrtc::FakeConstraints& video_constraint) {
+ video_constraints_ = video_constraint;
+ }
+
+ void AddMediaStream(bool audio, bool video) {
+ std::string stream_label =
+ kStreamLabelBase +
+ rtc::ToString<int>(static_cast<int>(pc()->local_streams()->count()));
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ peer_connection_factory_->CreateLocalMediaStream(stream_label);
+
+ if (audio && can_receive_audio()) {
+ FakeConstraints constraints;
+ // Disable highpass filter so that we can get all the test audio frames.
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kHighpassFilter, false);
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(&constraints);
+ // TODO(perkj): Test audio source when it is implemented. Currently audio
+ // always use the default input.
+ std::string label = stream_label + kAudioTrackLabelBase;
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ peer_connection_factory_->CreateAudioTrack(label, source));
+ stream->AddTrack(audio_track);
+ }
+ if (video && can_receive_video()) {
+ stream->AddTrack(CreateLocalVideoTrack(stream_label));
+ }
+
+ EXPECT_TRUE(pc()->AddStream(stream));
+ }
+
+ size_t NumberOfLocalMediaStreams() { return pc()->local_streams()->count(); }
+
+ bool SessionActive() {
+ return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
+ }
+
+ void set_signaling_message_receiver(
+ SignalingMessageReceiver* signaling_message_receiver) {
+ signaling_message_receiver_ = signaling_message_receiver;
+ }
+
+ void EnableVideoDecoderFactory() {
+ video_decoder_factory_enabled_ = true;
+ fake_video_decoder_factory_->AddSupportedVideoCodecType(
+ webrtc::kVideoCodecVP8);
+ }
+
+ void IceRestart() {
+ session_description_constraints_.SetMandatoryIceRestart(true);
+ SetExpectIceRestart(true);
+ }
+
+ void SetExpectIceRestart(bool expect_restart) {
+ expect_ice_restart_ = expect_restart;
+ }
+
+ bool ExpectIceRestart() const { return expect_ice_restart_; }
+
+ void SetReceiveAudioVideo(bool audio, bool video) {
+ SetReceiveAudio(audio);
+ SetReceiveVideo(video);
+ ASSERT_EQ(audio, can_receive_audio());
+ ASSERT_EQ(video, can_receive_video());
+ }
+
+ void SetReceiveAudio(bool audio) {
+ if (audio && can_receive_audio())
+ return;
+ session_description_constraints_.SetMandatoryReceiveAudio(audio);
+ }
+
+ void SetReceiveVideo(bool video) {
+ if (video && can_receive_video())
+ return;
+ session_description_constraints_.SetMandatoryReceiveVideo(video);
+ }
+
+ void RemoveMsidFromReceivedSdp(bool remove) { remove_msid_ = remove; }
+
+ void RemoveSdesCryptoFromReceivedSdp(bool remove) { remove_sdes_ = remove; }
+
+ void RemoveBundleFromReceivedSdp(bool remove) { remove_bundle_ = remove; }
+
+ bool can_receive_audio() {
+ bool value;
+ if (webrtc::FindConstraint(&session_description_constraints_,
+ MediaConstraintsInterface::kOfferToReceiveAudio,
+ &value, nullptr)) {
+ return value;
+ }
+ return true;
+ }
+
+ bool can_receive_video() {
+ bool value;
+ if (webrtc::FindConstraint(&session_description_constraints_,
+ MediaConstraintsInterface::kOfferToReceiveVideo,
+ &value, nullptr)) {
+ return value;
+ }
+ return true;
+ }
+
+ void OnIceComplete() override { LOG(INFO) << id_ << "OnIceComplete"; }
+
+ void OnDataChannel(DataChannelInterface* data_channel) override {
+ LOG(INFO) << id_ << "OnDataChannel";
+ data_channel_ = data_channel;
+ data_observer_.reset(new MockDataChannelObserver(data_channel));
+ }
+
+ void CreateDataChannel() {
+ data_channel_ = pc()->CreateDataChannel(kDataChannelLabel, nullptr);
+ ASSERT_TRUE(data_channel_.get() != nullptr);
+ data_observer_.reset(new MockDataChannelObserver(data_channel_));
+ }
+
+ DataChannelInterface* data_channel() { return data_channel_; }
+ const MockDataChannelObserver* data_observer() const {
+ return data_observer_.get();
+ }
+
+ webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
+
+ void StopVideoCapturers() {
+ for (std::vector<cricket::VideoCapturer*>::iterator it =
+ video_capturers_.begin();
+ it != video_capturers_.end(); ++it) {
+ (*it)->Stop();
+ }
+ }
+
+ bool AudioFramesReceivedCheck(int number_of_frames) const {
+ return number_of_frames <= fake_audio_capture_module_->frames_received();
+ }
+
+ bool VideoFramesReceivedCheck(int number_of_frames) {
+ if (video_decoder_factory_enabled_) {
+ const std::vector<FakeWebRtcVideoDecoder*>& decoders
+ = fake_video_decoder_factory_->decoders();
+ if (decoders.empty()) {
+ return number_of_frames <= 0;
+ }
+
+ for (std::vector<FakeWebRtcVideoDecoder*>::const_iterator
+ it = decoders.begin(); it != decoders.end(); ++it) {
+ if (number_of_frames > (*it)->GetNumFramesReceived()) {
+ return false;
+ }
+ }
+ return true;
+ } else {
+ if (fake_video_renderers_.empty()) {
+ return number_of_frames <= 0;
+ }
+
+ for (RenderMap::const_iterator it = fake_video_renderers_.begin();
+ it != fake_video_renderers_.end(); ++it) {
+ if (number_of_frames > it->second->num_rendered_frames()) {
+ return false;
+ }
+ }
+ return true;
+ }
+ }
+
+ // Verify the CreateDtmfSender interface
+ void VerifyDtmf() {
+ rtc::scoped_ptr<DummyDtmfObserver> observer(new DummyDtmfObserver());
+ rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender;
+
+ // We can't create a DTMF sender with an invalid audio track or a non local
+ // track.
+ EXPECT_TRUE(peer_connection_->CreateDtmfSender(nullptr) == nullptr);
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> non_localtrack(
+ peer_connection_factory_->CreateAudioTrack("dummy_track", nullptr));
+ EXPECT_TRUE(peer_connection_->CreateDtmfSender(non_localtrack) == nullptr);
+
+ // We should be able to create a DTMF sender from a local track.
+ webrtc::AudioTrackInterface* localtrack =
+ peer_connection_->local_streams()->at(0)->GetAudioTracks()[0];
+ dtmf_sender = peer_connection_->CreateDtmfSender(localtrack);
+ EXPECT_TRUE(dtmf_sender.get() != nullptr);
+ dtmf_sender->RegisterObserver(observer.get());
+
+ // Test the DtmfSender object just created.
+ EXPECT_TRUE(dtmf_sender->CanInsertDtmf());
+ EXPECT_TRUE(dtmf_sender->InsertDtmf("1a", 100, 50));
+
+ // We don't need to verify that the DTMF tones are actually sent out because
+ // that is already covered by the tests of the lower level components.
+
+ EXPECT_TRUE_WAIT(observer->completed(), kMaxWaitMs);
+ std::vector<std::string> tones;
+ tones.push_back("1");
+ tones.push_back("a");
+ tones.push_back("");
+ observer->Verify(tones);
+
+ dtmf_sender->UnregisterObserver();
+ }
+
+ // Verifies that the SessionDescription have rejected the appropriate media
+ // content.
+ void VerifyRejectedMediaInSessionDescription() {
+ ASSERT_TRUE(peer_connection_->remote_description() != nullptr);
+ ASSERT_TRUE(peer_connection_->local_description() != nullptr);
+ const cricket::SessionDescription* remote_desc =
+ peer_connection_->remote_description()->description();
+ const cricket::SessionDescription* local_desc =
+ peer_connection_->local_description()->description();
+
+ const ContentInfo* remote_audio_content = GetFirstAudioContent(remote_desc);
+ if (remote_audio_content) {
+ const ContentInfo* audio_content =
+ GetFirstAudioContent(local_desc);
+ EXPECT_EQ(can_receive_audio(), !audio_content->rejected);
+ }
+
+ const ContentInfo* remote_video_content = GetFirstVideoContent(remote_desc);
+ if (remote_video_content) {
+ const ContentInfo* video_content =
+ GetFirstVideoContent(local_desc);
+ EXPECT_EQ(can_receive_video(), !video_content->rejected);
+ }
+ }
+
+ void VerifyLocalIceUfragAndPassword() {
+ ASSERT_TRUE(peer_connection_->local_description() != nullptr);
+ const cricket::SessionDescription* desc =
+ peer_connection_->local_description()->description();
+ const cricket::ContentInfos& contents = desc->contents();
+
+ for (size_t index = 0; index < contents.size(); ++index) {
+ if (contents[index].rejected)
+ continue;
+ const cricket::TransportDescription* transport_desc =
+ desc->GetTransportDescriptionByName(contents[index].name);
+
+ std::map<int, IceUfragPwdPair>::const_iterator ufragpair_it =
+ ice_ufrag_pwd_.find(static_cast<int>(index));
+ if (ufragpair_it == ice_ufrag_pwd_.end()) {
+ ASSERT_FALSE(ExpectIceRestart());
+ ice_ufrag_pwd_[static_cast<int>(index)] =
+ IceUfragPwdPair(transport_desc->ice_ufrag, transport_desc->ice_pwd);
+ } else if (ExpectIceRestart()) {
+ const IceUfragPwdPair& ufrag_pwd = ufragpair_it->second;
+ EXPECT_NE(ufrag_pwd.first, transport_desc->ice_ufrag);
+ EXPECT_NE(ufrag_pwd.second, transport_desc->ice_pwd);
+ } else {
+ const IceUfragPwdPair& ufrag_pwd = ufragpair_it->second;
+ EXPECT_EQ(ufrag_pwd.first, transport_desc->ice_ufrag);
+ EXPECT_EQ(ufrag_pwd.second, transport_desc->ice_pwd);
+ }
+ }
+ }
+
+ int GetAudioOutputLevelStats(webrtc::MediaStreamTrackInterface* track) {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->AudioOutputLevel();
+ }
+
+ int GetAudioInputLevelStats() {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->AudioInputLevel();
+ }
+
+ int GetBytesReceivedStats(webrtc::MediaStreamTrackInterface* track) {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->BytesReceived();
+ }
+
+ int GetBytesSentStats(webrtc::MediaStreamTrackInterface* track) {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->BytesSent();
+ }
+
+ int GetAvailableReceivedBandwidthStats() {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ int bw = observer->AvailableReceiveBandwidth();
+ return bw;
+ }
+
+ std::string GetDtlsCipherStats() {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->DtlsCipher();
+ }
+
+ std::string GetSrtpCipherStats() {
+ rtc::scoped_refptr<MockStatsObserver>
+ observer(new rtc::RefCountedObject<MockStatsObserver>());
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
+ return observer->SrtpCipher();
+ }
+
+ int rendered_width() {
+ EXPECT_FALSE(fake_video_renderers_.empty());
+ return fake_video_renderers_.empty() ? 1 :
+ fake_video_renderers_.begin()->second->width();
+ }
+
+ int rendered_height() {
+ EXPECT_FALSE(fake_video_renderers_.empty());
+ return fake_video_renderers_.empty() ? 1 :
+ fake_video_renderers_.begin()->second->height();
+ }
+
+ size_t number_of_remote_streams() {
+ if (!pc())
+ return 0;
+ return pc()->remote_streams()->count();
+ }
+
+ StreamCollectionInterface* remote_streams() {
+ if (!pc()) {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ return pc()->remote_streams();
+ }
+
+ StreamCollectionInterface* local_streams() {
+ if (!pc()) {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ return pc()->local_streams();
+ }
+
+ webrtc::PeerConnectionInterface::SignalingState signaling_state() {
+ return pc()->signaling_state();
+ }
+
+ webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() {
+ return pc()->ice_connection_state();
+ }
+
+ webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() {
+ return pc()->ice_gathering_state();
+ }
+
+ private:
+ class DummyDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+ DummyDtmfObserver() : completed_(false) {}
+
+ // Implements DtmfSenderObserverInterface.
+ void OnToneChange(const std::string& tone) override {
+ tones_.push_back(tone);
+ if (tone.empty()) {
+ completed_ = true;
+ }
+ }
+
+ void Verify(const std::vector<std::string>& tones) const {
+ ASSERT_TRUE(tones_.size() == tones.size());
+ EXPECT_TRUE(std::equal(tones.begin(), tones.end(), tones_.begin()));
+ }
+
+ bool completed() const { return completed_; }
+
+ private:
+ bool completed_;
+ std::vector<std::string> tones_;
+ };
+
+ explicit PeerConnectionTestClient(const std::string& id) : id_(id) {}
+
+ bool Init(const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options) {
+ EXPECT_TRUE(!peer_connection_);
+ EXPECT_TRUE(!peer_connection_factory_);
+ allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
+ if (!allocator_factory_) {
+ return false;
+ }
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+
+ if (fake_audio_capture_module_ == nullptr) {
+ return false;
+ }
+ fake_video_decoder_factory_ = new FakeWebRtcVideoDecoderFactory();
+ fake_video_encoder_factory_ = new FakeWebRtcVideoEncoderFactory();
+ peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(),
+ fake_audio_capture_module_, fake_video_encoder_factory_,
+ fake_video_decoder_factory_);
+ if (!peer_connection_factory_) {
+ return false;
+ }
+ if (options) {
+ peer_connection_factory_->SetOptions(*options);
+ }
+ peer_connection_ = CreatePeerConnection(allocator_factory_.get(),
+ constraints);
+ return peer_connection_.get() != nullptr;
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface>
+ CreateLocalVideoTrack(const std::string stream_label) {
+ // Set max frame rate to 10fps to reduce the risk of the tests to be flaky.
+ FakeConstraints source_constraints = video_constraints_;
+ source_constraints.SetMandatoryMaxFrameRate(10);
+
+ cricket::FakeVideoCapturer* fake_capturer =
+ new webrtc::FakePeriodicVideoCapturer();
+ video_capturers_.push_back(fake_capturer);
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ peer_connection_factory_->CreateVideoSource(
+ fake_capturer, &source_constraints);
+ std::string label = stream_label + kVideoTrackLabelBase;
+ return peer_connection_factory_->CreateVideoTrack(label, source);
+ }
+
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
+ webrtc::PortAllocatorFactoryInterface* factory,
+ const MediaConstraintsInterface* constraints) {
+ // CreatePeerConnection with IceServers.
+ webrtc::PeerConnectionInterface::IceServers ice_servers;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = "stun:stun.l.google.com:19302";
+ ice_servers.push_back(ice_server);
+
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
+ rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+ : nullptr);
+ return peer_connection_factory_->CreatePeerConnection(
+ ice_servers, constraints, factory, dtls_identity_store.Pass(), this);
+ }
+
+ void HandleIncomingOffer(const std::string& msg) {
+ LOG(INFO) << id_ << "HandleIncomingOffer ";
+ if (NumberOfLocalMediaStreams() == 0) {
+ // If we are not sending any streams ourselves it is time to add some.
+ AddMediaStream(true, true);
+ }
+ rtc::scoped_ptr<SessionDescriptionInterface> desc(
+ webrtc::CreateSessionDescription("offer", msg, nullptr));
+ EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
+ rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ EXPECT_TRUE(DoCreateAnswer(answer.use()));
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ EXPECT_TRUE(DoSetLocalDescription(answer.release()));
+ if (signaling_message_receiver_) {
+ signaling_message_receiver_->ReceiveSdpMessage(
+ webrtc::SessionDescriptionInterface::kAnswer, sdp);
+ }
+ }
+
+ void HandleIncomingAnswer(const std::string& msg) {
+ LOG(INFO) << id_ << "HandleIncomingAnswer";
+ rtc::scoped_ptr<SessionDescriptionInterface> desc(
+ webrtc::CreateSessionDescription("answer", msg, nullptr));
+ EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
+ }
+
+ bool DoCreateOfferAnswer(SessionDescriptionInterface** desc,
+ bool offer) {
+ rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockCreateSessionDescriptionObserver>());
+ if (offer) {
+ pc()->CreateOffer(observer, &session_description_constraints_);
+ } else {
+ pc()->CreateAnswer(observer, &session_description_constraints_);
+ }
+ EXPECT_EQ_WAIT(true, observer->called(), kMaxWaitMs);
+ *desc = observer->release_desc();
+ if (observer->result() && ExpectIceRestart()) {
+ EXPECT_EQ(0u, (*desc)->candidates(0)->count());
+ }
+ return observer->result();
+ }
+
+ bool DoCreateOffer(SessionDescriptionInterface** desc) {
+ return DoCreateOfferAnswer(desc, true);
+ }
+
+ bool DoCreateAnswer(SessionDescriptionInterface** desc) {
+ return DoCreateOfferAnswer(desc, false);
+ }
+
+ bool DoSetLocalDescription(SessionDescriptionInterface* desc) {
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockSetSessionDescriptionObserver>());
+ LOG(INFO) << id_ << "SetLocalDescription ";
+ pc()->SetLocalDescription(observer, desc);
+ // Ignore the observer result. If we wait for the result with
+ // EXPECT_TRUE_WAIT, local ice candidates might be sent to the remote peer
+ // before the offer which is an error.
+ // The reason is that EXPECT_TRUE_WAIT uses
+ // rtc::Thread::Current()->ProcessMessages(1);
+ // ProcessMessages waits at least 1ms but processes all messages before
+ // returning. Since this test is synchronous and send messages to the remote
+ // peer whenever a callback is invoked, this can lead to messages being
+ // sent to the remote peer in the wrong order.
+ // TODO(perkj): Find a way to check the result without risking that the
+ // order of sent messages are changed. Ex- by posting all messages that are
+ // sent to the remote peer.
+ return true;
+ }
+
+ bool DoSetRemoteDescription(SessionDescriptionInterface* desc) {
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockSetSessionDescriptionObserver>());
+ LOG(INFO) << id_ << "SetRemoteDescription ";
+ pc()->SetRemoteDescription(observer, desc);
+ EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ return observer->result();
+ }
+
+ // This modifies all received SDP messages before they are processed.
+ void FilterIncomingSdpMessage(std::string* sdp) {
+ if (remove_msid_) {
+ const char kSdpSsrcAttribute[] = "a=ssrc:";
+ RemoveLinesFromSdp(kSdpSsrcAttribute, sdp);
+ const char kSdpMsidSupportedAttribute[] = "a=msid-semantic:";
+ RemoveLinesFromSdp(kSdpMsidSupportedAttribute, sdp);
+ }
+ if (remove_bundle_) {
+ const char kSdpBundleAttribute[] = "a=group:BUNDLE";
+ RemoveLinesFromSdp(kSdpBundleAttribute, sdp);
+ }
+ if (remove_sdes_) {
+ const char kSdpSdesCryptoAttribute[] = "a=crypto";
+ RemoveLinesFromSdp(kSdpSdesCryptoAttribute, sdp);
+ }
+ }
+
+ std::string id_;
+
+ rtc::scoped_refptr<webrtc::PortAllocatorFactoryInterface> allocator_factory_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+
+ typedef std::pair<std::string, std::string> IceUfragPwdPair;
+ std::map<int, IceUfragPwdPair> ice_ufrag_pwd_;
+ bool expect_ice_restart_ = false;
+
+ // Needed to keep track of number of frames send.
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ // Needed to keep track of number of frames received.
+ typedef std::map<std::string, webrtc::FakeVideoTrackRenderer*> RenderMap;
+ RenderMap fake_video_renderers_;
+ // Needed to keep track of number of frames received when external decoder
+ // used.
+ FakeWebRtcVideoDecoderFactory* fake_video_decoder_factory_ = nullptr;
+ FakeWebRtcVideoEncoderFactory* fake_video_encoder_factory_ = nullptr;
+ bool video_decoder_factory_enabled_ = false;
+ webrtc::FakeConstraints video_constraints_;
+
+ // For remote peer communication.
+ SignalingMessageReceiver* signaling_message_receiver_ = nullptr;
+
+ // Store references to the video capturers we've created, so that we can stop
+ // them, if required.
+ std::vector<cricket::VideoCapturer*> video_capturers_;
+
+ webrtc::FakeConstraints session_description_constraints_;
+ bool remove_msid_ = false; // True if MSID should be removed in received SDP.
+ bool remove_bundle_ =
+ false; // True if bundle should be removed in received SDP.
+ bool remove_sdes_ =
+ false; // True if a=crypto should be removed in received SDP.
+
+ rtc::scoped_refptr<DataChannelInterface> data_channel_;
+ rtc::scoped_ptr<MockDataChannelObserver> data_observer_;
+};
+
+// TODO(deadbeef): Rename this to P2PTestConductor once the Linux memcheck and
+// Windows DrMemory Full bots' blacklists are updated.
+// Test fixture that conducts a two-party call between a caller
+// (initiating_client_) and a callee (receiving_client_) over a virtual socket
+// server, relaying signaling directly between the two clients and verifying
+// that media and data flow as expected.
+class JsepPeerConnectionP2PTestClient : public testing::Test {
+ public:
+  // Builds a virtual network (VirtualSocketServer layered on the physical
+  // one) that both test clients share for the duration of the test.
+  JsepPeerConnectionP2PTestClient()
+      : pss_(new rtc::PhysicalSocketServer),
+        ss_(new rtc::VirtualSocketServer(pss_.get())),
+        ss_scope_(ss_.get()) {}
+
+  // True once both clients report an active session.
+  bool SessionActive() {
+    return initiating_client_->SessionActive() &&
+           receiving_client_->SessionActive();
+  }
+
+  // Return true if the number of frames provided have been received or it is
+  // known that that will never occur (e.g. no frames will be sent or
+  // captured).
+  bool FramesNotPending(int audio_frames_to_receive,
+                        int video_frames_to_receive) {
+    return VideoFramesReceivedCheck(video_frames_to_receive) &&
+           AudioFramesReceivedCheck(audio_frames_to_receive);
+  }
+  bool AudioFramesReceivedCheck(int frames_received) {
+    return initiating_client_->AudioFramesReceivedCheck(frames_received) &&
+           receiving_client_->AudioFramesReceivedCheck(frames_received);
+  }
+  bool VideoFramesReceivedCheck(int frames_received) {
+    return initiating_client_->VideoFramesReceivedCheck(frames_received) &&
+           receiving_client_->VideoFramesReceivedCheck(frames_received);
+  }
+  // Sends DTMF from each side and verifies the peer observed it.
+  void VerifyDtmf() {
+    initiating_client_->VerifyDtmf();
+    receiving_client_->VerifyDtmf();
+  }
+
+  // Re-negotiates with video rejected and verifies video stops flowing while
+  // audio continues.
+  void TestUpdateOfferWithRejectedContent() {
+    initiating_client_->Negotiate(true, false);
+    EXPECT_TRUE_WAIT(
+        FramesNotPending(kEndAudioFrameCount * 2, kEndVideoFrameCount),
+        kMaxWaitForFramesMs);
+    // There shouldn't be any more video frame after the new offer is
+    // negotiated.
+    EXPECT_FALSE(VideoFramesReceivedCheck(kEndVideoFrameCount + 1));
+  }
+
+  // Checks that both sides rendered frames of the given dimensions.
+  void VerifyRenderedSize(int width, int height) {
+    EXPECT_EQ(width, receiving_client()->rendered_width());
+    EXPECT_EQ(height, receiving_client()->rendered_height());
+    EXPECT_EQ(width, initializing_client()->rendered_width());
+    EXPECT_EQ(height, initializing_client()->rendered_height());
+  }
+
+  void VerifySessionDescriptions() {
+    initiating_client_->VerifyRejectedMediaInSessionDescription();
+    receiving_client_->VerifyRejectedMediaInSessionDescription();
+    initiating_client_->VerifyLocalIceUfragAndPassword();
+    receiving_client_->VerifyLocalIceUfragAndPassword();
+  }
+
+  // Detach the clients from each other so no signaling messages are delivered
+  // while the clients are being torn down.
+  ~JsepPeerConnectionP2PTestClient() {
+    if (initiating_client_) {
+      initiating_client_->set_signaling_message_receiver(nullptr);
+    }
+    if (receiving_client_) {
+      receiving_client_->set_signaling_message_receiver(nullptr);
+    }
+  }
+
+  bool CreateTestClients() { return CreateTestClients(nullptr, nullptr); }
+
+  bool CreateTestClients(MediaConstraintsInterface* init_constraints,
+                         MediaConstraintsInterface* recv_constraints) {
+    return CreateTestClients(init_constraints, nullptr, recv_constraints,
+                             nullptr);
+  }
+
+  // Creates the caller and callee and wires their signaling together.
+  // Returns false if either client could not be created.
+  bool CreateTestClients(MediaConstraintsInterface* init_constraints,
+                         PeerConnectionFactory::Options* init_options,
+                         MediaConstraintsInterface* recv_constraints,
+                         PeerConnectionFactory::Options* recv_options) {
+    initiating_client_.reset(PeerConnectionTestClient::CreateClient(
+        "Caller: ", init_constraints, init_options));
+    receiving_client_.reset(PeerConnectionTestClient::CreateClient(
+        "Callee: ", recv_constraints, recv_options));
+    if (!initiating_client_ || !receiving_client_) {
+      return false;
+    }
+    initiating_client_->set_signaling_message_receiver(receiving_client_.get());
+    receiving_client_->set_signaling_message_receiver(initiating_client_.get());
+    return true;
+  }
+
+  void SetVideoConstraints(const webrtc::FakeConstraints& init_constraints,
+                           const webrtc::FakeConstraints& recv_constraints) {
+    initiating_client_->SetVideoConstraints(init_constraints);
+    receiving_client_->SetVideoConstraints(recv_constraints);
+  }
+
+  void EnableVideoDecoderFactory() {
+    initiating_client_->EnableVideoDecoderFactory();
+    receiving_client_->EnableVideoDecoderFactory();
+  }
+
+  // This test sets up a call between two parties. Both parties send static
+  // frames to each other. Once the test is finished the number of sent frames
+  // is compared to the number of received frames.
+  void LocalP2PTest() {
+    if (initiating_client_->NumberOfLocalMediaStreams() == 0) {
+      initiating_client_->AddMediaStream(true, true);
+    }
+    initiating_client_->Negotiate();
+    const int kMaxWaitForActivationMs = 5000;
+    // Assert true is used here since next tests are guaranteed to fail and
+    // would eat up 5 seconds.
+    ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+    VerifySessionDescriptions();
+
+
+    int audio_frame_count = kEndAudioFrameCount;
+    // TODO(ronghuawu): Add test to cover the case of sendonly and recvonly.
+    if (!initiating_client_->can_receive_audio() ||
+        !receiving_client_->can_receive_audio()) {
+      audio_frame_count = -1;
+    }
+    int video_frame_count = kEndVideoFrameCount;
+    if (!initiating_client_->can_receive_video() ||
+        !receiving_client_->can_receive_video()) {
+      video_frame_count = -1;
+    }
+
+    if (audio_frame_count != -1 || video_frame_count != -1) {
+      // Audio or video is expected to flow, so both clients should reach the
+      // Connected state, and the offerer (ICE controller) should proceed to
+      // Completed.
+      // Note: These tests have been observed to fail under heavy load at
+      // shorter timeouts, so they may be flaky.
+      EXPECT_EQ_WAIT(
+          webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+          initiating_client_->ice_connection_state(),
+          kMaxWaitForFramesMs);
+      EXPECT_EQ_WAIT(
+          webrtc::PeerConnectionInterface::kIceConnectionConnected,
+          receiving_client_->ice_connection_state(),
+          kMaxWaitForFramesMs);
+    }
+
+    if (initiating_client_->can_receive_audio() ||
+        initiating_client_->can_receive_video()) {
+      // The initiating client can receive media, so it must produce candidates
+      // that will serve as destinations for that media.
+      // TODO(bemasc): Understand why the state is not already Complete here, as
+      // seems to be the case for the receiving client. This may indicate a bug
+      // in the ICE gathering system.
+      EXPECT_NE(webrtc::PeerConnectionInterface::kIceGatheringNew,
+                initiating_client_->ice_gathering_state());
+    }
+    if (receiving_client_->can_receive_audio() ||
+        receiving_client_->can_receive_video()) {
+      EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete,
+                     receiving_client_->ice_gathering_state(),
+                     kMaxWaitForFramesMs);
+    }
+
+    EXPECT_TRUE_WAIT(FramesNotPending(audio_frame_count, video_frame_count),
+                     kMaxWaitForFramesMs);
+  }
+
+  void SendRtpData(webrtc::DataChannelInterface* dc, const std::string& data) {
+    // Messages may get lost on the unreliable DataChannel, so we send multiple
+    // times to avoid test flakiness.
+    static const size_t kSendAttempts = 5;
+
+    for (size_t i = 0; i < kSendAttempts; ++i) {
+      dc->Send(DataBuffer(data));
+    }
+  }
+
+  PeerConnectionTestClient* initializing_client() {
+    return initiating_client_.get();
+  }
+  PeerConnectionTestClient* receiving_client() {
+    return receiving_client_.get();
+  }
+
+ private:
+  // Virtual network shared by the two clients; ss_scope_ installs it as the
+  // current thread's socket server for the lifetime of the fixture.
+  rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
+  rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+  rtc::SocketServerScope ss_scope_;
+  rtc::scoped_ptr<PeerConnectionTestClient> initiating_client_;
+  rtc::scoped_ptr<PeerConnectionTestClient> receiving_client_;
+};
+
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+#if !defined(THREAD_SANITIZER)
+
+// This test sets up a Jsep call between two parties and tests DTMF.
+// TODO(holmer): Disabled due to sometimes crashing on buildbots.
+// See issue webrtc/2378.
+TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestDtmf) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+  VerifyDtmf();
+}
+
+// This test sets up a Jsep call between two parties and tests that we can get
+// a video aspect ratio of at least 16:9.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTest16To9) {
+  ASSERT_TRUE(CreateTestClients());
+  FakeConstraints constraint;
+  double requested_ratio = 640.0/360;
+  constraint.SetMandatoryMinAspectRatio(requested_ratio);
+  SetVideoConstraints(constraint, constraint);
+  LocalP2PTest();
+
+  // The rendered height must be strictly positive; ASSERT_LT (rather than
+  // ASSERT_LE) prevents a degenerate division by zero below, which would
+  // produce inf and make the ratio check pass vacuously.
+  ASSERT_LT(0, initializing_client()->rendered_height());
+  double initiating_video_ratio =
+      static_cast<double>(initializing_client()->rendered_width()) /
+      initializing_client()->rendered_height();
+  EXPECT_LE(requested_ratio, initiating_video_ratio);
+
+  ASSERT_LT(0, receiving_client()->rendered_height());
+  double receiving_video_ratio =
+      static_cast<double>(receiving_client()->rendered_width()) /
+      receiving_client()->rendered_height();
+  EXPECT_LE(requested_ratio, receiving_video_ratio);
+}
+
+// This test sets up a Jsep call between two parties and tests that the
+// received video has a resolution of 1280*720.
+// TODO(mallinath): Enable when
+// http://code.google.com/p/webrtc/issues/detail?id=981 is fixed.
+TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTest1280By720) {
+  ASSERT_TRUE(CreateTestClients());
+  FakeConstraints constraint;
+  constraint.SetMandatoryMinWidth(1280);
+  constraint.SetMandatoryMinHeight(720);
+  SetVideoConstraints(constraint, constraint);
+  LocalP2PTest();
+  // Both sides must have rendered the full requested resolution.
+  VerifyRenderedSize(1280, 720);
+}
+
+// This test sets up a call between two endpoints that are configured to use
+// DTLS key agreement. As a result, DTLS is negotiated and used for transport.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDtls) {
+  // Skip entirely on builds without DTLS-SRTP support.
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up an audio-only call initially and then upgrades to
+// audio/video, using DTLS.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDtlsRenegotiate) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  // Start audio-only, then renegotiate with video enabled as well.
+  receiving_client()->SetReceiveAudioVideo(true, false);
+  LocalP2PTest();
+  receiving_client()->SetReceiveAudioVideo(true, true);
+  receiving_client()->Negotiate();
+}
+
+// This test sets up a call between two endpoints that are configured to use
+// DTLS key agreement. The offerer doesn't support SDES. As a result, DTLS is
+// negotiated and used for transport.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestOfferDtlsButNotSdes) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  // Strip a=crypto lines from the received SDP so only DTLS can be used.
+  receiving_client()->RemoveSdesCryptoFromReceivedSdp(true);
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a Jsep call between two parties, and the callee only
+// accepts to receive video.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerVideo) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(false, true);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties, and the callee only
+// accepts to receive audio.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerAudio) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(true, false);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties, and the callee rejects
+// both audio and video.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerNone) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(false, false);
+  LocalP2PTest();
+}
+
+// This test sets up an audio and video call between two parties. After the call
+// runs for a while (10 frames), the caller sends an update offer with video
+// being rejected. Once the re-negotiation is done, the video flow should stop
+// and the audio flow should continue.
+// Disabled due to b/14955157.
+TEST_F(JsepPeerConnectionP2PTestClient,
+       DISABLED_UpdateOfferWithRejectedContent) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+  TestUpdateOfferWithRejectedContent();
+}
+
+// This test sets up a Jsep call between two parties. The MSID is removed from
+// the SDP strings from the caller.
+// Disabled due to b/14955157.
+TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestWithoutMsid) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->RemoveMsidFromReceivedSdp(true);
+  // TODO(perkj): Currently there is a bug that causes audio to stop playing if
+  // audio and video is muxed when MSID is disabled. Remove
+  // SetRemoveBundleFromSdp once
+  // https://code.google.com/p/webrtc/issues/detail?id=1193 is fixed.
+  receiving_client()->RemoveBundleFromReceivedSdp(true);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties and the initiating peer
+// sends two streams.
+// TODO(perkj): Disabled due to
+// https://code.google.com/p/webrtc/issues/detail?id=1454
+TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestTwoStreams) {
+  ASSERT_TRUE(CreateTestClients());
+  // Set optional video constraint to max 320 pixels to decrease CPU usage.
+  FakeConstraints constraint;
+  constraint.SetOptionalMaxWidth(320);
+  SetVideoConstraints(constraint, constraint);
+  // First stream has audio and video, second stream is video-only.
+  initializing_client()->AddMediaStream(true, true);
+  initializing_client()->AddMediaStream(false, true);
+  ASSERT_EQ(2u, initializing_client()->NumberOfLocalMediaStreams());
+  LocalP2PTest();
+  EXPECT_EQ(2u, receiving_client()->number_of_remote_streams());
+}
+
+// Test that we can receive the audio output level from a remote audio track.
+TEST_F(JsepPeerConnectionP2PTestClient, GetAudioOutputLevelStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* remote_streams =
+      initializing_client()->remote_streams();
+  ASSERT_GT(remote_streams->count(), 0u);
+  ASSERT_GT(remote_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* remote_audio_track =
+      remote_streams->at(0)->GetAudioTracks()[0];
+
+  // Get the audio output level stats. Note that the level is not available
+  // until an RTCP packet has been received.
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetAudioOutputLevelStats(remote_audio_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that an audio input level is reported.
+TEST_F(JsepPeerConnectionP2PTestClient, GetAudioInputLevelStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  // Get the audio input level stats. The level should be available very
+  // soon after the test starts.
+  EXPECT_TRUE_WAIT(initializing_client()->GetAudioInputLevelStats() > 0,
+                   kMaxWaitForStatsMs);
+}
+
+// Test that we can get incoming byte counts from both audio and video tracks.
+TEST_F(JsepPeerConnectionP2PTestClient, GetBytesReceivedStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* remote_streams =
+      initializing_client()->remote_streams();
+  ASSERT_GT(remote_streams->count(), 0u);
+  ASSERT_GT(remote_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* remote_audio_track =
+      remote_streams->at(0)->GetAudioTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesReceivedStats(remote_audio_track) > 0,
+      kMaxWaitForStatsMs);
+
+  // Guard the video-track index just like the audio one above; indexing an
+  // empty vector would be an out-of-bounds read rather than a test failure.
+  ASSERT_GT(remote_streams->at(0)->GetVideoTracks().size(), 0u);
+  MediaStreamTrackInterface* remote_video_track =
+      remote_streams->at(0)->GetVideoTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesReceivedStats(remote_video_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that we can get outgoing byte counts from both audio and video tracks.
+TEST_F(JsepPeerConnectionP2PTestClient, GetBytesSentStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* local_streams =
+      initializing_client()->local_streams();
+  ASSERT_GT(local_streams->count(), 0u);
+  ASSERT_GT(local_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* local_audio_track =
+      local_streams->at(0)->GetAudioTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesSentStats(local_audio_track) > 0,
+      kMaxWaitForStatsMs);
+
+  // Guard the video-track index just like the audio one above; indexing an
+  // empty vector would be an out-of-bounds read rather than a test failure.
+  ASSERT_GT(local_streams->at(0)->GetVideoTracks().size(), 0u);
+  MediaStreamTrackInterface* local_video_track =
+      local_streams->at(0)->GetVideoTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesSentStats(local_video_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12None) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  // The metrics observer records which SSL/SRTP ciphers were negotiated.
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  // Expect the DTLS 1.0 default cipher suite in both the stats and the
+  // recorded UMA metric.
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSrtpCipher,
+                   rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+}
+
+// Test that DTLS 1.2 is used if both ends support it.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Both) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  // The metrics observer records which SSL/SRTP ciphers were negotiated.
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  // Expect the DTLS 1.2 default cipher suite in both the stats and the
+  // recorded UMA metric.
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSrtpCipher,
+                   rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.2 and the
+// receiver supports 1.0.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Init) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  // The metrics observer records which SSL/SRTP ciphers were negotiated.
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  // Negotiation must fall back to the DTLS 1.0 default cipher suite.
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSrtpCipher,
+                   rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.0 and the
+// receiver supports 1.2.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Recv) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  // The metrics observer records which SSL/SRTP ciphers were negotiated.
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  // Negotiation must fall back to the DTLS 1.0 default cipher suite.
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSrtpCipher,
+                   rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+}
+
+// This test sets up a call between two parties with audio, video and data.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDataChannel) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  initializing_client()->CreateDataChannel();
+  LocalP2PTest();
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+
+  std::string data = "hello world";
+
+  // Verify the channel works in both directions.
+  SendRtpData(initializing_client()->data_channel(), data);
+  EXPECT_EQ_WAIT(data, receiving_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  SendRtpData(receiving_client()->data_channel(), data);
+  EXPECT_EQ_WAIT(data, initializing_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  receiving_client()->data_channel()->Close();
+  // Send new offer and answer.
+  receiving_client()->Negotiate();
+  // After renegotiation the channel must be closed on both ends.
+  EXPECT_FALSE(initializing_client()->data_observer()->IsOpen());
+  EXPECT_FALSE(receiving_client()->data_observer()->IsOpen());
+}
+
+// This test sets up a call between two parties and creates a data channel.
+// The test tests that received data is buffered unless an observer has been
+// registered.
+// Rtp data channels can receive data before the underlying
+// transport has detected that a channel is writable and thus data can be
+// received before the data channel state changes to open. That is hard to test
+// but the same buffering is used in that case.
+TEST_F(JsepPeerConnectionP2PTestClient, RegisterDataChannelObserver) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  initializing_client()->CreateDataChannel();
+  initializing_client()->Negotiate();
+
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_EQ_WAIT(DataChannelInterface::kOpen,
+                 receiving_client()->data_channel()->state(), kMaxWaitMs);
+
+  // Unregister the existing observer.
+  receiving_client()->data_channel()->UnregisterObserver();
+
+  std::string data = "hello world";
+  SendRtpData(initializing_client()->data_channel(), data);
+
+  // Wait a while to allow the sent data to arrive before an observer is
+  // registered.
+  rtc::Thread::Current()->ProcessMessages(100);
+
+  // A freshly registered observer must still see the buffered message.
+  MockDataChannelObserver new_observer(receiving_client()->data_channel());
+  EXPECT_EQ_WAIT(data, new_observer.last_message(), kMaxWaitMs);
+}
+
+// This test sets up a call between two parties with audio and video, but only
+// the initiating client supports data.
+TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestReceiverDoesntSupportData) {
+  FakeConstraints setup_constraints_1;
+  setup_constraints_1.SetAllowRtpDataChannels();
+  // Must disable DTLS to make negotiation succeed.
+  setup_constraints_1.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  FakeConstraints setup_constraints_2;
+  setup_constraints_2.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints_1, &setup_constraints_2));
+  initializing_client()->CreateDataChannel();
+  LocalP2PTest();
+  // The caller has a (never-opened) channel; the callee has none at all.
+  EXPECT_TRUE(initializing_client()->data_channel() != nullptr);
+  EXPECT_FALSE(receiving_client()->data_channel());
+  EXPECT_FALSE(initializing_client()->data_observer()->IsOpen());
+}
+
+// This test sets up a call between two parties with audio and video. Once
+// audio and video are set up and flowing, a data channel is negotiated.
+TEST_F(JsepPeerConnectionP2PTestClient, AddDataChannelAfterRenegotiation) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  LocalP2PTest();
+  initializing_client()->CreateDataChannel();
+  // Send new offer and answer.
+  initializing_client()->Negotiate();
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+}
+
+// This test sets up a Jsep call with SCTP DataChannel and verifies the
+// negotiation is completed without error.
+#ifdef HAVE_SCTP
+TEST_F(JsepPeerConnectionP2PTestClient, CreateOfferWithSctpDataChannel) {
+  // SCTP data channels require DTLS transport.
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints constraints;
+  constraints.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, true);
+  ASSERT_TRUE(CreateTestClients(&constraints, &constraints));
+  initializing_client()->CreateDataChannel();
+  initializing_client()->Negotiate(false, false);
+}
+#endif
+
+// This test sets up a call between two parties with audio, and video.
+// During the call, the initializing side restart ice and the test verifies that
+// new ice candidates are generated and audio and video still can flow.
+TEST_F(JsepPeerConnectionP2PTestClient, IceRestart) {
+  ASSERT_TRUE(CreateTestClients());
+
+  // Negotiate and wait for ice completion and make sure audio and video plays.
+  LocalP2PTest();
+
+  // Snapshot the SDP string of the first audio candidate on each side.
+  const webrtc::IceCandidateCollection* caller_candidates =
+      initializing_client()->pc()->local_description()->candidates(0);
+  const webrtc::IceCandidateCollection* callee_candidates =
+      receiving_client()->pc()->local_description()->candidates(0);
+  ASSERT_GT(caller_candidates->count(), 0u);
+  ASSERT_GT(callee_candidates->count(), 0u);
+  std::string caller_candidate_before;
+  EXPECT_TRUE(caller_candidates->at(0)->ToString(&caller_candidate_before));
+  std::string callee_candidate_before;
+  EXPECT_TRUE(callee_candidates->at(0)->ToString(&callee_candidate_before));
+
+  // Restart ice on the initializing client.
+  receiving_client()->SetExpectIceRestart(true);
+  initializing_client()->IceRestart();
+
+  // Negotiate and wait for ice completion again and make sure audio and video
+  // plays.
+  LocalP2PTest();
+
+  // Snapshot the first audio candidate on each side again.
+  const webrtc::IceCandidateCollection* caller_candidates_restart =
+      initializing_client()->pc()->local_description()->candidates(0);
+  const webrtc::IceCandidateCollection* callee_candidates_restart =
+      receiving_client()->pc()->local_description()->candidates(0);
+  ASSERT_GT(caller_candidates_restart->count(), 0u);
+  ASSERT_GT(callee_candidates_restart->count(), 0u);
+  std::string caller_candidate_after;
+  EXPECT_TRUE(
+      caller_candidates_restart->at(0)->ToString(&caller_candidate_after));
+  std::string callee_candidate_after;
+  EXPECT_TRUE(
+      callee_candidates_restart->at(0)->ToString(&callee_candidate_after));
+
+  // The first candidate in each local session description must have changed
+  // after the restart.
+  EXPECT_NE(caller_candidate_before, caller_candidate_after);
+  EXPECT_NE(callee_candidate_before, callee_candidate_after);
+}
+
+// This test sets up a Jsep call between two parties with an external
+// VideoDecoderFactory.
+// TODO(holmer): Disabled due to sometimes crashing on buildbots.
+// See issue webrtc/2378.
+TEST_F(JsepPeerConnectionP2PTestClient,
+       DISABLED_LocalP2PTestWithVideoDecoderFactory) {
+  ASSERT_TRUE(CreateTestClients());
+  EnableVideoDecoderFactory();
+  LocalP2PTest();
+}
+
+// Fixture for testing webrtc::ParseIceServers. Parsed results accumulate in
+// stun_configurations_ / turn_configurations_; tests clear them between cases.
+class IceServerParsingTest : public testing::Test {
+ public:
+  // Convenience for parsing a single URL.
+  bool ParseUrl(const std::string& url) {
+    return ParseUrl(url, std::string(), std::string());
+  }
+
+  // Parses a single URL together with explicit credentials. Returns the
+  // result of webrtc::ParseIceServers.
+  bool ParseUrl(const std::string& url,
+                const std::string& username,
+                const std::string& password) {
+    PeerConnectionInterface::IceServers servers;
+    PeerConnectionInterface::IceServer server;
+    server.urls.push_back(url);
+    server.username = username;
+    server.password = password;
+    servers.push_back(server);
+    return webrtc::ParseIceServers(servers, &stun_configurations_,
+                                   &turn_configurations_);
+  }
+
+ protected:
+  webrtc::StunConfigurations stun_configurations_;
+  webrtc::TurnConfigurations turn_configurations_;
+};
+
+// Make sure all STUN/TURN prefixes are parsed correctly.
+TEST_F(IceServerParsingTest, ParseStunPrefixes) {
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ(0U, turn_configurations_.size());
+  stun_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("stuns:hostname"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ(0U, turn_configurations_.size());
+  stun_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("turn:hostname"));
+  EXPECT_EQ(0U, stun_configurations_.size());
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_FALSE(turn_configurations_[0].secure);
+  turn_configurations_.clear();
+
+  // "turns" implies a secure (TLS) TURN connection.
+  EXPECT_TRUE(ParseUrl("turns:hostname"));
+  EXPECT_EQ(0U, stun_configurations_.size());
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_TRUE(turn_configurations_[0].secure);
+  turn_configurations_.clear();
+
+  // Invalid prefixes must be rejected.
+  EXPECT_FALSE(ParseUrl("stunn:hostname"));
+  EXPECT_FALSE(ParseUrl(":hostname"));
+  EXPECT_FALSE(ParseUrl(":"));
+  EXPECT_FALSE(ParseUrl(""));
+}
+
+// Verify default port and transport for each scheme when the URL omits them.
+TEST_F(IceServerParsingTest, VerifyDefaults) {
+  // TURNS defaults
+  EXPECT_TRUE(ParseUrl("turns:hostname"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ(5349, turn_configurations_[0].server.port());
+  EXPECT_EQ("tcp", turn_configurations_[0].transport_type);
+  turn_configurations_.clear();
+
+  // TURN defaults
+  EXPECT_TRUE(ParseUrl("turn:hostname"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ(3478, turn_configurations_[0].server.port());
+  EXPECT_EQ("udp", turn_configurations_[0].transport_type);
+  turn_configurations_.clear();
+
+  // STUN defaults
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ(3478, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+}
+
+// Check that the 6 combinations of IPv4/IPv6/hostname and with/without port
+// can be parsed correctly.
+TEST_F(IceServerParsingTest, ParseHostnameAndPort) {
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4:1234"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("1.2.3.4", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(1234, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  // IPv6 literals must be bracketed; the brackets are stripped from hostname.
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]:4321"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(4321, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:hostname:9999"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("hostname", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(9999, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  // Without an explicit port, the STUN default (3478) is used.
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("1.2.3.4", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(3478, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(3478, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ("hostname", stun_configurations_[0].server.hostname());
+  EXPECT_EQ(3478, stun_configurations_[0].server.port());
+  stun_configurations_.clear();
+
+  // Try some invalid hostname:port strings.
+  EXPECT_FALSE(ParseUrl("stun:hostname:99a99"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:-1"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:"));
+  EXPECT_FALSE(ParseUrl("stun:[1:2:3:4:5:6:7:8]junk:1000"));
+  EXPECT_FALSE(ParseUrl("stun::5555"));
+  EXPECT_FALSE(ParseUrl("stun:"));
+}
+
+// Test parsing the "?transport=xxx" part of the URL.
+TEST_F(IceServerParsingTest, ParseTransport) {
+  EXPECT_TRUE(ParseUrl("turn:hostname:1234?transport=tcp"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ("tcp", turn_configurations_[0].transport_type);
+  turn_configurations_.clear();
+
+  EXPECT_TRUE(ParseUrl("turn:hostname?transport=udp"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ("udp", turn_configurations_[0].transport_type);
+  turn_configurations_.clear();
+
+  // Unknown transport values must be rejected.
+  EXPECT_FALSE(ParseUrl("turn:hostname?transport=invalid"));
+}
+
+// Test parsing ICE username contained in URL.
+TEST_F(IceServerParsingTest, ParseUsername) {
+  EXPECT_TRUE(ParseUrl("turn:user@hostname"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ("user", turn_configurations_[0].username);
+  turn_configurations_.clear();
+
+  // Empty username, empty hostname, or multiple '@' are all invalid.
+  EXPECT_FALSE(ParseUrl("turn:@hostname"));
+  EXPECT_FALSE(ParseUrl("turn:username@"));
+  EXPECT_FALSE(ParseUrl("turn:@"));
+  EXPECT_FALSE(ParseUrl("turn:user@name@hostname"));
+}
+
+// Test that username and password from IceServer is copied into the resulting
+// TurnConfiguration.
+TEST_F(IceServerParsingTest, CopyUsernameAndPasswordFromIceServer) {
+  EXPECT_TRUE(ParseUrl("turn:hostname", "username", "password"));
+  EXPECT_EQ(1U, turn_configurations_.size());
+  EXPECT_EQ("username", turn_configurations_[0].username);
+  EXPECT_EQ("password", turn_configurations_[0].password);
+}
+
+// Ensure that if a server has multiple URLs, each one is parsed.
+TEST_F(IceServerParsingTest, ParseMultipleUrls) {
+  PeerConnectionInterface::IceServers servers;
+  PeerConnectionInterface::IceServer server;
+  server.urls.push_back("stun:hostname");
+  server.urls.push_back("turn:hostname");
+  servers.push_back(server);
+  EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_configurations_,
+                                      &turn_configurations_));
+  // One URL of each kind should produce one configuration of each kind.
+  EXPECT_EQ(1U, stun_configurations_.size());
+  EXPECT_EQ(1U, turn_configurations_.size());
+}
+
+#endif // if !defined(THREAD_SANITIZER)
diff --git a/talk/app/webrtc/peerconnectionendtoend_unittest.cc b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
new file mode 100644
index 0000000000..eacedd4eea
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
@@ -0,0 +1,398 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/test/peerconnectiontestwrapper.h"
+#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+#define MAYBE_SKIP_TEST(feature) \
+ if (!(feature())) { \
+ LOG(LS_INFO) << "Feature disabled... skipping"; \
+ return; \
+ }
+
+using webrtc::DataChannelInterface;
+using webrtc::FakeConstraints;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+
+namespace {
+
+const size_t kMaxWait = 10000;
+
+void RemoveLinesFromSdp(const std::string& line_start,
+ std::string* sdp) {
+ const char kSdpLineEnd[] = "\r\n";
+ size_t ssrc_pos = 0;
+ while ((ssrc_pos = sdp->find(line_start, ssrc_pos)) !=
+ std::string::npos) {
+ size_t end_ssrc = sdp->find(kSdpLineEnd, ssrc_pos);
+ sdp->erase(ssrc_pos, end_ssrc - ssrc_pos + strlen(kSdpLineEnd));
+ }
+}
+
+// Add |newlines| to the |message| after |line|.
+void InjectAfter(const std::string& line,
+ const std::string& newlines,
+ std::string* message) {
+ const std::string tmp = line + newlines;
+ rtc::replace_substrs(line.c_str(), line.length(),
+ tmp.c_str(), tmp.length(), message);
+}
+
+void Replace(const std::string& line,
+ const std::string& newlines,
+ std::string* message) {
+ rtc::replace_substrs(line.c_str(), line.length(),
+ newlines.c_str(), newlines.length(), message);
+}
+
+void UseExternalSdes(std::string* sdp) {
+ // Remove current crypto specification.
+ RemoveLinesFromSdp("a=crypto", sdp);
+ RemoveLinesFromSdp("a=fingerprint", sdp);
+ // Add external crypto.
+ const char kAudioSdes[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR\r\n";
+ const char kVideoSdes[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj\r\n";
+ const char kDataSdes[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj\r\n";
+ InjectAfter("a=mid:audio\r\n", kAudioSdes, sdp);
+ InjectAfter("a=mid:video\r\n", kVideoSdes, sdp);
+ InjectAfter("a=mid:data\r\n", kDataSdes, sdp);
+}
+
+void RemoveBundle(std::string* sdp) {
+ RemoveLinesFromSdp("a=group:BUNDLE", sdp);
+}
+
+} // namespace
+
+class PeerConnectionEndToEndTest
+ : public sigslot::has_slots<>,
+ public testing::Test {
+ public:
+ typedef std::vector<rtc::scoped_refptr<DataChannelInterface> >
+ DataChannelList;
+
+ PeerConnectionEndToEndTest()
+ : caller_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ "caller")),
+ callee_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ "callee")) {
+ }
+
+ void CreatePcs() {
+ CreatePcs(NULL);
+ }
+
+ void CreatePcs(const MediaConstraintsInterface* pc_constraints) {
+ EXPECT_TRUE(caller_->CreatePc(pc_constraints));
+ EXPECT_TRUE(callee_->CreatePc(pc_constraints));
+ PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get());
+
+ caller_->SignalOnDataChannel.connect(
+ this, &PeerConnectionEndToEndTest::OnCallerAddedDataChanel);
+ callee_->SignalOnDataChannel.connect(
+ this, &PeerConnectionEndToEndTest::OnCalleeAddedDataChannel);
+ }
+
+ void GetAndAddUserMedia() {
+ FakeConstraints audio_constraints;
+ FakeConstraints video_constraints;
+ GetAndAddUserMedia(true, audio_constraints, true, video_constraints);
+ }
+
+ void GetAndAddUserMedia(bool audio, FakeConstraints audio_constraints,
+ bool video, FakeConstraints video_constraints) {
+ caller_->GetAndAddUserMedia(audio, audio_constraints,
+ video, video_constraints);
+ callee_->GetAndAddUserMedia(audio, audio_constraints,
+ video, video_constraints);
+ }
+
+ void Negotiate() {
+ caller_->CreateOffer(NULL);
+ }
+
+ void WaitForCallEstablished() {
+ caller_->WaitForCallEstablished();
+ callee_->WaitForCallEstablished();
+ }
+
+ void WaitForConnection() {
+ caller_->WaitForConnection();
+ callee_->WaitForConnection();
+ }
+
+ void OnCallerAddedDataChanel(DataChannelInterface* dc) {
+ caller_signaled_data_channels_.push_back(dc);
+ }
+
+ void OnCalleeAddedDataChannel(DataChannelInterface* dc) {
+ callee_signaled_data_channels_.push_back(dc);
+ }
+
+ // Tests that |dc1| and |dc2| can send to and receive from each other.
+ void TestDataChannelSendAndReceive(
+ DataChannelInterface* dc1, DataChannelInterface* dc2) {
+ rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc1_observer(
+ new webrtc::MockDataChannelObserver(dc1));
+
+ rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc2_observer(
+ new webrtc::MockDataChannelObserver(dc2));
+
+ static const std::string kDummyData = "abcdefg";
+ webrtc::DataBuffer buffer(kDummyData);
+ EXPECT_TRUE(dc1->Send(buffer));
+ EXPECT_EQ_WAIT(kDummyData, dc2_observer->last_message(), kMaxWait);
+
+ EXPECT_TRUE(dc2->Send(buffer));
+ EXPECT_EQ_WAIT(kDummyData, dc1_observer->last_message(), kMaxWait);
+
+ EXPECT_EQ(1U, dc1_observer->received_message_count());
+ EXPECT_EQ(1U, dc2_observer->received_message_count());
+ }
+
+ void WaitForDataChannelsToOpen(DataChannelInterface* local_dc,
+ const DataChannelList& remote_dc_list,
+ size_t remote_dc_index) {
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, local_dc->state(), kMaxWait);
+
+ EXPECT_TRUE_WAIT(remote_dc_list.size() > remote_dc_index, kMaxWait);
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen,
+ remote_dc_list[remote_dc_index]->state(),
+ kMaxWait);
+ EXPECT_EQ(local_dc->id(), remote_dc_list[remote_dc_index]->id());
+ }
+
+ void CloseDataChannels(DataChannelInterface* local_dc,
+ const DataChannelList& remote_dc_list,
+ size_t remote_dc_index) {
+ local_dc->Close();
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, local_dc->state(), kMaxWait);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed,
+ remote_dc_list[remote_dc_index]->state(),
+ kMaxWait);
+ }
+
+ protected:
+ rtc::scoped_refptr<PeerConnectionTestWrapper> caller_;
+ rtc::scoped_refptr<PeerConnectionTestWrapper> callee_;
+ DataChannelList caller_signaled_data_channels_;
+ DataChannelList callee_signaled_data_channels_;
+};
+
+TEST_F(PeerConnectionEndToEndTest, Call) {
+ CreatePcs();
+ GetAndAddUserMedia();
+ Negotiate();
+ WaitForCallEstablished();
+}
+
+// Disabled per b/14899892
+TEST_F(PeerConnectionEndToEndTest, DISABLED_CallWithLegacySdp) {
+ FakeConstraints pc_constraints;
+ pc_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+ false);
+ CreatePcs(&pc_constraints);
+ GetAndAddUserMedia();
+ Negotiate();
+ WaitForCallEstablished();
+}
+
+// Verifies that a DataChannel created before the negotiation can transition to
+// "OPEN" and transfer data.
+TEST_F(PeerConnectionEndToEndTest, CreateDataChannelBeforeNegotiate) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+ rtc::scoped_refptr<DataChannelInterface> caller_dc(
+ caller_->CreateDataChannel("data", init));
+ rtc::scoped_refptr<DataChannelInterface> callee_dc(
+ callee_->CreateDataChannel("data", init));
+
+ Negotiate();
+ WaitForConnection();
+
+ WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+ WaitForDataChannelsToOpen(callee_dc, caller_signaled_data_channels_, 0);
+
+ TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[0]);
+ TestDataChannelSendAndReceive(callee_dc, caller_signaled_data_channels_[0]);
+
+ CloseDataChannels(caller_dc, callee_signaled_data_channels_, 0);
+ CloseDataChannels(callee_dc, caller_signaled_data_channels_, 0);
+}
+
+// Verifies that a DataChannel created after the negotiation can transition to
+// "OPEN" and transfer data.
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=3980.
+#define MAYBE_CreateDataChannelAfterNegotiate DISABLED_CreateDataChannelAfterNegotiate
+#else
+#define MAYBE_CreateDataChannelAfterNegotiate CreateDataChannelAfterNegotiate
+#endif
+TEST_F(PeerConnectionEndToEndTest, MAYBE_CreateDataChannelAfterNegotiate) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+
+ // This DataChannel is for creating the data content in the negotiation.
+ rtc::scoped_refptr<DataChannelInterface> dummy(
+ caller_->CreateDataChannel("data", init));
+ Negotiate();
+ WaitForConnection();
+
+ // Creates new DataChannels after the negotiation and verifies their states.
+ rtc::scoped_refptr<DataChannelInterface> caller_dc(
+ caller_->CreateDataChannel("hello", init));
+ rtc::scoped_refptr<DataChannelInterface> callee_dc(
+ callee_->CreateDataChannel("hello", init));
+
+ WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 1);
+ WaitForDataChannelsToOpen(callee_dc, caller_signaled_data_channels_, 0);
+
+ TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[1]);
+ TestDataChannelSendAndReceive(callee_dc, caller_signaled_data_channels_[0]);
+
+ CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
+ CloseDataChannels(callee_dc, caller_signaled_data_channels_, 0);
+}
+
+// Verifies that DataChannel IDs are even/odd based on the DTLS roles.
+TEST_F(PeerConnectionEndToEndTest, DataChannelIdAssignment) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+ rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+ caller_->CreateDataChannel("data", init));
+ rtc::scoped_refptr<DataChannelInterface> callee_dc_1(
+ callee_->CreateDataChannel("data", init));
+
+ Negotiate();
+ WaitForConnection();
+
+ EXPECT_EQ(1U, caller_dc_1->id() % 2);
+ EXPECT_EQ(0U, callee_dc_1->id() % 2);
+
+ rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+ caller_->CreateDataChannel("data", init));
+ rtc::scoped_refptr<DataChannelInterface> callee_dc_2(
+ callee_->CreateDataChannel("data", init));
+
+ EXPECT_EQ(1U, caller_dc_2->id() % 2);
+ EXPECT_EQ(0U, callee_dc_2->id() % 2);
+}
+
+// Verifies that the message is received by the right remote DataChannel when
+// there are multiple DataChannels.
+TEST_F(PeerConnectionEndToEndTest,
+ MessageTransferBetweenTwoPairsOfDataChannels) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+
+ rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+ caller_->CreateDataChannel("data", init));
+ rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+ caller_->CreateDataChannel("data", init));
+
+ Negotiate();
+ WaitForConnection();
+ WaitForDataChannelsToOpen(caller_dc_1, callee_signaled_data_channels_, 0);
+ WaitForDataChannelsToOpen(caller_dc_2, callee_signaled_data_channels_, 1);
+
+ rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_1_observer(
+ new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[0]));
+
+ rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_2_observer(
+ new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[1]));
+
+ const std::string message_1 = "hello 1";
+ const std::string message_2 = "hello 2";
+
+ caller_dc_1->Send(webrtc::DataBuffer(message_1));
+ EXPECT_EQ_WAIT(message_1, dc_1_observer->last_message(), kMaxWait);
+
+ caller_dc_2->Send(webrtc::DataBuffer(message_2));
+ EXPECT_EQ_WAIT(message_2, dc_2_observer->last_message(), kMaxWait);
+
+ EXPECT_EQ(1U, dc_1_observer->received_message_count());
+ EXPECT_EQ(1U, dc_2_observer->received_message_count());
+}
+
+// Verifies that a DataChannel added from an OPEN message functions after
+// a channel has been previously closed (webrtc issue 3778).
+// This previously failed because the new channel re-uses the ID of the closed
+// channel, and the closed channel was incorrectly still assigned to the id.
+// TODO(deadbeef): This is disabled because there's currently a race condition
+// caused by the fact that a data channel signals that it's closed before it
+// really is. Re-enable this test once that's fixed.
+TEST_F(PeerConnectionEndToEndTest,
+ DISABLED_DataChannelFromOpenWorksAfterClose) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+ rtc::scoped_refptr<DataChannelInterface> caller_dc(
+ caller_->CreateDataChannel("data", init));
+
+ Negotiate();
+ WaitForConnection();
+
+ WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+ CloseDataChannels(caller_dc, callee_signaled_data_channels_, 0);
+
+ // Create a new channel and ensure it works after closing the previous one.
+ caller_dc = caller_->CreateDataChannel("data2", init);
+
+ WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 1);
+ TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[1]);
+
+ CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
+}
diff --git a/talk/app/webrtc/peerconnectionfactory.cc b/talk/app/webrtc/peerconnectionfactory.cc
new file mode 100644
index 0000000000..b46b4b68d3
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactory.cc
@@ -0,0 +1,322 @@
+/*
+ * libjingle
+ * Copyright 2004--2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/peerconnectionfactory.h"
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/localaudiosource.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionfactoryproxy.h"
+#include "talk/app/webrtc/peerconnectionproxy.h"
+#include "talk/app/webrtc/portallocatorfactory.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videosourceproxy.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/webrtc/webrtcmediaengine.h"
+#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
+#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/modules/audio_device/include/audio_device.h"
+
+namespace webrtc {
+
+namespace {
+
+// Passes down the calls to |store_|. See usage in CreatePeerConnection.
+class DtlsIdentityStoreWrapper : public DtlsIdentityStoreInterface {
+ public:
+ DtlsIdentityStoreWrapper(
+ const rtc::scoped_refptr<RefCountedDtlsIdentityStore>& store)
+ : store_(store) {
+ RTC_DCHECK(store_);
+ }
+
+ void RequestIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
+ observer) override {
+ store_->RequestIdentity(key_type, observer);
+ }
+
+ private:
+ rtc::scoped_refptr<RefCountedDtlsIdentityStore> store_;
+};
+
+} // anonymous namespace
+
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory() {
+ rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
+ new rtc::RefCountedObject<PeerConnectionFactory>());
+
+
+  // Call Initialize synchronously but make sure it's executed on
+  // |signaling_thread|.
+ MethodCall0<PeerConnectionFactory, bool> call(
+ pc_factory.get(),
+ &PeerConnectionFactory::Initialize);
+ bool result = call.Marshal(pc_factory->signaling_thread());
+
+ if (!result) {
+ return NULL;
+ }
+ return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(),
+ pc_factory);
+}
+
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
+ AudioDeviceModule* default_adm,
+ cricket::WebRtcVideoEncoderFactory* encoder_factory,
+ cricket::WebRtcVideoDecoderFactory* decoder_factory) {
+ rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
+ new rtc::RefCountedObject<PeerConnectionFactory>(worker_thread,
+ signaling_thread,
+ default_adm,
+ encoder_factory,
+ decoder_factory));
+
+  // Call Initialize synchronously but make sure it's executed on
+  // |signaling_thread|.
+ MethodCall0<PeerConnectionFactory, bool> call(
+ pc_factory.get(),
+ &PeerConnectionFactory::Initialize);
+ bool result = call.Marshal(signaling_thread);
+
+ if (!result) {
+ return NULL;
+ }
+ return PeerConnectionFactoryProxy::Create(signaling_thread, pc_factory);
+}
+
+PeerConnectionFactory::PeerConnectionFactory()
+ : owns_ptrs_(true),
+ wraps_current_thread_(false),
+ signaling_thread_(rtc::ThreadManager::Instance()->CurrentThread()),
+ worker_thread_(new rtc::Thread) {
+ if (!signaling_thread_) {
+ signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
+ wraps_current_thread_ = true;
+ }
+ worker_thread_->Start();
+}
+
+PeerConnectionFactory::PeerConnectionFactory(
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
+ AudioDeviceModule* default_adm,
+ cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
+ cricket::WebRtcVideoDecoderFactory* video_decoder_factory)
+ : owns_ptrs_(false),
+ wraps_current_thread_(false),
+ signaling_thread_(signaling_thread),
+ worker_thread_(worker_thread),
+ default_adm_(default_adm),
+ video_encoder_factory_(video_encoder_factory),
+ video_decoder_factory_(video_decoder_factory) {
+ ASSERT(worker_thread != NULL);
+ ASSERT(signaling_thread != NULL);
+  // TODO: Currently there is no way of creating an external adm in
+  // libjingle source tree. So we can't currently assert if this is NULL.
+ // ASSERT(default_adm != NULL);
+}
+
+PeerConnectionFactory::~PeerConnectionFactory() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ channel_manager_.reset(nullptr);
+ default_allocator_factory_ = nullptr;
+
+ // Make sure |worker_thread_| and |signaling_thread_| outlive
+ // |dtls_identity_store_|.
+ dtls_identity_store_ = nullptr;
+
+ if (owns_ptrs_) {
+ if (wraps_current_thread_)
+ rtc::ThreadManager::Instance()->UnwrapCurrentThread();
+ delete worker_thread_;
+ }
+}
+
+bool PeerConnectionFactory::Initialize() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::InitRandom(rtc::Time());
+
+ default_allocator_factory_ = PortAllocatorFactory::Create(worker_thread_);
+ if (!default_allocator_factory_)
+ return false;
+
+ // TODO: Need to make sure only one VoE is created inside
+ // WebRtcMediaEngine.
+ cricket::MediaEngineInterface* media_engine =
+ worker_thread_->Invoke<cricket::MediaEngineInterface*>(rtc::Bind(
+ &PeerConnectionFactory::CreateMediaEngine_w, this));
+
+ channel_manager_.reset(
+ new cricket::ChannelManager(media_engine, worker_thread_));
+
+ channel_manager_->SetVideoRtxEnabled(true);
+ if (!channel_manager_->Init()) {
+ return false;
+ }
+
+ dtls_identity_store_ = new RefCountedDtlsIdentityStore(
+ signaling_thread_, worker_thread_);
+
+ return true;
+}
+
+rtc::scoped_refptr<AudioSourceInterface>
+PeerConnectionFactory::CreateAudioSource(
+ const MediaConstraintsInterface* constraints) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::scoped_refptr<LocalAudioSource> source(
+ LocalAudioSource::Create(options_, constraints));
+ return source;
+}
+
+rtc::scoped_refptr<VideoSourceInterface>
+PeerConnectionFactory::CreateVideoSource(
+ cricket::VideoCapturer* capturer,
+ const MediaConstraintsInterface* constraints) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::scoped_refptr<VideoSource> source(
+ VideoSource::Create(channel_manager_.get(), capturer, constraints));
+ return VideoSourceProxy::Create(signaling_thread_, source);
+}
+
+bool PeerConnectionFactory::StartAecDump(rtc::PlatformFile file) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return channel_manager_->StartAecDump(file);
+}
+
+void PeerConnectionFactory::StopAecDump() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ channel_manager_->StopAecDump();
+}
+
+bool PeerConnectionFactory::StartRtcEventLog(rtc::PlatformFile file) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return channel_manager_->StartRtcEventLog(file);
+}
+
+void PeerConnectionFactory::StopRtcEventLog() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ channel_manager_->StopRtcEventLog();
+}
+
+rtc::scoped_refptr<PeerConnectionInterface>
+PeerConnectionFactory::CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ RTC_DCHECK(allocator_factory || default_allocator_factory_);
+
+ if (!dtls_identity_store.get()) {
+ // Because |pc|->Initialize takes ownership of the store we need a new
+ // wrapper object that can be deleted without deleting the underlying
+ // |dtls_identity_store_|, protecting it from being deleted multiple times.
+ dtls_identity_store.reset(
+ new DtlsIdentityStoreWrapper(dtls_identity_store_));
+ }
+
+ PortAllocatorFactoryInterface* chosen_allocator_factory =
+ allocator_factory ? allocator_factory : default_allocator_factory_.get();
+ chosen_allocator_factory->SetNetworkIgnoreMask(options_.network_ignore_mask);
+
+ rtc::scoped_refptr<PeerConnection> pc(
+ new rtc::RefCountedObject<PeerConnection>(this));
+ if (!pc->Initialize(
+ configuration,
+ constraints,
+ chosen_allocator_factory,
+ dtls_identity_store.Pass(),
+ observer)) {
+ return NULL;
+ }
+ return PeerConnectionProxy::Create(signaling_thread(), pc);
+}
+
+rtc::scoped_refptr<MediaStreamInterface>
+PeerConnectionFactory::CreateLocalMediaStream(const std::string& label) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return MediaStreamProxy::Create(signaling_thread_,
+ MediaStream::Create(label));
+}
+
+rtc::scoped_refptr<VideoTrackInterface>
+PeerConnectionFactory::CreateVideoTrack(
+ const std::string& id,
+ VideoSourceInterface* source) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::scoped_refptr<VideoTrackInterface> track(
+ VideoTrack::Create(id, source));
+ return VideoTrackProxy::Create(signaling_thread_, track);
+}
+
+rtc::scoped_refptr<AudioTrackInterface>
+PeerConnectionFactory::CreateAudioTrack(const std::string& id,
+ AudioSourceInterface* source) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::scoped_refptr<AudioTrackInterface> track(
+ AudioTrack::Create(id, source));
+ return AudioTrackProxy::Create(signaling_thread_, track);
+}
+
+webrtc::MediaControllerInterface* PeerConnectionFactory::CreateMediaController()
+ const {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return MediaControllerInterface::Create(worker_thread_,
+ channel_manager_.get());
+}
+
+rtc::Thread* PeerConnectionFactory::signaling_thread() {
+ // This method can be called on a different thread when the factory is
+ // created in CreatePeerConnectionFactory().
+ return signaling_thread_;
+}
+
+rtc::Thread* PeerConnectionFactory::worker_thread() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ return worker_thread_;
+}
+
+cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() {
+ ASSERT(worker_thread_ == rtc::Thread::Current());
+ return cricket::WebRtcMediaEngineFactory::Create(
+ default_adm_.get(), video_encoder_factory_.get(),
+ video_decoder_factory_.get());
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionfactory.h b/talk/app/webrtc/peerconnectionfactory.h
new file mode 100644
index 0000000000..af4117a9d3
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactory.h
@@ -0,0 +1,128 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
+
+#include <string>
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/mediacontroller.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+typedef rtc::RefCountedObject<DtlsIdentityStoreImpl>
+ RefCountedDtlsIdentityStore;
+
+class PeerConnectionFactory : public PeerConnectionFactoryInterface {
+ public:
+ virtual void SetOptions(const Options& options) {
+ options_ = options;
+ }
+
+  // webrtc::PeerConnectionFactoryInterface overrides.
+ rtc::scoped_refptr<PeerConnectionInterface>
+ CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) override;
+
+ bool Initialize();
+
+ rtc::scoped_refptr<MediaStreamInterface>
+ CreateLocalMediaStream(const std::string& label) override;
+
+ rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
+ const MediaConstraintsInterface* constraints) override;
+
+ rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
+ cricket::VideoCapturer* capturer,
+ const MediaConstraintsInterface* constraints) override;
+
+ rtc::scoped_refptr<VideoTrackInterface>
+ CreateVideoTrack(const std::string& id,
+ VideoSourceInterface* video_source) override;
+
+ rtc::scoped_refptr<AudioTrackInterface>
+ CreateAudioTrack(const std::string& id,
+ AudioSourceInterface* audio_source) override;
+
+ bool StartAecDump(rtc::PlatformFile file) override;
+ void StopAecDump() override;
+ bool StartRtcEventLog(rtc::PlatformFile file) override;
+ void StopRtcEventLog() override;
+
+ virtual webrtc::MediaControllerInterface* CreateMediaController() const;
+ virtual rtc::Thread* signaling_thread();
+ virtual rtc::Thread* worker_thread();
+ const Options& options() const { return options_; }
+
+ protected:
+ PeerConnectionFactory();
+ PeerConnectionFactory(
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
+ AudioDeviceModule* default_adm,
+ cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
+ cricket::WebRtcVideoDecoderFactory* video_decoder_factory);
+ virtual ~PeerConnectionFactory();
+
+ private:
+ cricket::MediaEngineInterface* CreateMediaEngine_w();
+
+ bool owns_ptrs_;
+ bool wraps_current_thread_;
+ rtc::Thread* signaling_thread_;
+ rtc::Thread* worker_thread_;
+ Options options_;
+ rtc::scoped_refptr<PortAllocatorFactoryInterface> default_allocator_factory_;
+ // External Audio device used for audio playback.
+ rtc::scoped_refptr<AudioDeviceModule> default_adm_;
+ rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+ // External Video encoder factory. This can be NULL if the client has not
+ // injected any. In that case, video engine will use the internal SW encoder.
+ rtc::scoped_ptr<cricket::WebRtcVideoEncoderFactory>
+ video_encoder_factory_;
+ // External Video decoder factory. This can be NULL if the client has not
+ // injected any. In that case, video engine will use the internal SW decoder.
+ rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory>
+ video_decoder_factory_;
+
+ rtc::scoped_refptr<RefCountedDtlsIdentityStore> dtls_identity_store_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
diff --git a/talk/app/webrtc/peerconnectionfactory_unittest.cc b/talk/app/webrtc/peerconnectionfactory_unittest.cc
new file mode 100644
index 0000000000..f1d5353abd
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactory_unittest.cc
@@ -0,0 +1,426 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
+#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/media/base/fakevideocapturer.h"
+#include "talk/media/webrtc/webrtccommon.h"
+#include "talk/media/webrtc/webrtcvoe.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+
+using webrtc::DataChannelInterface;
+using webrtc::DtlsIdentityStoreInterface;
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::PortAllocatorFactoryInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+
+namespace {
+
+typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
+ StunConfigurations;
+typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
+ TurnConfigurations;
+
+static const char kStunIceServer[] = "stun:stun.l.google.com:19302";
+static const char kTurnIceServer[] = "turn:test%40hello.com@test.com:1234";
+static const char kTurnIceServerWithTransport[] =
+ "turn:test@hello.com?transport=tcp";
+static const char kSecureTurnIceServer[] =
+ "turns:test@hello.com?transport=tcp";
+static const char kSecureTurnIceServerWithoutTransportParam[] =
+ "turns:test_no_transport@hello.com:443";
+static const char kSecureTurnIceServerWithoutTransportAndPortParam[] =
+ "turns:test_no_transport@hello.com";
+static const char kTurnIceServerWithNoUsernameInUri[] =
+ "turn:test.com:1234";
+static const char kTurnPassword[] = "turnpassword";
+static const int kDefaultStunPort = 3478;
+static const int kDefaultStunTlsPort = 5349;
+static const char kTurnUsername[] = "test";
+static const char kStunIceServerWithIPv4Address[] = "stun:1.2.3.4:1234";
+static const char kStunIceServerWithIPv4AddressWithoutPort[] = "stun:1.2.3.4";
+static const char kStunIceServerWithIPv6Address[] = "stun:[2401:fa00:4::]:1234";
+static const char kStunIceServerWithIPv6AddressWithoutPort[] =
+ "stun:[2401:fa00:4::]";
+static const char kTurnIceServerWithIPv6Address[] =
+ "turn:test@[2401:fa00:4::]:1234";
+
+class NullPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+ virtual void OnMessage(const std::string& msg) {}
+ virtual void OnSignalingMessage(const std::string& msg) {}
+ virtual void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {}
+ virtual void OnAddStream(MediaStreamInterface* stream) {}
+ virtual void OnRemoveStream(MediaStreamInterface* stream) {}
+ virtual void OnDataChannel(DataChannelInterface* data_channel) {}
+ virtual void OnRenegotiationNeeded() {}
+ virtual void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {}
+ virtual void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {}
+ virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {}
+};
+
+} // namespace
+
+class PeerConnectionFactoryTest : public testing::Test {
+ void SetUp() {
+ factory_ = webrtc::CreatePeerConnectionFactory(rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ NULL,
+ NULL,
+ NULL);
+
+ ASSERT_TRUE(factory_.get() != NULL);
+ allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
+ }
+
+ protected:
+ void VerifyStunConfigurations(StunConfigurations stun_config) {
+ webrtc::FakePortAllocatorFactory* allocator =
+ static_cast<webrtc::FakePortAllocatorFactory*>(
+ allocator_factory_.get());
+ ASSERT_TRUE(allocator != NULL);
+ EXPECT_EQ(stun_config.size(), allocator->stun_configs().size());
+ for (size_t i = 0; i < stun_config.size(); ++i) {
+ EXPECT_EQ(stun_config[i].server.ToString(),
+ allocator->stun_configs()[i].server.ToString());
+ }
+ }
+
+ void VerifyTurnConfigurations(TurnConfigurations turn_config) {
+ webrtc::FakePortAllocatorFactory* allocator =
+ static_cast<webrtc::FakePortAllocatorFactory*>(
+ allocator_factory_.get());
+ ASSERT_TRUE(allocator != NULL);
+ EXPECT_EQ(turn_config.size(), allocator->turn_configs().size());
+ for (size_t i = 0; i < turn_config.size(); ++i) {
+ EXPECT_EQ(turn_config[i].server.ToString(),
+ allocator->turn_configs()[i].server.ToString());
+ EXPECT_EQ(turn_config[i].username, allocator->turn_configs()[i].username);
+ EXPECT_EQ(turn_config[i].password, allocator->turn_configs()[i].password);
+ EXPECT_EQ(turn_config[i].transport_type,
+ allocator->turn_configs()[i].transport_type);
+ }
+ }
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+ NullPeerConnectionObserver observer_;
+ rtc::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory_;
+};
+
+// Verify creation of PeerConnection using internal ADM, video factory and
+// internal libjingle threads.
+TEST(PeerConnectionFactoryTestInternal, CreatePCUsingInternalModules) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ webrtc::CreatePeerConnectionFactory());
+
+ NullPeerConnectionObserver observer;
+ webrtc::PeerConnectionInterface::IceServers servers;
+
+ rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory->CreatePeerConnection(
+ servers, nullptr, nullptr, dtls_identity_store.Pass(), &observer));
+
+ EXPECT_TRUE(pc.get() != nullptr);
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies the URL's parsed correctly as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServer;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServer;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithTransport;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ StunConfigurations stun_configs;
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
+ "stun.l.google.com", 19302);
+ stun_configs.push_back(stun1);
+ VerifyStunConfigurations(stun_configs);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
+ turn_configs.push_back(turn1);
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+ "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
+ turn_configs.push_back(turn2);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies the list of URL's parsed correctly as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back(kStunIceServer);
+ ice_server.urls.push_back(kTurnIceServer);
+ ice_server.urls.push_back(kTurnIceServerWithTransport);
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ StunConfigurations stun_configs;
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
+ "stun.l.google.com", 19302);
+ stun_configs.push_back(stun1);
+ VerifyStunConfigurations(stun_configs);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
+ turn_configs.push_back(turn1);
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+ "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
+ turn_configs.push_back(turn2);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies the URL's parsed correctly as expected.
+// This version doesn't use RTCConfiguration.
+// TODO(mallinath) - Remove this method after clients start using RTCConfig.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersOldSignature) {
+ webrtc::PeerConnectionInterface::IceServers ice_servers;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServer;
+ ice_servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServer;
+ ice_server.password = kTurnPassword;
+ ice_servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithTransport;
+ ice_server.password = kTurnPassword;
+ ice_servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(ice_servers, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ StunConfigurations stun_configs;
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
+ "stun.l.google.com", 19302);
+ stun_configs.push_back(stun1);
+ VerifyStunConfigurations(stun_configs);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
+ turn_configs.push_back(turn1);
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+ "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
+ turn_configs.push_back(turn2);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServer;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithNoUsernameInUri;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn(
+ "test.com", 1234, kTurnUsername, kTurnPassword, "udp", false);
+ turn_configs.push_back(turn);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+// This test verifies the PeerConnection created properly with TURN url which
+// has transport parameter in it.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kTurnIceServerWithTransport;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn(
+ "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
+ turn_configs.push_back(turn);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kSecureTurnIceServer;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kSecureTurnIceServerWithoutTransportParam;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "hello.com", kDefaultStunTlsPort, "test", kTurnPassword, "tcp", true);
+ turn_configs.push_back(turn1);
+ // TURNS with transport param should be default to tcp.
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+ "hello.com", 443, "test_no_transport", kTurnPassword, "tcp", true);
+ turn_configs.push_back(turn2);
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn3(
+ "hello.com", kDefaultStunTlsPort, "test_no_transport",
+ kTurnPassword, "tcp", true);
+ turn_configs.push_back(turn3);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServerWithIPv4Address;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv4AddressWithoutPort;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv6Address;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv6AddressWithoutPort;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithIPv6Address;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, nullptr,
+ allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ StunConfigurations stun_configs;
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
+ "1.2.3.4", 1234);
+ stun_configs.push_back(stun1);
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun2(
+ "1.2.3.4", 3478);
+ stun_configs.push_back(stun2); // Default port
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun3(
+ "2401:fa00:4::", 1234);
+ stun_configs.push_back(stun3);
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun4(
+ "2401:fa00:4::", 3478);
+ stun_configs.push_back(stun4); // Default port
+ VerifyStunConfigurations(stun_configs);
+
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "2401:fa00:4::", 1234, "test", kTurnPassword, "udp", false);
+ turn_configs.push_back(turn1);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+// This test verifies the captured stream is rendered locally using a
+// local video track.
+TEST_F(PeerConnectionFactoryTest, LocalRendering) {
+ cricket::FakeVideoCapturer* capturer = new cricket::FakeVideoCapturer();
+ // The source take ownership of |capturer|.
+ rtc::scoped_refptr<VideoSourceInterface> source(
+ factory_->CreateVideoSource(capturer, NULL));
+ ASSERT_TRUE(source.get() != NULL);
+ rtc::scoped_refptr<VideoTrackInterface> track(
+ factory_->CreateVideoTrack("testlabel", source));
+ ASSERT_TRUE(track.get() != NULL);
+ FakeVideoTrackRenderer local_renderer(track);
+
+ EXPECT_EQ(0, local_renderer.num_rendered_frames());
+ EXPECT_TRUE(capturer->CaptureFrame());
+ EXPECT_EQ(1, local_renderer.num_rendered_frames());
+
+ track->set_enabled(false);
+ EXPECT_TRUE(capturer->CaptureFrame());
+ EXPECT_EQ(1, local_renderer.num_rendered_frames());
+
+ track->set_enabled(true);
+ EXPECT_TRUE(capturer->CaptureFrame());
+ EXPECT_EQ(2, local_renderer.num_rendered_frames());
+}
+
diff --git a/talk/app/webrtc/peerconnectionfactoryproxy.h b/talk/app/webrtc/peerconnectionfactoryproxy.h
new file mode 100644
index 0000000000..5e924df3a1
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactoryproxy.h
@@ -0,0 +1,83 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONFACTORYPROXY_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORYPROXY_H_
+
+#include <string>
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/proxy.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(PeerConnectionFactory)
+ PROXY_METHOD1(void, SetOptions, const Options&)
+ // Can't use PROXY_METHOD5 because scoped_ptr must be Pass()ed.
+ // TODO(tommi,hbos): Use of templates to support scoped_ptr?
+ rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& a1,
+ const MediaConstraintsInterface* a2,
+ PortAllocatorFactoryInterface* a3,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> a4,
+ PeerConnectionObserver* a5) override {
+ return owner_thread_->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
+ rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot, this,
+ a1, a2, a3, a4.release(), a5));
+ }
+ PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
+ CreateLocalMediaStream, const std::string&)
+ PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
+ CreateAudioSource, const MediaConstraintsInterface*)
+ PROXY_METHOD2(rtc::scoped_refptr<VideoSourceInterface>,
+ CreateVideoSource, cricket::VideoCapturer*,
+ const MediaConstraintsInterface*)
+ PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
+ CreateVideoTrack, const std::string&, VideoSourceInterface*)
+ PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
+ CreateAudioTrack, const std::string&, AudioSourceInterface*)
+ PROXY_METHOD1(bool, StartAecDump, rtc::PlatformFile)
+ PROXY_METHOD0(void, StopAecDump)
+ PROXY_METHOD1(bool, StartRtcEventLog, rtc::PlatformFile)
+ PROXY_METHOD0(void, StopRtcEventLog)
+
+ private:
+ rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_ot(
+ const PeerConnectionInterface::RTCConfiguration& a1,
+ const MediaConstraintsInterface* a2,
+ PortAllocatorFactoryInterface* a3,
+ DtlsIdentityStoreInterface* a4,
+ PeerConnectionObserver* a5) {
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
+ return c_->CreatePeerConnection(a1, a2, a3, ptr_a4.Pass(), a5);
+ }
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PEERCONNECTIONFACTORYPROXY_H_
diff --git a/talk/app/webrtc/peerconnectioninterface.h b/talk/app/webrtc/peerconnectioninterface.h
new file mode 100644
index 0000000000..77caa9d78b
--- /dev/null
+++ b/talk/app/webrtc/peerconnectioninterface.h
@@ -0,0 +1,667 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the PeerConnection interface as defined in
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html#peer-to-peer-connections.
+// Applications must use this interface to implement peerconnection.
+// PeerConnectionFactory class provides factory methods to create
+// peerconnection, mediastream and media tracks objects.
+//
+// The Following steps are needed to setup a typical call using Jsep.
+// 1. Create a PeerConnectionFactoryInterface. Check constructors for more
+// information about input parameters.
+// 2. Create a PeerConnection object. Provide a configuration string which
+// points either to stun or turn server to generate ICE candidates and provide
+// an object that implements the PeerConnectionObserver interface.
+// 3. Create local MediaStream and MediaTracks using the PeerConnectionFactory
+// and add it to PeerConnection by calling AddStream.
+// 4. Create an offer and serialize it and send it to the remote peer.
+// 5. Once an ice candidate have been found PeerConnection will call the
+// observer function OnIceCandidate. The candidates must also be serialized and
+// sent to the remote peer.
+// 6. Once an answer is received from the remote peer, call
+// SetLocalSessionDescription with the offer and SetRemoteSessionDescription
+// with the remote answer.
+// 7. Once a remote candidate is received from the remote peer, provide it to
+// the peerconnection by calling AddIceCandidate.
+
+
+// The Receiver of a call can decide to accept or reject the call.
+// This decision will be taken by the application not peerconnection.
+// If application decides to accept the call
+// 1. Create PeerConnectionFactoryInterface if it doesn't exist.
+// 2. Create a new PeerConnection.
+// 3. Provide the remote offer to the new PeerConnection object by calling
+// SetRemoteSessionDescription.
+// 4. Generate an answer to the remote offer by calling CreateAnswer and send it
+// back to the remote peer.
+// 5. Provide the local answer to the new PeerConnection by calling
+// SetLocalSessionDescription with the answer.
+// 6. Provide the remote ice candidates by calling AddIceCandidate.
+// 7. Once a candidate have been found PeerConnection will call the observer
+// function OnIceCandidate. Send these candidates to the remote peer.
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/datachannelinterface.h"
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/dtmfsenderinterface.h"
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/jsep.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/statstypes.h"
+#include "talk/app/webrtc/umametrics.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+class SSLIdentity;
+class Thread;
+}
+
+namespace cricket {
+class PortAllocator;
+class WebRtcVideoDecoderFactory;
+class WebRtcVideoEncoderFactory;
+}
+
+namespace webrtc {
+class AudioDeviceModule;
+class MediaConstraintsInterface;
+
+// MediaStream container interface.
+class StreamCollectionInterface : public rtc::RefCountInterface {
+ public:
+ // TODO(ronghuawu): Update the function names to c++ style, e.g. find -> Find.
+ virtual size_t count() = 0;
+ virtual MediaStreamInterface* at(size_t index) = 0;
+ virtual MediaStreamInterface* find(const std::string& label) = 0;
+ virtual MediaStreamTrackInterface* FindAudioTrack(
+ const std::string& id) = 0;
+ virtual MediaStreamTrackInterface* FindVideoTrack(
+ const std::string& id) = 0;
+
+ protected:
+ // Dtor protected as objects shouldn't be deleted via this interface.
+ ~StreamCollectionInterface() {}
+};
+
+class StatsObserver : public rtc::RefCountInterface {
+ public:
+ virtual void OnComplete(const StatsReports& reports) = 0;
+
+ protected:
+ virtual ~StatsObserver() {}
+};
+
+class MetricsObserverInterface : public rtc::RefCountInterface {
+ public:
+
+ // |type| is the type of the enum counter to be incremented. |counter|
+ // is the particular counter in that type. |counter_max| is the next sequence
+ // number after the highest counter.
+ virtual void IncrementEnumCounter(PeerConnectionEnumCounterType type,
+ int counter,
+ int counter_max) {}
+
+ // This is used to handle sparse counters like SSL cipher suites.
+ // TODO(guoweis): Remove the implementation once the dependency's interface
+ // definition is updated.
+ virtual void IncrementSparseEnumCounter(PeerConnectionEnumCounterType type,
+ int counter) {
+ IncrementEnumCounter(type, counter, 0 /* Ignored */);
+ }
+
+ virtual void AddHistogramSample(PeerConnectionMetricsName type,
+ int value) = 0;
+
+ protected:
+ virtual ~MetricsObserverInterface() {}
+};
+
+typedef MetricsObserverInterface UMAObserver;
+
+class PeerConnectionInterface : public rtc::RefCountInterface {
+ public:
+ // See http://dev.w3.org/2011/webrtc/editor/webrtc.html#state-definitions .
+ enum SignalingState {
+ kStable,
+ kHaveLocalOffer,
+ kHaveLocalPrAnswer,
+ kHaveRemoteOffer,
+ kHaveRemotePrAnswer,
+ kClosed,
+ };
+
+ // TODO(bemasc): Remove IceState when callers are changed to
+ // IceConnection/GatheringState.
+ enum IceState {
+ kIceNew,
+ kIceGathering,
+ kIceWaiting,
+ kIceChecking,
+ kIceConnected,
+ kIceCompleted,
+ kIceFailed,
+ kIceClosed,
+ };
+
+ enum IceGatheringState {
+ kIceGatheringNew,
+ kIceGatheringGathering,
+ kIceGatheringComplete
+ };
+
+ enum IceConnectionState {
+ kIceConnectionNew,
+ kIceConnectionChecking,
+ kIceConnectionConnected,
+ kIceConnectionCompleted,
+ kIceConnectionFailed,
+ kIceConnectionDisconnected,
+ kIceConnectionClosed,
+ kIceConnectionMax,
+ };
+
+ struct IceServer {
+ // TODO(jbauch): Remove uri when all code using it has switched to urls.
+ std::string uri;
+ std::vector<std::string> urls;
+ std::string username;
+ std::string password;
+ };
+ typedef std::vector<IceServer> IceServers;
+
+ enum IceTransportsType {
+ // TODO(pthatcher): Rename these kTransporTypeXXX, but update
+ // Chromium at the same time.
+ kNone,
+ kRelay,
+ kNoHost,
+ kAll
+ };
+
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-08#section-4.1.1
+ enum BundlePolicy {
+ kBundlePolicyBalanced,
+ kBundlePolicyMaxBundle,
+ kBundlePolicyMaxCompat
+ };
+
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-09#section-4.1.1
+ enum RtcpMuxPolicy {
+ kRtcpMuxPolicyNegotiate,
+ kRtcpMuxPolicyRequire,
+ };
+
+ enum TcpCandidatePolicy {
+ kTcpCandidatePolicyEnabled,
+ kTcpCandidatePolicyDisabled
+ };
+
+ enum ContinualGatheringPolicy {
+ GATHER_ONCE,
+ GATHER_CONTINUALLY
+ };
+
+ // TODO(hbos): Change into class with private data and public getters.
+ struct RTCConfiguration {
+ static const int kUndefined = -1;
+ // Default maximum number of packets in the audio jitter buffer.
+ static const int kAudioJitterBufferMaxPackets = 50;
+ // TODO(pthatcher): Rename this ice_transport_type, but update
+ // Chromium at the same time.
+ IceTransportsType type;
+ // TODO(pthatcher): Rename this ice_servers, but update Chromium
+ // at the same time.
+ IceServers servers;
+ // A localhost candidate is signaled whenever a candidate with the any
+ // address is allocated.
+ bool enable_localhost_ice_candidate;
+ BundlePolicy bundle_policy;
+ RtcpMuxPolicy rtcp_mux_policy;
+ TcpCandidatePolicy tcp_candidate_policy;
+ int audio_jitter_buffer_max_packets;
+ bool audio_jitter_buffer_fast_accelerate;
+ int ice_connection_receiving_timeout;
+ ContinualGatheringPolicy continual_gathering_policy;
+ std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
+
+ RTCConfiguration()
+ : type(kAll),
+ enable_localhost_ice_candidate(false),
+ bundle_policy(kBundlePolicyBalanced),
+ rtcp_mux_policy(kRtcpMuxPolicyNegotiate),
+ tcp_candidate_policy(kTcpCandidatePolicyEnabled),
+ audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
+ audio_jitter_buffer_fast_accelerate(false),
+ ice_connection_receiving_timeout(kUndefined),
+ continual_gathering_policy(GATHER_ONCE) {}
+ };
+
+ struct RTCOfferAnswerOptions {
+ static const int kUndefined = -1;
+ static const int kMaxOfferToReceiveMedia = 1;
+
+ // The default value for constraint offerToReceiveX:true.
+ static const int kOfferToReceiveMediaTrue = 1;
+
+ int offer_to_receive_video;
+ int offer_to_receive_audio;
+ bool voice_activity_detection;
+ bool ice_restart;
+ bool use_rtp_mux;
+
+ RTCOfferAnswerOptions()
+ : offer_to_receive_video(kUndefined),
+ offer_to_receive_audio(kUndefined),
+ voice_activity_detection(true),
+ ice_restart(false),
+ use_rtp_mux(true) {}
+
+ RTCOfferAnswerOptions(int offer_to_receive_video,
+ int offer_to_receive_audio,
+ bool voice_activity_detection,
+ bool ice_restart,
+ bool use_rtp_mux)
+ : offer_to_receive_video(offer_to_receive_video),
+ offer_to_receive_audio(offer_to_receive_audio),
+ voice_activity_detection(voice_activity_detection),
+ ice_restart(ice_restart),
+ use_rtp_mux(use_rtp_mux) {}
+ };
+
+ // Used by GetStats to decide which stats to include in the stats reports.
+ // |kStatsOutputLevelStandard| includes the standard stats for Javascript API;
+ // |kStatsOutputLevelDebug| includes both the standard stats and additional
+ // stats for debugging purposes.
+ enum StatsOutputLevel {
+ kStatsOutputLevelStandard,
+ kStatsOutputLevelDebug,
+ };
+
+ // Accessor methods to active local streams.
+ virtual rtc::scoped_refptr<StreamCollectionInterface>
+ local_streams() = 0;
+
+ // Accessor methods to remote streams.
+ virtual rtc::scoped_refptr<StreamCollectionInterface>
+ remote_streams() = 0;
+
+ // Add a new MediaStream to be sent on this PeerConnection.
+ // Note that a SessionDescription negotiation is needed before the
+ // remote peer can receive the stream.
+ virtual bool AddStream(MediaStreamInterface* stream) = 0;
+
+ // Remove a MediaStream from this PeerConnection.
+ // Note that a SessionDescription negotiation is need before the
+ // remote peer is notified.
+ virtual void RemoveStream(MediaStreamInterface* stream) = 0;
+
+ // Returns pointer to the created DtmfSender on success.
+ // Otherwise returns NULL.
+ virtual rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
+ AudioTrackInterface* track) = 0;
+
+ // TODO(deadbeef): Make these pure virtual once all subclasses implement them.
+ virtual std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+ const {
+ return std::vector<rtc::scoped_refptr<RtpSenderInterface>>();
+ }
+
+ virtual std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+ const {
+ return std::vector<rtc::scoped_refptr<RtpReceiverInterface>>();
+ }
+
+ virtual bool GetStats(StatsObserver* observer,
+ MediaStreamTrackInterface* track,
+ StatsOutputLevel level) = 0;
+
+ virtual rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const DataChannelInit* config) = 0;
+
+ virtual const SessionDescriptionInterface* local_description() const = 0;
+ virtual const SessionDescriptionInterface* remote_description() const = 0;
+
+ // Create a new offer.
+ // The CreateSessionDescriptionObserver callback will be called when done.
+ virtual void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints) {}
+
+ // TODO(jiayl): remove the default impl and the old interface when chromium
+ // code is updated.
+ virtual void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) {}
+
+ // Create an answer to an offer.
+ // The CreateSessionDescriptionObserver callback will be called when done.
+ virtual void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints) = 0;
+ // Sets the local session description.
+ // JsepInterface takes the ownership of |desc| even if it fails.
+ // The |observer| callback will be called when done.
+ virtual void SetLocalDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) = 0;
+ // Sets the remote session description.
+ // JsepInterface takes the ownership of |desc| even if it fails.
+ // The |observer| callback will be called when done.
+ virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) = 0;
+ // Restarts or updates the ICE Agent process of gathering local candidates
+ // and pinging remote candidates.
+ // TODO(deadbeef): Remove once Chrome is moved over to SetConfiguration.
+ virtual bool UpdateIce(const IceServers& configuration,
+ const MediaConstraintsInterface* constraints) {
+ return false;
+ }
+ // Sets the PeerConnection's global configuration to |config|.
+ // Any changes to STUN/TURN servers or ICE candidate policy will affect the
+ // next gathering phase, and cause the next call to createOffer to generate
+ // new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
+ // cannot be changed with this method.
+ // TODO(deadbeef): Make this pure virtual once all Chrome subclasses of
+ // PeerConnectionInterface implement it.
+ virtual bool SetConfiguration(
+ const PeerConnectionInterface::RTCConfiguration& config) {
+ return false;
+ }
+ // Provides a remote candidate to the ICE Agent.
+ // A copy of the |candidate| will be created and added to the remote
+ // description. So the caller of this method still has the ownership of the
+ // |candidate|.
+ // TODO(ronghuawu): Consider to change this so that the AddIceCandidate will
+ // take the ownership of the |candidate|.
+ virtual bool AddIceCandidate(const IceCandidateInterface* candidate) = 0;
+
+ virtual void RegisterUMAObserver(UMAObserver* observer) = 0;
+
+ // Returns the current SignalingState.
+ virtual SignalingState signaling_state() = 0;
+
+ // TODO(bemasc): Remove ice_state when callers are changed to
+ // IceConnection/GatheringState.
+ // Returns the current IceState.
+ virtual IceState ice_state() = 0;
+ virtual IceConnectionState ice_connection_state() = 0;
+ virtual IceGatheringState ice_gathering_state() = 0;
+
+ // Terminates all media and closes the transport.
+ virtual void Close() = 0;
+
+ protected:
+ // Dtor protected as objects shouldn't be deleted via this interface.
+ ~PeerConnectionInterface() {}
+};
+
+// PeerConnection callback interface. Application should implement these
+// methods.
+class PeerConnectionObserver {
+ public:
+ enum StateType {
+ kSignalingState,
+ kIceState,
+ };
+
+ // Triggered when the SignalingState changed.
+ virtual void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {}
+
+ // Triggered when SignalingState or IceState have changed.
+ // TODO(bemasc): Remove once callers transition to OnSignalingChange.
+ virtual void OnStateChange(StateType state_changed) {}
+
+ // Triggered when media is received on a new stream from remote peer.
+ virtual void OnAddStream(MediaStreamInterface* stream) = 0;
+
+ // Triggered when a remote peer close a stream.
+ virtual void OnRemoveStream(MediaStreamInterface* stream) = 0;
+
+ // Triggered when a remote peer open a data channel.
+ virtual void OnDataChannel(DataChannelInterface* data_channel) = 0;
+
+ // Triggered when renegotiation is needed, for example the ICE has restarted.
+ virtual void OnRenegotiationNeeded() = 0;
+
+ // Called any time the IceConnectionState changes
+ virtual void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {}
+
+ // Called any time the IceGatheringState changes
+ virtual void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {}
+
+ // New Ice candidate have been found.
+ virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0;
+
+ // TODO(bemasc): Remove this once callers transition to OnIceGatheringChange.
+ // All Ice candidates have been found.
+ virtual void OnIceComplete() {}
+
+ // Called when the ICE connection receiving status changes.
+ virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
+ protected:
+ // Dtor protected as objects shouldn't be deleted via this interface.
+ ~PeerConnectionObserver() {}
+};
+
+// Factory class used for creating cricket::PortAllocator that is used
+// for ICE negotiation.
+class PortAllocatorFactoryInterface : public rtc::RefCountInterface {
+ public:
+ struct StunConfiguration {
+ StunConfiguration(const std::string& address, int port)
+ : server(address, port) {}
+ // STUN server address and port.
+ rtc::SocketAddress server;
+ };
+
+ struct TurnConfiguration {
+ TurnConfiguration(const std::string& address,
+ int port,
+ const std::string& username,
+ const std::string& password,
+ const std::string& transport_type,
+ bool secure)
+ : server(address, port),
+ username(username),
+ password(password),
+ transport_type(transport_type),
+ secure(secure) {}
+ rtc::SocketAddress server;
+ std::string username;
+ std::string password;
+ std::string transport_type;
+ bool secure;
+ };
+
+ virtual cricket::PortAllocator* CreatePortAllocator(
+ const std::vector<StunConfiguration>& stun_servers,
+ const std::vector<TurnConfiguration>& turn_configurations) = 0;
+
+ // TODO(phoglund): Make pure virtual when Chrome's factory implements this.
+ // After this method is called, the port allocator should consider loopback
+ // network interfaces as well.
+ virtual void SetNetworkIgnoreMask(int network_ignore_mask) {
+ }
+
+ protected:
+ PortAllocatorFactoryInterface() {}
+ ~PortAllocatorFactoryInterface() {}
+};
+
+// PeerConnectionFactoryInterface is the factory interface use for creating
+// PeerConnection, MediaStream and media tracks.
+// PeerConnectionFactoryInterface will create required libjingle threads,
+// socket and network manager factory classes for networking.
+// If an application decides to provide its own threads and network
+// implementation of these classes it should use the alternate
+// CreatePeerConnectionFactory method which accepts threads as input and use the
+// CreatePeerConnection version that takes a PortAllocatorFactoryInterface as
+// argument.
+class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
+ public:
+ class Options {
+ public:
+ Options() :
+ disable_encryption(false),
+ disable_sctp_data_channels(false),
+ disable_network_monitor(false),
+ network_ignore_mask(rtc::kDefaultNetworkIgnoreMask),
+ ssl_max_version(rtc::SSL_PROTOCOL_DTLS_10) {
+ }
+ bool disable_encryption;
+ bool disable_sctp_data_channels;
+ bool disable_network_monitor;
+
+ // Sets the network types to ignore. For instance, calling this with
+ // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and
+ // loopback interfaces.
+ int network_ignore_mask;
+
+ // Sets the maximum supported protocol version. The highest version
+ // supported by both ends will be used for the connection, i.e. if one
+ // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used.
+ rtc::SSLProtocolVersion ssl_max_version;
+ };
+
+ virtual void SetOptions(const Options& options) = 0;
+
+ virtual rtc::scoped_refptr<PeerConnectionInterface>
+ CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) = 0;
+
+ // TODO(hbos): Remove below version after clients are updated to above method.
+ // In latest W3C WebRTC draft, PC constructor will take RTCConfiguration,
+ // and not IceServers. RTCConfiguration is made up of ice servers and
+ // ice transport type.
+ // http://dev.w3.org/2011/webrtc/editor/webrtc.html
+ inline rtc::scoped_refptr<PeerConnectionInterface>
+ CreatePeerConnection(
+ const PeerConnectionInterface::IceServers& servers,
+ const MediaConstraintsInterface* constraints,
+ PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) {
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ rtc_config.servers = servers;
+ return CreatePeerConnection(rtc_config, constraints, allocator_factory,
+ dtls_identity_store.Pass(), observer);
+ }
+
+ virtual rtc::scoped_refptr<MediaStreamInterface>
+ CreateLocalMediaStream(const std::string& label) = 0;
+
+ // Creates a AudioSourceInterface.
+ // |constraints| decides audio processing settings but can be NULL.
+ virtual rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
+ const MediaConstraintsInterface* constraints) = 0;
+
+ // Creates a VideoSourceInterface. The new source take ownership of
+ // |capturer|. |constraints| decides video resolution and frame rate but can
+ // be NULL.
+ virtual rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
+ cricket::VideoCapturer* capturer,
+ const MediaConstraintsInterface* constraints) = 0;
+
+ // Creates a new local VideoTrack. The same |source| can be used in several
+ // tracks.
+ virtual rtc::scoped_refptr<VideoTrackInterface>
+ CreateVideoTrack(const std::string& label,
+ VideoSourceInterface* source) = 0;
+
+ // Creates an new AudioTrack. At the moment |source| can be NULL.
+ virtual rtc::scoped_refptr<AudioTrackInterface>
+ CreateAudioTrack(const std::string& label,
+ AudioSourceInterface* source) = 0;
+
+ // Starts AEC dump using existing file. Takes ownership of |file| and passes
+ // it on to VoiceEngine (via other objects) immediately, which will take
+ // the ownerhip. If the operation fails, the file will be closed.
+ // TODO(grunell): Remove when Chromium has started to use AEC in each source.
+ // http://crbug.com/264611.
+ virtual bool StartAecDump(rtc::PlatformFile file) = 0;
+
+ // Stops logging the AEC dump.
+ virtual void StopAecDump() = 0;
+
+ // Starts RtcEventLog using existing file. Takes ownership of |file| and
+ // passes it on to VoiceEngine, which will take the ownership. If the
+ // operation fails the file will be closed. The logging will stop
+ // automatically after 10 minutes have passed, or when the StopRtcEventLog
+ // function is called.
+ // This function as well as the StopRtcEventLog don't really belong on this
+ // interface, this is a temporary solution until we move the logging object
+ // from inside voice engine to webrtc::Call, which will happen when the VoE
+ // restructuring effort is further along.
+ // TODO(ivoc): Move this into being:
+ // PeerConnection => MediaController => webrtc::Call.
+ virtual bool StartRtcEventLog(rtc::PlatformFile file) = 0;
+
+ // Stops logging the RtcEventLog.
+ virtual void StopRtcEventLog() = 0;
+
+ protected:
+ // Dtor and ctor protected as objects shouldn't be created or deleted via
+ // this interface.
+ PeerConnectionFactoryInterface() {}
+ ~PeerConnectionFactoryInterface() {} // NOLINT
+};
+
+// Create a new instance of PeerConnectionFactoryInterface.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory();
+
+// Create a new instance of PeerConnectionFactoryInterface.
+// Ownership of |factory|, |default_adm|, and optionally |encoder_factory| and
+// |decoder_factory| transferred to the returned factory.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
+ AudioDeviceModule* default_adm,
+ cricket::WebRtcVideoEncoderFactory* encoder_factory,
+ cricket::WebRtcVideoDecoderFactory* decoder_factory);
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
diff --git a/talk/app/webrtc/peerconnectioninterface_unittest.cc b/talk/app/webrtc/peerconnectioninterface_unittest.cc
new file mode 100644
index 0000000000..63163fd651
--- /dev/null
+++ b/talk/app/webrtc/peerconnectioninterface_unittest.cc
@@ -0,0 +1,2317 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
+#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
+#include "talk/app/webrtc/test/testsdpstrings.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/base/fakevideocapturer.h"
+#include "talk/media/sctp/sctpdataengine.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStreamLabel3[] = "local_stream_3";
+static const int kDefaultStunPort = 3478;
+static const char kStunAddressOnly[] = "stun:address";
+static const char kStunInvalidPort[] = "stun:address:-1";
+static const char kStunAddressPortAndMore1[] = "stun:address:port:more";
+static const char kStunAddressPortAndMore2[] = "stun:address:port more";
+static const char kTurnIceServerUri[] = "turn:user@turn.example.org";
+static const char kTurnUsername[] = "user";
+static const char kTurnPassword[] = "password";
+static const char kTurnHostname[] = "turn.example.org";
+static const uint32_t kTimeout = 10000U;
+
+static const char kStreams[][8] = {"stream1", "stream2"};
+static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
+static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
+
+static const char kRecvonly[] = "recvonly";
+static const char kSendrecv[] = "sendrecv";
+
+// Reference SDP with a MediaStream with label "stream1" and audio track with
+// id "audio_1" and a video track with id "video_1;
+static const char kSdpStringWithStream1[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 mslabel:stream1\r\n"
+ "a=ssrc:1 label:audiotrack0\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 mslabel:stream1\r\n"
+ "a=ssrc:2 label:videotrack0\r\n";
+
+// Reference SDP with two MediaStreams with label "stream1" and "stream2. Each
+// MediaStreams have one audio track and one video track.
+// This uses MSID.
+static const char kSdpStringWithStream1And2[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=msid-semantic: WMS stream1 stream2\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+ "a=ssrc:3 cname:stream2\r\n"
+ "a=ssrc:3 msid:stream2 audiotrack1\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/0\r\n"
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n"
+ "a=ssrc:4 cname:stream2\r\n"
+ "a=ssrc:4 msid:stream2 videotrack1\r\n";
+
+// Reference SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams. Msid is supported.
+static const char kSdpStringWithMsidWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=msid-semantic: WMS\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams and audio only.
+static const char kSdpStringWithoutStreamsAudioOnly[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n";
+
+// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringSendOnlyWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=sendonly\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=sendonly\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringInit[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=msid-semantic: WMS\r\n";
+
+static const char kSdpStringAudio[] =
+ "m=audio 1 RTP/AVPF 103\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n";
+
+static const char kSdpStringVideo[] =
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringMs1Audio0[] =
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n";
+
+static const char kSdpStringMs1Video0[] =
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n";
+
+static const char kSdpStringMs1Audio1[] =
+ "a=ssrc:3 cname:stream1\r\n"
+ "a=ssrc:3 msid:stream1 audiotrack1\r\n";
+
+static const char kSdpStringMs1Video1[] =
+ "a=ssrc:4 cname:stream1\r\n"
+ "a=ssrc:4 msid:stream1 videotrack1\r\n";
+
+#define MAYBE_SKIP_TEST(feature) \
+ if (!(feature())) { \
+ LOG(LS_INFO) << "Feature disabled... skipping"; \
+ return; \
+ }
+
+using rtc::scoped_ptr;
+using rtc::scoped_refptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrack;
+using webrtc::AudioTrackInterface;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInterface;
+using webrtc::FakeConstraints;
+using webrtc::FakePortAllocatorFactory;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStream;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockCreateSessionDescriptionObserver;
+using webrtc::MockDataChannelObserver;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::MockStatsObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::PortAllocatorFactoryInterface;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SdpParseError;
+using webrtc::SessionDescriptionInterface;
+using webrtc::StreamCollection;
+using webrtc::StreamCollectionInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrack;
+using webrtc::VideoTrackInterface;
+
+typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+
+namespace {
+
+// Gets the first ssrc of given content type from the ContentInfo.
+bool GetFirstSsrc(const cricket::ContentInfo* content_info, int* ssrc) {
+ if (!content_info || !ssrc) {
+ return false;
+ }
+ const cricket::MediaContentDescription* media_desc =
+ static_cast<const cricket::MediaContentDescription*>(
+ content_info->description);
+ if (!media_desc || media_desc->streams().empty()) {
+ return false;
+ }
+ *ssrc = media_desc->streams().begin()->first_ssrc();
+ return true;
+}
+
+void SetSsrcToZero(std::string* sdp) {
+ const char kSdpSsrcAtribute[] = "a=ssrc:";
+ const char kSdpSsrcAtributeZero[] = "a=ssrc:0";
+ size_t ssrc_pos = 0;
+ while ((ssrc_pos = sdp->find(kSdpSsrcAtribute, ssrc_pos)) !=
+ std::string::npos) {
+ size_t end_ssrc = sdp->find(" ", ssrc_pos);
+ sdp->replace(ssrc_pos, end_ssrc - ssrc_pos, kSdpSsrcAtributeZero);
+ ssrc_pos = end_ssrc;
+ }
+}
+
+// Check if |streams| contains the specified track.
+bool ContainsTrack(const std::vector<cricket::StreamParams>& streams,
+ const std::string& stream_label,
+ const std::string& track_id) {
+ for (const cricket::StreamParams& params : streams) {
+ if (params.sync_label == stream_label && params.id == track_id) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Check if |senders| contains the specified sender, by id.
+bool ContainsSender(
+ const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+ const std::string& id) {
+ for (const auto& sender : senders) {
+ if (sender->id() == id) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Create a collection of streams.
+// CreateStreamCollection(1) creates a collection that
+// correspond to kSdpStringWithStream1.
+// CreateStreamCollection(2) correspond to kSdpStringWithStream1And2.
+rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
+ int number_of_streams) {
+ rtc::scoped_refptr<StreamCollection> local_collection(
+ StreamCollection::Create());
+
+ for (int i = 0; i < number_of_streams; ++i) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+ webrtc::MediaStream::Create(kStreams[i]));
+
+ // Add a local audio track.
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(kAudioTracks[i], nullptr));
+ stream->AddTrack(audio_track);
+
+ // Add a local video track.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(kVideoTracks[i], nullptr));
+ stream->AddTrack(video_track);
+
+ local_collection->AddStream(stream);
+ }
+ return local_collection;
+}
+
+// Check equality of StreamCollections.
+bool CompareStreamCollections(StreamCollectionInterface* s1,
+ StreamCollectionInterface* s2) {
+ if (s1 == nullptr || s2 == nullptr || s1->count() != s2->count()) {
+ return false;
+ }
+
+ for (size_t i = 0; i != s1->count(); ++i) {
+ if (s1->at(i)->label() != s2->at(i)->label()) {
+ return false;
+ }
+ webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
+ webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
+ webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
+ webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();
+
+ if (audio_tracks1.size() != audio_tracks2.size()) {
+ return false;
+ }
+ for (size_t j = 0; j != audio_tracks1.size(); ++j) {
+ if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) {
+ return false;
+ }
+ }
+ if (video_tracks1.size() != video_tracks2.size()) {
+ return false;
+ }
+ for (size_t j = 0; j != video_tracks1.size(); ++j) {
+ if (video_tracks1[j]->id() != video_tracks2[j]->id()) {
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+class MockPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+ MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
+ ~MockPeerConnectionObserver() {
+ }
+ void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
+ pc_ = pc;
+ if (pc) {
+ state_ = pc_->signaling_state();
+ }
+ }
+ virtual void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ EXPECT_EQ(pc_->signaling_state(), new_state);
+ state_ = new_state;
+ }
+ // TODO(bemasc): Remove this once callers transition to OnIceGatheringChange.
+ virtual void OnStateChange(StateType state_changed) {
+ if (pc_.get() == NULL)
+ return;
+ switch (state_changed) {
+ case kSignalingState:
+ // OnSignalingChange and OnStateChange(kSignalingState) should always
+ // be called approximately simultaneously. To ease testing, we require
+ // that they always be called in that order. This check verifies
+ // that OnSignalingChange has just been called.
+ EXPECT_EQ(pc_->signaling_state(), state_);
+ break;
+ case kIceState:
+ ADD_FAILURE();
+ break;
+ default:
+ ADD_FAILURE();
+ break;
+ }
+ }
+
+ MediaStreamInterface* RemoteStream(const std::string& label) {
+ return remote_streams_->find(label);
+ }
+ StreamCollectionInterface* remote_streams() const { return remote_streams_; }
+ virtual void OnAddStream(MediaStreamInterface* stream) {
+ last_added_stream_ = stream;
+ remote_streams_->AddStream(stream);
+ }
+ virtual void OnRemoveStream(MediaStreamInterface* stream) {
+ last_removed_stream_ = stream;
+ remote_streams_->RemoveStream(stream);
+ }
+ virtual void OnRenegotiationNeeded() {
+ renegotiation_needed_ = true;
+ }
+ virtual void OnDataChannel(DataChannelInterface* data_channel) {
+ last_datachannel_ = data_channel;
+ }
+
+ // ICE state callbacks: each verifies that the PeerConnection already
+ // reflects the state being reported.
+ virtual void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ EXPECT_EQ(pc_->ice_connection_state(), new_state);
+ }
+ virtual void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ EXPECT_EQ(pc_->ice_gathering_state(), new_state);
+ }
+ // Round-trips each candidate through its SDP string form to verify that
+ // candidates we generate can be serialized and re-parsed.
+ virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ EXPECT_NE(PeerConnectionInterface::kIceGatheringNew,
+ pc_->ice_gathering_state());
+
+ std::string sdp;
+ EXPECT_TRUE(candidate->ToString(&sdp));
+ EXPECT_LT(0u, sdp.size());
+ last_candidate_.reset(webrtc::CreateIceCandidate(candidate->sdp_mid(),
+ candidate->sdp_mline_index(), sdp, NULL));
+ EXPECT_TRUE(last_candidate_.get() != NULL);
+ }
+ // TODO(bemasc): Remove this once callers transition to OnSignalingChange.
+ // Legacy "gathering done" callback; sets the flag tests wait on.
+ virtual void OnIceComplete() {
+ ice_complete_ = true;
+ // OnIceGatheringChange(IceGatheringCompleted) and OnIceComplete() should
+ // be called approximately simultaneously. For ease of testing, this
+ // check additionally requires that they be called in the above order.
+ EXPECT_EQ(PeerConnectionInterface::kIceGatheringComplete,
+ pc_->ice_gathering_state());
+ }
+
+ // Returns the label of the last added stream.
+ // Empty string if no stream has been added.
+ std::string GetLastAddedStreamLabel() {
+ if (last_added_stream_.get())
+ return last_added_stream_->label();
+ return "";
+ }
+ // Returns the label of the last removed stream, or "" if none was removed.
+ std::string GetLastRemovedStreamLabel() {
+ if (last_removed_stream_.get())
+ return last_removed_stream_->label();
+ return "";
+ }
+
+ // Connection under observation; also used to cross-check reported states.
+ scoped_refptr<PeerConnectionInterface> pc_;
+ // Last signaling state seen via OnSignalingChange/SetPeerConnectionInterface.
+ PeerConnectionInterface::SignalingState state_;
+ // Most recent candidate, re-parsed from SDP in OnIceCandidate.
+ scoped_ptr<IceCandidateInterface> last_candidate_;
+ // Most recent remote data channel announced via OnDataChannel.
+ scoped_refptr<DataChannelInterface> last_datachannel_;
+ rtc::scoped_refptr<StreamCollection> remote_streams_;
+ bool renegotiation_needed_ = false;
+ bool ice_complete_ = false;
+
+ private:
+ scoped_refptr<MediaStreamInterface> last_added_stream_;
+ scoped_refptr<MediaStreamInterface> last_removed_stream_;
+};
+
+} // namespace
+
+// Test fixture for loopback PeerConnection tests. Provides helpers for
+// creating connections (with optional ICE servers and constraints), adding
+// local streams, and driving offer/answer exchanges against ourselves.
+class PeerConnectionInterfaceTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ pc_factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), NULL, NULL,
+ NULL);
+ ASSERT_TRUE(pc_factory_.get() != NULL);
+ }
+
+ // Convenience overloads: no ICE servers and/or no constraints.
+ void CreatePeerConnection() {
+ CreatePeerConnection("", "", NULL);
+ }
+
+ void CreatePeerConnection(webrtc::MediaConstraintsInterface* constraints) {
+ CreatePeerConnection("", "", constraints);
+ }
+
+ // Creates |pc_| with an optional ICE server (|uri|/|password|) and optional
+ // |constraints|, wiring it to |observer_|.
+ void CreatePeerConnection(const std::string& uri,
+ const std::string& password,
+ webrtc::MediaConstraintsInterface* constraints) {
+ PeerConnectionInterface::IceServer server;
+ PeerConnectionInterface::IceServers servers;
+ if (!uri.empty()) {
+ server.uri = uri;
+ server.password = password;
+ servers.push_back(server);
+ }
+
+ port_allocator_factory_ = FakePortAllocatorFactory::Create();
+
+ // DTLS does not work in a loopback call, so is disabled for most of the
+ // tests in this file. We only create a FakeIdentityService if the test
+ // explicitly sets the constraint.
+ FakeConstraints default_constraints;
+ if (!constraints) {
+ constraints = &default_constraints;
+
+ default_constraints.AddMandatory(
+ webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false);
+ }
+
+ scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
+ bool dtls;
+ if (FindConstraint(constraints,
+ webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ &dtls,
+ nullptr) && dtls) {
+ dtls_identity_store.reset(new FakeDtlsIdentityStore());
+ }
+ pc_ = pc_factory_->CreatePeerConnection(servers, constraints,
+ port_allocator_factory_.get(),
+ dtls_identity_store.Pass(),
+ &observer_);
+ ASSERT_TRUE(pc_.get() != NULL);
+ observer_.SetPeerConnectionInterface(pc_.get());
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ // Attempts to create a connection with an invalid ICE server |uri| and
+ // expects the factory to reject it.
+ void CreatePeerConnectionExpectFail(const std::string& uri) {
+ PeerConnectionInterface::IceServer server;
+ PeerConnectionInterface::IceServers servers;
+ server.uri = uri;
+ servers.push_back(server);
+
+ scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
+ port_allocator_factory_ = FakePortAllocatorFactory::Create();
+ scoped_refptr<PeerConnectionInterface> pc;
+ pc = pc_factory_->CreatePeerConnection(
+ servers, nullptr, port_allocator_factory_.get(),
+ dtls_identity_store.Pass(), &observer_);
+ ASSERT_EQ(nullptr, pc);
+ }
+
+ // Exercises STUN/TURN URI parsing: valid configurations must surface in the
+ // fake port allocator, malformed ones must fail connection creation.
+ void CreatePeerConnectionWithDifferentConfigurations() {
+ CreatePeerConnection(kStunAddressOnly, "", NULL);
+ EXPECT_EQ(1u, port_allocator_factory_->stun_configs().size());
+ EXPECT_EQ(0u, port_allocator_factory_->turn_configs().size());
+ EXPECT_EQ("address",
+ port_allocator_factory_->stun_configs()[0].server.hostname());
+ EXPECT_EQ(kDefaultStunPort,
+ port_allocator_factory_->stun_configs()[0].server.port());
+
+ CreatePeerConnectionExpectFail(kStunInvalidPort);
+ CreatePeerConnectionExpectFail(kStunAddressPortAndMore1);
+ CreatePeerConnectionExpectFail(kStunAddressPortAndMore2);
+
+ CreatePeerConnection(kTurnIceServerUri, kTurnPassword, NULL);
+ EXPECT_EQ(0u, port_allocator_factory_->stun_configs().size());
+ EXPECT_EQ(1u, port_allocator_factory_->turn_configs().size());
+ EXPECT_EQ(kTurnUsername,
+ port_allocator_factory_->turn_configs()[0].username);
+ EXPECT_EQ(kTurnPassword,
+ port_allocator_factory_->turn_configs()[0].password);
+ EXPECT_EQ(kTurnHostname,
+ port_allocator_factory_->turn_configs()[0].server.hostname());
+ }
+
+ void ReleasePeerConnection() {
+ pc_ = NULL;
+ observer_.SetPeerConnectionInterface(NULL);
+ }
+
+ // Adds a local video-only stream and waits for the renegotiation callback.
+ void AddVideoStream(const std::string& label) {
+ // Create a local stream.
+ scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(label));
+ scoped_refptr<VideoSourceInterface> video_source(
+ pc_factory_->CreateVideoSource(new cricket::FakeVideoCapturer(), NULL));
+ scoped_refptr<VideoTrackInterface> video_track(
+ pc_factory_->CreateVideoTrack(label + "v0", video_source));
+ stream->AddTrack(video_track.get());
+ EXPECT_TRUE(pc_->AddStream(stream));
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+ }
+
+ // Adds a local audio-only stream and waits for the renegotiation callback.
+ void AddVoiceStream(const std::string& label) {
+ // Create a local stream.
+ scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(label));
+ scoped_refptr<AudioTrackInterface> audio_track(
+ pc_factory_->CreateAudioTrack(label + "a0", NULL));
+ stream->AddTrack(audio_track.get());
+ EXPECT_TRUE(pc_->AddStream(stream));
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+ }
+
+ // Adds a local stream with one audio and one video track.
+ void AddAudioVideoStream(const std::string& stream_label,
+ const std::string& audio_track_label,
+ const std::string& video_track_label) {
+ // Create a local stream.
+ scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(stream_label));
+ scoped_refptr<AudioTrackInterface> audio_track(
+ pc_factory_->CreateAudioTrack(
+ audio_track_label, static_cast<AudioSourceInterface*>(NULL)));
+ stream->AddTrack(audio_track.get());
+ scoped_refptr<VideoTrackInterface> video_track(
+ pc_factory_->CreateVideoTrack(video_track_label, NULL));
+ stream->AddTrack(video_track.get());
+ EXPECT_TRUE(pc_->AddStream(stream));
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+ }
+
+ // Runs the asynchronous CreateOffer/CreateAnswer API synchronously; the
+ // caller takes ownership of |*desc|. Returns the observer's result flag.
+ bool DoCreateOfferAnswer(SessionDescriptionInterface** desc,
+ bool offer,
+ MediaConstraintsInterface* constraints) {
+ rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockCreateSessionDescriptionObserver>());
+ if (offer) {
+ pc_->CreateOffer(observer, constraints);
+ } else {
+ pc_->CreateAnswer(observer, constraints);
+ }
+ EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+ *desc = observer->release_desc();
+ return observer->result();
+ }
+
+ bool DoCreateOffer(SessionDescriptionInterface** desc,
+ MediaConstraintsInterface* constraints) {
+ return DoCreateOfferAnswer(desc, true, constraints);
+ }
+
+ bool DoCreateAnswer(SessionDescriptionInterface** desc,
+ MediaConstraintsInterface* constraints) {
+ return DoCreateOfferAnswer(desc, false, constraints);
+ }
+
+ // Runs the asynchronous SetLocal/SetRemoteDescription API synchronously.
+ // |desc| ownership passes to the PeerConnection.
+ bool DoSetSessionDescription(SessionDescriptionInterface* desc, bool local) {
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockSetSessionDescriptionObserver>());
+ if (local) {
+ pc_->SetLocalDescription(observer, desc);
+ } else {
+ pc_->SetRemoteDescription(observer, desc);
+ }
+ EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+ return observer->result();
+ }
+
+ bool DoSetLocalDescription(SessionDescriptionInterface* desc) {
+ return DoSetSessionDescription(desc, true);
+ }
+
+ bool DoSetRemoteDescription(SessionDescriptionInterface* desc) {
+ return DoSetSessionDescription(desc, false);
+ }
+
+ // Calls PeerConnection::GetStats and check the return value.
+ // It does not verify the values in the StatReports since a RTCP packet might
+ // be required.
+ bool DoGetStats(MediaStreamTrackInterface* track) {
+ rtc::scoped_refptr<MockStatsObserver> observer(
+ new rtc::RefCountedObject<MockStatsObserver>());
+ if (!pc_->GetStats(
+ observer, track, PeerConnectionInterface::kStatsOutputLevelStandard))
+ return false;
+ EXPECT_TRUE_WAIT(observer->called(), kTimeout);
+ return observer->called();
+ }
+
+ // Creates a connection with an audio+video stream and completes a full
+ // loopback offer/answer exchange.
+ void InitiateCall() {
+ CreatePeerConnection();
+ // Create a local stream with audio&video tracks.
+ AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+ CreateOfferReceiveAnswer();
+ }
+
+ // Verify that RTP Header extensions have been negotiated for audio and
+ // video.
+ void VerifyRemoteRtpHeaderExtensions() {
+ const cricket::MediaContentDescription* desc =
+ cricket::GetFirstAudioContentDescription(
+ pc_->remote_description()->description());
+ ASSERT_TRUE(desc != NULL);
+ EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+
+ desc = cricket::GetFirstVideoContentDescription(
+ pc_->remote_description()->description());
+ ASSERT_TRUE(desc != NULL);
+ EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+ }
+
+ // Creates a local offer and applies it as the REMOTE description (loopback).
+ void CreateOfferAsRemoteDescription() {
+ rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+ std::string sdp;
+ EXPECT_TRUE(offer->ToString(&sdp));
+ SessionDescriptionInterface* remote_offer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ sdp, NULL);
+ EXPECT_TRUE(DoSetRemoteDescription(remote_offer));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+ }
+
+ void CreateAndSetRemoteOffer(const std::string& sdp) {
+ SessionDescriptionInterface* remote_offer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ sdp, nullptr);
+ EXPECT_TRUE(DoSetRemoteDescription(remote_offer));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+ }
+
+ void CreateAnswerAsLocalDescription() {
+ scoped_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+ // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+ // audio codec change, even if the parameter has nothing to do with
+ // receiving. Not all parameters are serialized to SDP.
+ // Since CreatePrAnswerAsLocalDescription serializes/deserializes
+ // the SessionDescription, it is necessary to do that here too in order to
+ // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+ // https://code.google.com/p/webrtc/issues/detail?id=1356
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ SessionDescriptionInterface* new_answer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+ sdp, NULL);
+ EXPECT_TRUE(DoSetLocalDescription(new_answer));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ // Creates an answer and applies it locally as a provisional (pr) answer.
+ void CreatePrAnswerAsLocalDescription() {
+ scoped_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ SessionDescriptionInterface* pr_answer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kPrAnswer,
+ sdp, NULL);
+ EXPECT_TRUE(DoSetLocalDescription(pr_answer));
+ EXPECT_EQ(PeerConnectionInterface::kHaveLocalPrAnswer, observer_.state_);
+ }
+
+ // Full loopback round: local offer, then the same SDP applied as the
+ // remote answer.
+ void CreateOfferReceiveAnswer() {
+ CreateOfferAsLocalDescription();
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ CreateAnswerAsRemoteDescription(sdp);
+ }
+
+ void CreateOfferAsLocalDescription() {
+ rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+ // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+ // audio codec change, even if the parameter has nothing to do with
+ // receiving. Not all parameters are serialized to SDP.
+ // Since CreatePrAnswerAsLocalDescription serializes/deserializes
+ // the SessionDescription, it is necessary to do that here too in order to
+ // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+ // https://code.google.com/p/webrtc/issues/detail?id=1356
+ std::string sdp;
+ EXPECT_TRUE(offer->ToString(&sdp));
+ SessionDescriptionInterface* new_offer =
+ webrtc::CreateSessionDescription(
+ SessionDescriptionInterface::kOffer,
+ sdp, NULL);
+
+ EXPECT_TRUE(DoSetLocalDescription(new_offer));
+ EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_);
+ // Wait for the ice_complete message, so that SDP will have candidates.
+ EXPECT_TRUE_WAIT(observer_.ice_complete_, kTimeout);
+ }
+
+ void CreateAnswerAsRemoteDescription(const std::string& sdp) {
+ webrtc::JsepSessionDescription* answer = new webrtc::JsepSessionDescription(
+ SessionDescriptionInterface::kAnswer);
+ EXPECT_TRUE(answer->Initialize(sdp, NULL));
+ EXPECT_TRUE(DoSetRemoteDescription(answer));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ // Applies |sdp| first as a remote provisional answer, then as the final
+ // remote answer, checking the signaling state after each step.
+ void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& sdp) {
+ webrtc::JsepSessionDescription* pr_answer =
+ new webrtc::JsepSessionDescription(
+ SessionDescriptionInterface::kPrAnswer);
+ EXPECT_TRUE(pr_answer->Initialize(sdp, NULL));
+ EXPECT_TRUE(DoSetRemoteDescription(pr_answer));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemotePrAnswer, observer_.state_);
+ webrtc::JsepSessionDescription* answer =
+ new webrtc::JsepSessionDescription(
+ SessionDescriptionInterface::kAnswer);
+ EXPECT_TRUE(answer->Initialize(sdp, NULL));
+ EXPECT_TRUE(DoSetRemoteDescription(answer));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ // Helper function used for waiting until the last signaled remote stream has
+ // the same label as |stream_label|. In a few of the tests in this file we
+ // answer with the same session description as we offer and thus we can
+ // check if OnAddStream have been called with the same stream as we offer to
+ // send.
+ void WaitAndVerifyOnAddStream(const std::string& stream_label) {
+ EXPECT_EQ_WAIT(stream_label, observer_.GetLastAddedStreamLabel(), kTimeout);
+ }
+
+ // Creates an offer and applies it as a local session description.
+ // Creates an answer with the same SDP as the offer but removes all lines
+ // that start with "a=ssrc".
+ void CreateOfferReceiveAnswerWithoutSsrc() {
+ CreateOfferAsLocalDescription();
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ SetSsrcToZero(&sdp);
+ CreateAnswerAsRemoteDescription(sdp);
+ }
+
+ // This function creates a MediaStream with label kStreams[0] and
+ // |number_of_audio_tracks| and |number_of_video_tracks| tracks and the
+ // corresponding SessionDescriptionInterface. The SessionDescriptionInterface
+ // is returned in |desc| and the MediaStream is stored in
+ // |reference_collection_|
+ void CreateSessionDescriptionAndReference(
+ size_t number_of_audio_tracks,
+ size_t number_of_video_tracks,
+ SessionDescriptionInterface** desc) {
+ ASSERT_TRUE(desc != nullptr);
+ ASSERT_LE(number_of_audio_tracks, 2u);
+ ASSERT_LE(number_of_video_tracks, 2u);
+
+ reference_collection_ = StreamCollection::Create();
+ std::string sdp_ms1 = std::string(kSdpStringInit);
+
+ std::string mediastream_label = kStreams[0];
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+ webrtc::MediaStream::Create(mediastream_label));
+ reference_collection_->AddStream(stream);
+
+ if (number_of_audio_tracks > 0) {
+ sdp_ms1 += std::string(kSdpStringAudio);
+ sdp_ms1 += std::string(kSdpStringMs1Audio0);
+ AddAudioTrack(kAudioTracks[0], stream);
+ }
+ if (number_of_audio_tracks > 1) {
+ sdp_ms1 += kSdpStringMs1Audio1;
+ AddAudioTrack(kAudioTracks[1], stream);
+ }
+
+ if (number_of_video_tracks > 0) {
+ sdp_ms1 += std::string(kSdpStringVideo);
+ sdp_ms1 += std::string(kSdpStringMs1Video0);
+ AddVideoTrack(kVideoTracks[0], stream);
+ }
+ if (number_of_video_tracks > 1) {
+ sdp_ms1 += kSdpStringMs1Video1;
+ AddVideoTrack(kVideoTracks[1], stream);
+ }
+
+ *desc = webrtc::CreateSessionDescription(
+ SessionDescriptionInterface::kOffer, sdp_ms1, nullptr);
+ }
+
+ void AddAudioTrack(const std::string& track_id,
+ MediaStreamInterface* stream) {
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(track_id, nullptr));
+ ASSERT_TRUE(stream->AddTrack(audio_track));
+ }
+
+ void AddVideoTrack(const std::string& track_id,
+ MediaStreamInterface* stream) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(track_id, nullptr));
+ ASSERT_TRUE(stream->AddTrack(video_track));
+ }
+
+ // Fake allocator lets tests inspect the STUN/TURN configs that were parsed.
+ scoped_refptr<FakePortAllocatorFactory> port_allocator_factory_;
+ scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
+ scoped_refptr<PeerConnectionInterface> pc_;
+ MockPeerConnectionObserver observer_;
+ rtc::scoped_refptr<StreamCollection> reference_collection_;
+};
+
+// Exercises ICE server URI parsing (STUN/TURN, valid and malformed) via the
+// fixture helper.
+TEST_F(PeerConnectionInterfaceTest,
+ CreatePeerConnectionWithDifferentConfigurations) {
+ CreatePeerConnectionWithDifferentConfigurations();
+}
+
+// Tests AddStream/RemoveStream bookkeeping of local streams.
+TEST_F(PeerConnectionInterfaceTest, AddStreams) {
+ CreatePeerConnection();
+ AddVideoStream(kStreamLabel1);
+ AddVoiceStream(kStreamLabel2);
+ ASSERT_EQ(2u, pc_->local_streams()->count());
+
+ // Test we can add multiple local streams to one peerconnection.
+ scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(kStreamLabel3));
+ scoped_refptr<AudioTrackInterface> audio_track(
+ pc_factory_->CreateAudioTrack(
+ kStreamLabel3, static_cast<AudioSourceInterface*>(NULL)));
+ stream->AddTrack(audio_track.get());
+ EXPECT_TRUE(pc_->AddStream(stream));
+ EXPECT_EQ(3u, pc_->local_streams()->count());
+
+ // Remove the third stream.
+ pc_->RemoveStream(pc_->local_streams()->at(2));
+ EXPECT_EQ(2u, pc_->local_streams()->count());
+
+ // Remove the second stream.
+ pc_->RemoveStream(pc_->local_streams()->at(1));
+ EXPECT_EQ(1u, pc_->local_streams()->count());
+
+ // Remove the first stream.
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Test that the created offer includes streams we added.
+TEST_F(PeerConnectionInterfaceTest, AddedStreamsPresentInOffer) {
+ CreatePeerConnection();
+ AddAudioVideoStream(kStreamLabel1, "audio_track", "video_track");
+ scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(offer->description());
+ const cricket::AudioContentDescription* audio_desc =
+ static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ EXPECT_TRUE(
+ ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+
+ const cricket::ContentInfo* video_content =
+ cricket::GetFirstVideoContent(offer->description());
+ const cricket::VideoContentDescription* video_desc =
+ static_cast<const cricket::VideoContentDescription*>(
+ video_content->description);
+ EXPECT_TRUE(
+ ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+
+ // Add another stream and ensure the offer includes both the old and new
+ // streams.
+ AddAudioVideoStream(kStreamLabel2, "audio_track2", "video_track2");
+ ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+ audio_content = cricket::GetFirstAudioContent(offer->description());
+ audio_desc = static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ EXPECT_TRUE(
+ ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+ EXPECT_TRUE(
+ ContainsTrack(audio_desc->streams(), kStreamLabel2, "audio_track2"));
+
+ video_content = cricket::GetFirstVideoContent(offer->description());
+ video_desc = static_cast<const cricket::VideoContentDescription*>(
+ video_content->description);
+ EXPECT_TRUE(
+ ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+ EXPECT_TRUE(
+ ContainsTrack(video_desc->streams(), kStreamLabel2, "video_track2"));
+}
+
+// Tests that removing the only local stream empties the local collection.
+TEST_F(PeerConnectionInterfaceTest, RemoveStream) {
+ CreatePeerConnection();
+ AddVideoStream(kStreamLabel1);
+ ASSERT_EQ(1u, pc_->local_streams()->count());
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Tests a full loopback offer/answer call and that RTP header extensions are
+// negotiated for both audio and video.
+TEST_F(PeerConnectionInterfaceTest, CreateOfferReceiveAnswer) {
+ InitiateCall();
+ WaitAndVerifyOnAddStream(kStreamLabel1);
+ VerifyRemoteRtpHeaderExtensions();
+}
+
+// Tests offering locally, then receiving a provisional answer followed by the
+// final answer.
+TEST_F(PeerConnectionInterfaceTest, CreateOfferReceivePrAnswerAndAnswer) {
+ CreatePeerConnection();
+ AddVideoStream(kStreamLabel1);
+ CreateOfferAsLocalDescription();
+ std::string offer;
+ EXPECT_TRUE(pc_->local_description()->ToString(&offer));
+ CreatePrAnswerAndAnswerAsRemoteDescription(offer);
+ WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+// Tests the answerer role: receive an offer and answer it locally.
+TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreateAnswer) {
+ CreatePeerConnection();
+ AddVideoStream(kStreamLabel1);
+
+ CreateOfferAsRemoteDescription();
+ CreateAnswerAsLocalDescription();
+
+ WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+// Tests the answerer role with a provisional answer before the final answer.
+TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreatePrAnswerAndAnswer) {
+ CreatePeerConnection();
+ AddVideoStream(kStreamLabel1);
+
+ CreateOfferAsRemoteDescription();
+ CreatePrAnswerAsLocalDescription();
+ CreateAnswerAsLocalDescription();
+
+ WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+// Tests removing and re-adding a stream with a renegotiation in between.
+TEST_F(PeerConnectionInterfaceTest, Renegotiate) {
+ InitiateCall();
+ ASSERT_EQ(1u, pc_->remote_streams()->count());
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ CreateOfferReceiveAnswer();
+ EXPECT_EQ(0u, pc_->remote_streams()->count());
+ AddVideoStream(kStreamLabel1);
+ CreateOfferReceiveAnswer();
+}
+
+// Tests that after negotiating an audio only call, the respondent can perform a
+// renegotiation that removes the audio stream.
+TEST_F(PeerConnectionInterfaceTest, RenegotiateAudioOnly) {
+ CreatePeerConnection();
+ AddVoiceStream(kStreamLabel1);
+ CreateOfferAsRemoteDescription();
+ CreateAnswerAsLocalDescription();
+
+ ASSERT_EQ(1u, pc_->remote_streams()->count());
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ CreateOfferReceiveAnswer();
+ EXPECT_EQ(0u, pc_->remote_streams()->count());
+}
+
+// Test that candidates are generated and that we can parse our own candidates.
+TEST_F(PeerConnectionInterfaceTest, IceCandidates) {
+ CreatePeerConnection();
+
+ // No candidate exists yet, so adding one must fail.
+ EXPECT_FALSE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
+ // SetRemoteDescription takes ownership of offer.
+ SessionDescriptionInterface* offer = NULL;
+ AddVideoStream(kStreamLabel1);
+ EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetRemoteDescription(offer));
+
+ // SetLocalDescription takes ownership of answer.
+ SessionDescriptionInterface* answer = NULL;
+ EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(answer));
+
+ EXPECT_TRUE_WAIT(observer_.last_candidate_.get() != NULL, kTimeout);
+ EXPECT_TRUE_WAIT(observer_.ice_complete_, kTimeout);
+
+ EXPECT_TRUE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
+}
+
+// Test that CreateOffer and CreateAnswer will fail if the track labels are
+// not unique.
+TEST_F(PeerConnectionInterfaceTest, CreateOfferAnswerWithInvalidStream) {
+ CreatePeerConnection();
+ // Create a regular offer for the CreateAnswer test later.
+ SessionDescriptionInterface* offer = NULL;
+ EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(offer != NULL);
+ delete offer;
+ offer = NULL;
+
+ // Create a local stream with audio&video tracks having same label.
+ AddAudioVideoStream(kStreamLabel1, "track_label", "track_label");
+
+ // Test CreateOffer
+ EXPECT_FALSE(DoCreateOffer(&offer, nullptr));
+
+ // Test CreateAnswer
+ SessionDescriptionInterface* answer = NULL;
+ EXPECT_FALSE(DoCreateAnswer(&answer, nullptr));
+}
+
+// Test that we will get different SSRCs for each track in the offer and answer
+// we created.
+TEST_F(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
+ CreatePeerConnection();
+ // Create a local stream with audio&video tracks having different labels.
+ AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+
+ // Test CreateOffer
+ scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+ int audio_ssrc = 0;
+ int video_ssrc = 0;
+ EXPECT_TRUE(GetFirstSsrc(GetFirstAudioContent(offer->description()),
+ &audio_ssrc));
+ EXPECT_TRUE(GetFirstSsrc(GetFirstVideoContent(offer->description()),
+ &video_ssrc));
+ EXPECT_NE(audio_ssrc, video_ssrc);
+
+ // Test CreateAnswer
+ EXPECT_TRUE(DoSetRemoteDescription(offer.release()));
+ scoped_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+ audio_ssrc = 0;
+ video_ssrc = 0;
+ EXPECT_TRUE(GetFirstSsrc(GetFirstAudioContent(answer->description()),
+ &audio_ssrc));
+ EXPECT_TRUE(GetFirstSsrc(GetFirstVideoContent(answer->description()),
+ &video_ssrc));
+ EXPECT_NE(audio_ssrc, video_ssrc);
+}
+
+// Test that we can specify a certain track that we want statistics about.
+TEST_F(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) {
+ InitiateCall();
+ ASSERT_LT(0u, pc_->remote_streams()->count());
+ ASSERT_LT(0u, pc_->remote_streams()->at(0)->GetAudioTracks().size());
+ scoped_refptr<MediaStreamTrackInterface> remote_audio =
+ pc_->remote_streams()->at(0)->GetAudioTracks()[0];
+ EXPECT_TRUE(DoGetStats(remote_audio));
+
+ // Remove the stream. Since we are sending to our selves the local
+ // and the remote stream is the same.
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ // Do a re-negotiation.
+ CreateOfferReceiveAnswer();
+
+ ASSERT_EQ(0u, pc_->remote_streams()->count());
+
+ // Test that we still can get statistics for the old track. Even if it is not
+ // sent any longer.
+ EXPECT_TRUE(DoGetStats(remote_audio));
+}
+
+// Test that we can get stats on a video track.
+TEST_F(PeerConnectionInterfaceTest, GetStatsForVideoTrack) {
+ InitiateCall();
+ ASSERT_LT(0u, pc_->remote_streams()->count());
+ ASSERT_LT(0u, pc_->remote_streams()->at(0)->GetVideoTracks().size());
+ scoped_refptr<MediaStreamTrackInterface> remote_video =
+ pc_->remote_streams()->at(0)->GetVideoTracks()[0];
+ EXPECT_TRUE(DoGetStats(remote_video));
+}
+
+// Test that we don't get statistics for an invalid track.
+// TODO(tommi): Fix this test. DoGetStats will return true
+// for the unknown track (since GetStats is async), but no
+// data is returned for the track.
+TEST_F(PeerConnectionInterfaceTest, DISABLED_GetStatsForInvalidTrack) {
+ InitiateCall();
+ scoped_refptr<AudioTrackInterface> unknown_audio_track(
+ pc_factory_->CreateAudioTrack("unknown track", NULL));
+ EXPECT_FALSE(DoGetStats(unknown_audio_track));
+}
+
+// This test sets up two RTP data channels in loopback and verifies the full
+// lifecycle: connecting -> open -> send/receive -> close.
+TEST_F(PeerConnectionInterfaceTest, TestDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+ scoped_refptr<DataChannelInterface> data1 =
+ pc_->CreateDataChannel("test1", NULL);
+ scoped_refptr<DataChannelInterface> data2 =
+ pc_->CreateDataChannel("test2", NULL);
+ ASSERT_TRUE(data1 != NULL);
+ rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ new MockDataChannelObserver(data1));
+ rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ new MockDataChannelObserver(data2));
+
+ EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
+ EXPECT_EQ(DataChannelInterface::kConnecting, data2->state());
+ std::string data_to_send1 = "testing testing";
+ std::string data_to_send2 = "testing something else";
+ // Sending before the channel is open must fail.
+ EXPECT_FALSE(data1->Send(DataBuffer(data_to_send1)));
+
+ CreateOfferReceiveAnswer();
+ EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+ EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+ EXPECT_EQ(DataChannelInterface::kOpen, data1->state());
+ EXPECT_EQ(DataChannelInterface::kOpen, data2->state());
+ EXPECT_TRUE(data1->Send(DataBuffer(data_to_send1)));
+ EXPECT_TRUE(data2->Send(DataBuffer(data_to_send2)));
+
+ EXPECT_EQ_WAIT(data_to_send1, observer1->last_message(), kTimeout);
+ EXPECT_EQ_WAIT(data_to_send2, observer2->last_message(), kTimeout);
+
+ // Closing one channel must not affect the other.
+ data1->Close();
+ EXPECT_EQ(DataChannelInterface::kClosing, data1->state());
+ CreateOfferReceiveAnswer();
+ EXPECT_FALSE(observer1->IsOpen());
+ EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+ EXPECT_TRUE(observer2->IsOpen());
+
+ data_to_send2 = "testing something else again";
+ EXPECT_TRUE(data2->Send(DataBuffer(data_to_send2)));
+
+ EXPECT_EQ_WAIT(data_to_send2, observer2->last_message(), kTimeout);
+}
+
+// This test verifies that sending binary data over RTP data channels should
+// fail.
+TEST_F(PeerConnectionInterfaceTest, TestSendBinaryOnRtpDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+ scoped_refptr<DataChannelInterface> data1 =
+ pc_->CreateDataChannel("test1", NULL);
+ scoped_refptr<DataChannelInterface> data2 =
+ pc_->CreateDataChannel("test2", NULL);
+ ASSERT_TRUE(data1 != NULL);
+ rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ new MockDataChannelObserver(data1));
+ rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ new MockDataChannelObserver(data2));
+
+ EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
+ EXPECT_EQ(DataChannelInterface::kConnecting, data2->state());
+
+ CreateOfferReceiveAnswer();
+ EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+ EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+ EXPECT_EQ(DataChannelInterface::kOpen, data1->state());
+ EXPECT_EQ(DataChannelInterface::kOpen, data2->state());
+
+ // A DataBuffer with binary == true is not supported over RTP channels.
+ rtc::Buffer buffer("test", 4);
+ EXPECT_FALSE(data1->Send(DataBuffer(buffer, true)));
+}
+
+// This test sets up an RTP data channel in loopback and tests that a channel
+// is opened even if the remote end answers with a zero SSRC.
+TEST_F(PeerConnectionInterfaceTest, TestSendOnlyDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+ scoped_refptr<DataChannelInterface> data1 =
+ pc_->CreateDataChannel("test1", NULL);
+ rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ new MockDataChannelObserver(data1));
+
+ CreateOfferReceiveAnswerWithoutSsrc();
+
+ EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+
+ data1->Close();
+ EXPECT_EQ(DataChannelInterface::kClosing, data1->state());
+ CreateOfferReceiveAnswerWithoutSsrc();
+ EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+ EXPECT_FALSE(observer1->IsOpen());
+}
+
+// This tests that if a data channel is added in an answer, a receive-only
+// channel is created.
+TEST_F(PeerConnectionInterfaceTest, TestReceiveOnlyDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ std::string offer_label = "offer_channel";
+ scoped_refptr<DataChannelInterface> offer_channel =
+ pc_->CreateDataChannel(offer_label, NULL);
+
+ CreateOfferAsLocalDescription();
+
+ // Replace the data channel label in the offer and apply it as an answer.
+ std::string receive_label = "answer_channel";
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ rtc::replace_substrs(offer_label.c_str(), offer_label.length(),
+ receive_label.c_str(), receive_label.length(),
+ &sdp);
+ CreateAnswerAsRemoteDescription(sdp);
+
+ // Verify that a new incoming data channel has been created and that
+ // it is open but can't be written to.
+ ASSERT_TRUE(observer_.last_datachannel_ != NULL);
+ DataChannelInterface* received_channel = observer_.last_datachannel_;
+ EXPECT_EQ(DataChannelInterface::kConnecting, received_channel->state());
+ EXPECT_EQ(receive_label, received_channel->label());
+ EXPECT_FALSE(received_channel->Send(DataBuffer("something")));
+
+ // Verify that the channel we initially offered has been rejected.
+ EXPECT_EQ(DataChannelInterface::kClosed, offer_channel->state());
+
+ // Do another offer / answer exchange and verify that the data channel is
+ // opened.
+ CreateOfferReceiveAnswer();
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, received_channel->state(),
+ kTimeout);
+}
+
+// This tests that no data channel is returned if a reliable channel is
+// requested for the RTP transport (which cannot provide reliability).
+// TODO(perkj): Remove this test once reliable channels are implemented.
+TEST_F(PeerConnectionInterfaceTest, CreateReliableRtpDataChannelShouldFail) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ std::string label = "test";
+ webrtc::DataChannelInit config;
+ config.reliable = true;
+ scoped_refptr<DataChannelInterface> channel =
+ pc_->CreateDataChannel(label, &config);
+ EXPECT_TRUE(channel == NULL);
+}
+
+// Verifies that duplicated label is not allowed for RTP data channel.
+TEST_F(PeerConnectionInterfaceTest, RtpDuplicatedLabelNotAllowed) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ std::string label = "test";
+ scoped_refptr<DataChannelInterface> channel =
+ pc_->CreateDataChannel(label, nullptr);
+ EXPECT_NE(channel, nullptr);
+
+ // A second channel with the same label must be rejected.
+ scoped_refptr<DataChannelInterface> dup_channel =
+ pc_->CreateDataChannel(label, nullptr);
+ EXPECT_EQ(dup_channel, nullptr);
+}
+
+// This tests that a SCTP data channel is returned using different
+// DataChannelInit configurations, and that only the first channel creation
+// triggers OnRenegotiationNeeded.
+TEST_F(PeerConnectionInterfaceTest, CreateSctpDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowDtlsSctpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ webrtc::DataChannelInit config;
+
+ // Default config (ordered, no retransmit limits) -> reliable channel.
+ scoped_refptr<DataChannelInterface> channel =
+ pc_->CreateDataChannel("1", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_TRUE(channel->reliable());
+ EXPECT_TRUE(observer_.renegotiation_needed_);
+ observer_.renegotiation_needed_ = false;
+
+ // Unordered but still without retransmit limits -> still reliable.
+ config.ordered = false;
+ channel = pc_->CreateDataChannel("2", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_TRUE(channel->reliable());
+ EXPECT_FALSE(observer_.renegotiation_needed_);
+
+ // A maxRetransmits limit makes the channel unreliable.
+ config.ordered = true;
+ config.maxRetransmits = 0;
+ channel = pc_->CreateDataChannel("3", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_FALSE(channel->reliable());
+ EXPECT_FALSE(observer_.renegotiation_needed_);
+
+ // A maxRetransmitTime limit (with maxRetransmits unset) is also unreliable.
+ config.maxRetransmits = -1;
+ config.maxRetransmitTime = 0;
+ channel = pc_->CreateDataChannel("4", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_FALSE(channel->reliable());
+ EXPECT_FALSE(observer_.renegotiation_needed_);
+}
+
+// This tests that no data channel is returned if both maxRetransmits and
+// maxRetransmitTime are set for SCTP data channels (the two limits are
+// mutually exclusive).
+TEST_F(PeerConnectionInterfaceTest,
+ CreateSctpDataChannelShouldFailForInvalidConfig) {
+ FakeConstraints constraints;
+ constraints.SetAllowDtlsSctpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ std::string label = "test";
+ webrtc::DataChannelInit config;
+ config.maxRetransmits = 0;
+ config.maxRetransmitTime = 0;
+
+ scoped_refptr<DataChannelInterface> channel =
+ pc_->CreateDataChannel(label, &config);
+ EXPECT_TRUE(channel == NULL);
+}
+
+// The test verifies that creating a SCTP data channel with an id already in use
+// or out of range should fail.
+TEST_F(PeerConnectionInterfaceTest,
+ CreateSctpDataChannelWithInvalidIdShouldFail) {
+ FakeConstraints constraints;
+ constraints.SetAllowDtlsSctpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ webrtc::DataChannelInit config;
+ scoped_refptr<DataChannelInterface> channel;
+
+ config.id = 1;
+ channel = pc_->CreateDataChannel("1", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_EQ(1, channel->id());
+
+ // Id 1 is now taken, so a second channel requesting it must fail.
+ channel = pc_->CreateDataChannel("x", &config);
+ EXPECT_TRUE(channel == NULL);
+
+ // The maximum valid stream id is accepted...
+ config.id = cricket::kMaxSctpSid;
+ channel = pc_->CreateDataChannel("max", &config);
+ EXPECT_TRUE(channel != NULL);
+ EXPECT_EQ(config.id, channel->id());
+
+ // ...but anything above it is out of range.
+ config.id = cricket::kMaxSctpSid + 1;
+ channel = pc_->CreateDataChannel("x", &config);
+ EXPECT_TRUE(channel == NULL);
+}
+
+// Verifies that a duplicated label is allowed for SCTP data channels
+// (unlike RTP data channels, where it is rejected).
+TEST_F(PeerConnectionInterfaceTest, SctpDuplicatedLabelAllowed) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+
+ std::string label = "test";
+ scoped_refptr<DataChannelInterface> channel =
+ pc_->CreateDataChannel(label, nullptr);
+ EXPECT_NE(channel, nullptr);
+
+ scoped_refptr<DataChannelInterface> dup_channel =
+ pc_->CreateDataChannel(label, nullptr);
+ EXPECT_NE(dup_channel, nullptr);
+}
+
+// This test verifies that OnRenegotiationNeeded is fired for every new RTP
+// DataChannel.
+TEST_F(PeerConnectionInterfaceTest, RenegotiationNeededForNewRtpDataChannel) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ scoped_refptr<DataChannelInterface> dc1 =
+ pc_->CreateDataChannel("test1", NULL);
+ EXPECT_TRUE(observer_.renegotiation_needed_);
+ // Reset the flag so the second channel's notification can be observed.
+ observer_.renegotiation_needed_ = false;
+
+ scoped_refptr<DataChannelInterface> dc2 =
+ pc_->CreateDataChannel("test2", NULL);
+ EXPECT_TRUE(observer_.renegotiation_needed_);
+}
+
+// This tests that a data channel closes when a PeerConnection is
+// deleted/closed.
+TEST_F(PeerConnectionInterfaceTest, DataChannelCloseWhenPeerConnectionClose) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ scoped_refptr<DataChannelInterface> data1 =
+ pc_->CreateDataChannel("test1", NULL);
+ scoped_refptr<DataChannelInterface> data2 =
+ pc_->CreateDataChannel("test2", NULL);
+ ASSERT_TRUE(data1 != NULL);
+ rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ new MockDataChannelObserver(data1));
+ rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ new MockDataChannelObserver(data2));
+
+ CreateOfferReceiveAnswer();
+ EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+ EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+ // Destroying the PeerConnection must transition both channels to kClosed.
+ ReleasePeerConnection();
+ EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+ EXPECT_EQ(DataChannelInterface::kClosed, data2->state());
+}
+
+// This tests that data channels can be rejected in an answer.
+TEST_F(PeerConnectionInterfaceTest, TestRejectDataChannelInAnswer) {
+ FakeConstraints constraints;
+ constraints.SetAllowRtpDataChannels();
+ CreatePeerConnection(&constraints);
+
+ scoped_refptr<DataChannelInterface> offer_channel(
+ pc_->CreateDataChannel("offer_channel", NULL));
+
+ CreateOfferAsLocalDescription();
+
+ // Create an answer where the m-line for data channels are rejected.
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ webrtc::JsepSessionDescription* answer = new webrtc::JsepSessionDescription(
+ SessionDescriptionInterface::kAnswer);
+ EXPECT_TRUE(answer->Initialize(sdp, NULL));
+ cricket::ContentInfo* data_info =
+ answer->description()->GetContentByName("data");
+ data_info->rejected = true;
+
+ // NOTE(review): |answer| is a raw pointer; presumably
+ // DoSetRemoteDescription takes ownership — confirm against the fixture.
+ DoSetRemoteDescription(answer);
+ EXPECT_EQ(DataChannelInterface::kClosed, offer_channel->state());
+}
+
+// Test that we can create a session description from an SDP string from
+// FireFox, use it as a remote session description, generate an answer and use
+// the answer as a local description.
+TEST_F(PeerConnectionInterfaceTest, ReceiveFireFoxOffer) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+ SessionDescriptionInterface* desc =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ webrtc::kFireFoxSdpOffer, nullptr);
+ EXPECT_TRUE(DoSetSessionDescription(desc, false));
+ CreateAnswerAsLocalDescription();
+ ASSERT_TRUE(pc_->local_description() != NULL);
+ ASSERT_TRUE(pc_->remote_description() != NULL);
+
+ // Audio and video m-lines must be accepted in our answer.
+ const cricket::ContentInfo* content =
+ cricket::GetFirstAudioContent(pc_->local_description()->description());
+ ASSERT_TRUE(content != NULL);
+ EXPECT_FALSE(content->rejected);
+
+ content =
+ cricket::GetFirstVideoContent(pc_->local_description()->description());
+ ASSERT_TRUE(content != NULL);
+ EXPECT_FALSE(content->rejected);
+#ifdef HAVE_SCTP
+ // The data m-line from the Firefox offer is expected to be rejected.
+ content =
+ cricket::GetFirstDataContent(pc_->local_description()->description());
+ ASSERT_TRUE(content != NULL);
+ EXPECT_TRUE(content->rejected);
+#endif
+}
+
+// Test that we can create an audio only offer and receive an answer with a
+// limited set of audio codecs and receive an updated offer with more audio
+// codecs, where the added codecs are not supported.
+TEST_F(PeerConnectionInterfaceTest, ReceiveUpdatedAudioOfferWithBadCodecs) {
+ CreatePeerConnection();
+ AddVoiceStream("audio_label");
+ CreateOfferAsLocalDescription();
+
+ SessionDescriptionInterface* answer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+ webrtc::kAudioSdp, nullptr);
+ EXPECT_TRUE(DoSetSessionDescription(answer, false));
+
+ // Applying the updated remote offer with unsupported codecs and answering
+ // it must both succeed.
+ SessionDescriptionInterface* updated_offer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ webrtc::kAudioSdpWithUnsupportedCodecs,
+ nullptr);
+ EXPECT_TRUE(DoSetSessionDescription(updated_offer, false));
+ CreateAnswerAsLocalDescription();
+}
+
+// Test that if we're receiving (but not sending) a track, subsequent offers
+// will have m-lines with a=recvonly.
+TEST_F(PeerConnectionInterfaceTest, CreateSubsequentRecvOnlyOffer) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ CreateAnswerAsLocalDescription();
+
+ // At this point we should be receiving stream 1, but not sending anything.
+ // A new offer should be recvonly.
+ SessionDescriptionInterface* offer;
+ DoCreateOffer(&offer, nullptr);
+
+ const cricket::ContentInfo* video_content =
+ cricket::GetFirstVideoContent(offer->description());
+ const cricket::VideoContentDescription* video_desc =
+ static_cast<const cricket::VideoContentDescription*>(
+ video_content->description);
+ ASSERT_EQ(cricket::MD_RECVONLY, video_desc->direction());
+
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(offer->description());
+ const cricket::AudioContentDescription* audio_desc =
+ static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ ASSERT_EQ(cricket::MD_RECVONLY, audio_desc->direction());
+}
+
+// Test that if we're receiving (but not sending) a track, and the
+// offerToReceiveVideo/offerToReceiveAudio constraints are explicitly set to
+// false, the generated m-lines will be a=inactive.
+TEST_F(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ CreateAnswerAsLocalDescription();
+
+ // At this point we should be receiving stream 1, but not sending anything.
+ // A new offer would be recvonly, but we'll set the "no receive" constraints
+ // to make it inactive.
+ SessionDescriptionInterface* offer;
+ FakeConstraints offer_constraints;
+ offer_constraints.AddMandatory(
+ webrtc::MediaConstraintsInterface::kOfferToReceiveVideo, false);
+ offer_constraints.AddMandatory(
+ webrtc::MediaConstraintsInterface::kOfferToReceiveAudio, false);
+ DoCreateOffer(&offer, &offer_constraints);
+
+ const cricket::ContentInfo* video_content =
+ cricket::GetFirstVideoContent(offer->description());
+ const cricket::VideoContentDescription* video_desc =
+ static_cast<const cricket::VideoContentDescription*>(
+ video_content->description);
+ ASSERT_EQ(cricket::MD_INACTIVE, video_desc->direction());
+
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(offer->description());
+ const cricket::AudioContentDescription* audio_desc =
+ static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ ASSERT_EQ(cricket::MD_INACTIVE, audio_desc->direction());
+}
+
+// Test that PeerConnection::Close changes the states to closed and all remote
+// tracks change state to ended.
+TEST_F(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
+ // Initialize a PeerConnection and negotiate local and remote session
+ // description.
+ InitiateCall();
+ ASSERT_EQ(1u, pc_->local_streams()->count());
+ ASSERT_EQ(1u, pc_->remote_streams()->count());
+
+ pc_->Close();
+
+ EXPECT_EQ(PeerConnectionInterface::kClosed, pc_->signaling_state());
+ EXPECT_EQ(PeerConnectionInterface::kIceConnectionClosed,
+ pc_->ice_connection_state());
+ EXPECT_EQ(PeerConnectionInterface::kIceGatheringComplete,
+ pc_->ice_gathering_state());
+
+ // Closing does not remove the streams themselves...
+ EXPECT_EQ(1u, pc_->local_streams()->count());
+ EXPECT_EQ(1u, pc_->remote_streams()->count());
+
+ // ...but the remote tracks must have transitioned to kEnded.
+ scoped_refptr<MediaStreamInterface> remote_stream =
+ pc_->remote_streams()->at(0);
+ EXPECT_EQ(MediaStreamTrackInterface::kEnded,
+ remote_stream->GetVideoTracks()[0]->state());
+ EXPECT_EQ(MediaStreamTrackInterface::kEnded,
+ remote_stream->GetAudioTracks()[0]->state());
+}
+
+// Test that PeerConnection methods fails gracefully after
+// PeerConnection::Close has been called.
+TEST_F(PeerConnectionInterfaceTest, CloseAndTestMethods) {
+ CreatePeerConnection();
+ AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+ CreateOfferAsRemoteDescription();
+ CreateAnswerAsLocalDescription();
+
+ ASSERT_EQ(1u, pc_->local_streams()->count());
+ scoped_refptr<MediaStreamInterface> local_stream =
+ pc_->local_streams()->at(0);
+
+ pc_->Close();
+
+ // Stream mutation after Close: removal is a no-op, adding fails.
+ pc_->RemoveStream(local_stream);
+ EXPECT_FALSE(pc_->AddStream(local_stream));
+
+ ASSERT_FALSE(local_stream->GetAudioTracks().empty());
+ rtc::scoped_ptr<webrtc::DtmfSenderInterface> dtmf_sender(
+ pc_->CreateDtmfSender(local_stream->GetAudioTracks()[0]));
+ EXPECT_TRUE(NULL == dtmf_sender); // local stream has been removed.
+
+ EXPECT_TRUE(pc_->CreateDataChannel("test", NULL) == NULL);
+
+ // The last-applied descriptions remain readable after Close...
+ EXPECT_TRUE(pc_->local_description() != NULL);
+ EXPECT_TRUE(pc_->remote_description() != NULL);
+
+ // ...and creating new offers/answers still succeeds...
+ rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ EXPECT_TRUE(DoCreateOffer(offer.use(), nullptr));
+ rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ EXPECT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+ // ...but applying a description must fail on a closed connection.
+ std::string sdp;
+ ASSERT_TRUE(pc_->remote_description()->ToString(&sdp));
+ SessionDescriptionInterface* remote_offer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ sdp, NULL);
+ EXPECT_FALSE(DoSetRemoteDescription(remote_offer));
+
+ ASSERT_TRUE(pc_->local_description()->ToString(&sdp));
+ SessionDescriptionInterface* local_offer =
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+ sdp, NULL);
+ EXPECT_FALSE(DoSetLocalDescription(local_offer));
+}
+
+// Test that GetStats can still be called after PeerConnection::Close.
+TEST_F(PeerConnectionInterfaceTest, CloseAndGetStats) {
+ InitiateCall();
+ pc_->Close();
+ // NULL track selects connection-wide stats; not crashing is the pass.
+ DoGetStats(NULL);
+}
+
+// NOTE: The series of tests below come from what used to be
+// mediastreamsignaling_unittest.cc, and are mostly aimed at testing that
+// setting a remote or local description has the expected effects.
+
+// This test verifies that the remote MediaStreams corresponding to a received
+// SDP string is created. In this test the two separate MediaStreams are
+// signaled.
+TEST_F(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference.get()));
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != nullptr);
+
+ // Create a session description based on another SDP with another
+ // MediaStream.
+ CreateAndSetRemoteOffer(kSdpStringWithStream1And2);
+
+ // Both streams should now be reflected in the observed remote streams.
+ rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference2.get()));
+}
+
+// This test verifies that when remote tracks are added/removed from SDP, the
+// created remote streams are updated appropriately.
+TEST_F(PeerConnectionInterfaceTest,
+ AddRemoveTrackFromExistingRemoteMediaStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // Start with one audio and one video track in the remote stream.
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1;
+ CreateSessionDescriptionAndReference(1, 1, desc_ms1.accept());
+ EXPECT_TRUE(DoSetRemoteDescription(desc_ms1.release()));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_));
+
+ // Add extra audio and video tracks to the same MediaStream.
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
+ CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.accept());
+ EXPECT_TRUE(DoSetRemoteDescription(desc_ms1_two_tracks.release()));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_));
+
+ // Remove the extra audio and video tracks.
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2;
+ CreateSessionDescriptionAndReference(1, 1, desc_ms2.accept());
+ EXPECT_TRUE(DoSetRemoteDescription(desc_ms2.release()));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_));
+}
+
+// This tests that remote tracks are ended if a local session description is set
+// that rejects the media content type.
+TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // First create and set a remote offer, then reject its video content in our
+ // answer.
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
+ remote_stream->GetVideoTracks()[0];
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
+ remote_stream->GetAudioTracks()[0];
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+ rtc::scoped_ptr<SessionDescriptionInterface> local_answer;
+ EXPECT_TRUE(DoCreateAnswer(local_answer.accept(), nullptr));
+ cricket::ContentInfo* video_info =
+ local_answer->description()->GetContentByName("video");
+ video_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+ // Only the rejected (video) track ends; audio stays live.
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+ // Now create an offer where we reject both video and audio.
+ rtc::scoped_ptr<SessionDescriptionInterface> local_offer;
+ EXPECT_TRUE(DoCreateOffer(local_offer.accept(), nullptr));
+ video_info = local_offer->description()->GetContentByName("video");
+ ASSERT_TRUE(video_info != nullptr);
+ video_info->rejected = true;
+ cricket::ContentInfo* audio_info =
+ local_offer->description()->GetContentByName("audio");
+ ASSERT_TRUE(audio_info != nullptr);
+ audio_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(local_offer.release()));
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
+}
+
+// This tests that we won't crash if the remote track has been removed outside
+// of PeerConnection and then PeerConnection tries to reject the track.
+TEST_F(PeerConnectionInterfaceTest, RemoveTrackThenRejectMediaContent) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ // Simulate the application removing the tracks behind PeerConnection's back.
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+ remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+
+ rtc::scoped_ptr<SessionDescriptionInterface> local_answer(
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+ kSdpStringWithStream1, nullptr));
+ cricket::ContentInfo* video_info =
+ local_answer->description()->GetContentByName("video");
+ video_info->rejected = true;
+ cricket::ContentInfo* audio_info =
+ local_answer->description()->GetContentByName("audio");
+ audio_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+
+ // No crash is a pass.
+}
+
+// This tests that if a recvonly remote description is set, no remote streams
+// will be created, even if the description contains SSRCs/MSIDs.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+TEST_F(PeerConnectionInterfaceTest, RecvonlyDescriptionDoesntCreateStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+
+ // Turn the sendrecv offer into a recvonly one before applying it.
+ std::string recvonly_offer = kSdpStringWithStream1;
+ rtc::replace_substrs(kSendrecv, strlen(kSendrecv), kRecvonly,
+ strlen(kRecvonly), &recvonly_offer);
+ CreateAndSetRemoteOffer(recvonly_offer);
+
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and no MSID support.
+// It also tests that the default stream is updated if a video m-line is added
+// in a subsequent session description.
+TEST_F(PeerConnectionInterfaceTest, SdpWithoutMsidCreatesDefaultStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+ // Audio-only offer: the default stream has one audio track and no video.
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("default", remote_stream->label());
+
+ // Adding a video m-line updates the existing default stream in place.
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
+ ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and media direction is send only.
+TEST_F(PeerConnectionInterfaceTest,
+ SendOnlySdpWithoutMsidCreatesDefaultStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringSendOnlyWithoutStreams);
+
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("default", remote_stream->label());
+}
+
+// This tests that it won't crash when PeerConnection tries to remove
+// a remote track that has already been removed from the MediaStream.
+TEST_F(PeerConnectionInterfaceTest, RemoveAlreadyGoneRemoteStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ // Remove the tracks directly, bypassing PeerConnection.
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+ remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+ // No crash is a pass.
+}
+
+// This tests that a default MediaStream is created if the remote session
+// description doesn't contain any streams and doesn't contain an indication
+// of whether MSID is supported.
+TEST_F(PeerConnectionInterfaceTest,
+ SdpWithoutMsidAndStreamsCreatesDefaultStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+}
+
+// This tests that a default MediaStream is not created if the remote session
+// description doesn't contain any streams but does support MSID.
+TEST_F(PeerConnectionInterfaceTest, SdpWithMsidDontCreatesDefaultStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithMsidWithoutStreams);
+ // With MSID signaled, the absence of streams is authoritative.
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is not created if a remote session
+// description is updated to not have any MediaStreams.
+TEST_F(PeerConnectionInterfaceTest, VerifyDefaultStreamIsNotCreated) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithStream1);
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference.get()));
+
+ // Updating to a stream-less description removes the stream; no default
+ // stream should replace it.
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that an RtpSender is created when the local description is set
+// after adding a local stream.
+// TODO(deadbeef): This test and the one below it need to be updated when
+// an RtpSender's lifetime isn't determined by when a local description is set.
+TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // Create an offer just to ensure we have an identity before we manually
+ // call SetLocalDescription.
+ rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+ ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+ CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+ pc_->AddStream(reference_collection_->at(0));
+ EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+ // Two audio + two video tracks -> four senders.
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(4u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+
+ // Remove an audio and video track.
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
+ CreateSessionDescriptionAndReference(1, 1, desc_2.accept());
+ EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_FALSE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_FALSE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that an RtpSender is created when the local description is set
+// before adding a local stream.
+TEST_F(PeerConnectionInterfaceTest,
+ AddLocalStreamAfterLocalDescriptionChanged) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // Create an offer just to ensure we have an identity before we manually
+ // call SetLocalDescription.
+ rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+ ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+ CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+ // Without the stream added yet, setting the description creates no senders.
+ EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(0u, senders.size());
+
+ // Adding the stream afterwards creates the senders.
+ pc_->AddStream(reference_collection_->at(0));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(4u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that the expected behavior occurs if the SSRC on a local track is
+// changed when SetLocalDescription is called.
+TEST_F(PeerConnectionInterfaceTest,
+ ChangeSsrcOnTrackInLocalSessionDescription) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // Create an offer just to ensure we have an identity before we manually
+ // call SetLocalDescription.
+ rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+ ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+ rtc::scoped_ptr<SessionDescriptionInterface> desc;
+ CreateSessionDescriptionAndReference(1, 1, desc.accept());
+ // Keep the SDP text around so we can edit the SSRCs below.
+ std::string sdp;
+ desc->ToString(&sdp);
+
+ pc_->AddStream(reference_collection_->at(0));
+ EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+ // Change the ssrc of the audio and video track.
+ std::string ssrc_org = "a=ssrc:1";
+ std::string ssrc_to = "a=ssrc:97";
+ rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+ ssrc_to.length(), &sdp);
+ ssrc_org = "a=ssrc:2";
+ ssrc_to = "a=ssrc:98";
+ rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+ ssrc_to.length(), &sdp);
+ rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+ nullptr));
+
+ // The senders survive the SSRC change.
+ EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ // TODO(deadbeef): Once RtpSenders expose parameters, check that the SSRC
+ // changed.
+}
+
+// This tests that the expected behavior occurs if a new session description is
+// set with the same tracks, but on a different MediaStream.
+TEST_F(PeerConnectionInterfaceTest, SignalSameTracksInSeparateMediaStream) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ // Create an offer just to ensure we have an identity before we manually
+ // call SetLocalDescription.
+ rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+ ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+ rtc::scoped_ptr<SessionDescriptionInterface> desc;
+ CreateSessionDescriptionAndReference(1, 1, desc.accept());
+ // Keep the SDP text around so we can rewrite the msid below.
+ std::string sdp;
+ desc->ToString(&sdp);
+
+ pc_->AddStream(reference_collection_->at(0));
+ EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+ // Add a new MediaStream but with the same tracks as in the first stream.
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
+ webrtc::MediaStream::Create(kStreams[1]));
+ stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]);
+ stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]);
+ pc_->AddStream(stream_1);
+
+ // Replace msid in the original SDP.
+ rtc::replace_substrs(kStreams[0], strlen(kStreams[0]), kStreams[1],
+ strlen(kStreams[1]), &sdp);
+
+ rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+ webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+ nullptr));
+
+ // The same tracks still map to the same two senders.
+ EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+}
+
+// The following tests verify that session options are created correctly.
+// TODO(deadbeef): Convert these tests to be more end-to-end. Instead of
+// "verify options are converted correctly", should be "pass options into
+// CreateOffer and verify the correct offer is produced."
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidAudioOption) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+ rtc_options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+ EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidVideoOption) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_video = RTCOfferAnswerOptions::kUndefined - 1;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+ rtc_options.offer_to_receive_video =
+ RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+ EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+// Test that a MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio and OfferToReceiveVideo options are set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+ rtc_options.offer_to_receive_video = 1;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_TRUE(options.has_audio());
+ EXPECT_TRUE(options.has_video());
+ EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio is set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudio) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_TRUE(options.has_audio());
+ EXPECT_FALSE(options.has_video());
+ EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// the default OfferOptions are used.
+TEST(CreateSessionOptionsTest, GetDefaultMediaSessionOptionsForOffer) {
+ RTCOfferAnswerOptions rtc_options;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_TRUE(options.has_audio());
+ EXPECT_FALSE(options.has_video());
+ EXPECT_TRUE(options.bundle_enabled);
+ EXPECT_TRUE(options.vad_enabled);
+ EXPECT_FALSE(options.transport_options.ice_restart);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveVideo is set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithVideo) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 0;
+ rtc_options.offer_to_receive_video = 1;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_FALSE(options.has_audio());
+ EXPECT_TRUE(options.has_video());
+ EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// UseRtpMux is set to false.
+TEST(CreateSessionOptionsTest,
+ GetMediaSessionOptionsForOfferWithBundleDisabled) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+ rtc_options.offer_to_receive_video = 1;
+ rtc_options.use_rtp_mux = false;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_TRUE(options.has_audio());
+ EXPECT_TRUE(options.has_video());
+ EXPECT_FALSE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created to restart ice if
+// IceRestart is set. It also tests that subsequent MediaSessionOptions don't
+// have |transport_options.ice_restart| set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithIceRestart) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.ice_restart = true;
+
+ cricket::MediaSessionOptions options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_TRUE(options.transport_options.ice_restart);
+
+ rtc_options = RTCOfferAnswerOptions();
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+ EXPECT_FALSE(options.transport_options.ice_restart);
+}
+
+// Test that the MediaConstraints in an answer don't affect if audio and video
+// is offered in an offer but that if kOfferToReceiveAudio or
+// kOfferToReceiveVideo constraints are true in an offer, the media type will be
+// included in subsequent answers.
+TEST(CreateSessionOptionsTest, MediaConstraintsInAnswer) {
+ FakeConstraints answer_c;
+ answer_c.SetMandatoryReceiveAudio(true);
+ answer_c.SetMandatoryReceiveVideo(true);
+
+ cricket::MediaSessionOptions answer_options;
+ EXPECT_TRUE(ParseConstraintsForAnswer(&answer_c, &answer_options));
+ EXPECT_TRUE(answer_options.has_audio());
+ EXPECT_TRUE(answer_options.has_video());
+
+ RTCOfferAnswerOptions rtc_offer_options;
+
+ cricket::MediaSessionOptions offer_options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_offer_options, &offer_options));
+ EXPECT_TRUE(offer_options.has_audio());
+ EXPECT_FALSE(offer_options.has_video());
+
+ RTCOfferAnswerOptions updated_rtc_offer_options;
+ updated_rtc_offer_options.offer_to_receive_audio = 1;
+ updated_rtc_offer_options.offer_to_receive_video = 1;
+
+ cricket::MediaSessionOptions updated_offer_options;
+ EXPECT_TRUE(ConvertRtcOptionsForOffer(updated_rtc_offer_options,
+ &updated_offer_options));
+ EXPECT_TRUE(updated_offer_options.has_audio());
+ EXPECT_TRUE(updated_offer_options.has_video());
+
+ // Since an offer has been created with both audio and video, subsequent
+ // offers and answers should contain both audio and video.
+ // Answers will only contain the media types that exist in the offer
+ // regardless of the value of |updated_answer_options.has_audio| and
+ // |updated_answer_options.has_video|.
+ FakeConstraints updated_answer_c;
+ answer_c.SetMandatoryReceiveAudio(false);
+ answer_c.SetMandatoryReceiveVideo(false);
+
+ cricket::MediaSessionOptions updated_answer_options;
+ EXPECT_TRUE(
+ ParseConstraintsForAnswer(&updated_answer_c, &updated_answer_options));
+ EXPECT_TRUE(updated_answer_options.has_audio());
+ EXPECT_TRUE(updated_answer_options.has_video());
+}
diff --git a/talk/app/webrtc/peerconnectionproxy.h b/talk/app/webrtc/peerconnectionproxy.h
new file mode 100644
index 0000000000..d207fbbdd8
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionproxy.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for PeerConnectionInterface.
+BEGIN_PROXY_MAP(PeerConnection)
+ PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
+ local_streams)
+ PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
+ remote_streams)
+ PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
+ PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
+ PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
+ CreateDtmfSender, AudioTrackInterface*)
+ PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
+ GetSenders)
+ PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
+ GetReceivers)
+ PROXY_METHOD3(bool, GetStats, StatsObserver*,
+ MediaStreamTrackInterface*,
+ StatsOutputLevel)
+ PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>,
+ CreateDataChannel, const std::string&, const DataChannelInit*)
+ PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
+ PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description)
+ PROXY_METHOD2(void, CreateOffer, CreateSessionDescriptionObserver*,
+ const MediaConstraintsInterface*)
+ PROXY_METHOD2(void, CreateAnswer, CreateSessionDescriptionObserver*,
+ const MediaConstraintsInterface*)
+ PROXY_METHOD2(void, SetLocalDescription, SetSessionDescriptionObserver*,
+ SessionDescriptionInterface*)
+ PROXY_METHOD2(void, SetRemoteDescription, SetSessionDescriptionObserver*,
+ SessionDescriptionInterface*)
+ PROXY_METHOD1(bool,
+ SetConfiguration,
+ const PeerConnectionInterface::RTCConfiguration&);
+ PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*)
+ PROXY_METHOD1(void, RegisterUMAObserver, UMAObserver*)
+ PROXY_METHOD0(SignalingState, signaling_state)
+ PROXY_METHOD0(IceState, ice_state)
+ PROXY_METHOD0(IceConnectionState, ice_connection_state)
+ PROXY_METHOD0(IceGatheringState, ice_gathering_state)
+ PROXY_METHOD0(void, Close)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
diff --git a/talk/app/webrtc/portallocatorfactory.cc b/talk/app/webrtc/portallocatorfactory.cc
new file mode 100644
index 0000000000..bd6caccc80
--- /dev/null
+++ b/talk/app/webrtc/portallocatorfactory.cc
@@ -0,0 +1,92 @@
+/*
+ * libjingle
+ * Copyright 2004--2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/portallocatorfactory.h"
+
+#include "webrtc/p2p/base/basicpacketsocketfactory.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+using rtc::scoped_ptr;
+
+rtc::scoped_refptr<PortAllocatorFactoryInterface>
+PortAllocatorFactory::Create(
+ rtc::Thread* worker_thread) {
+ rtc::RefCountedObject<PortAllocatorFactory>* allocator =
+ new rtc::RefCountedObject<PortAllocatorFactory>(worker_thread);
+ return allocator;
+}
+
+PortAllocatorFactory::PortAllocatorFactory(rtc::Thread* worker_thread)
+ : network_manager_(new rtc::BasicNetworkManager()),
+ socket_factory_(new rtc::BasicPacketSocketFactory(worker_thread)) {
+}
+
+PortAllocatorFactory::~PortAllocatorFactory() {}
+
+void PortAllocatorFactory::SetNetworkIgnoreMask(int network_ignore_mask) {
+ network_manager_->set_network_ignore_mask(network_ignore_mask);
+}
+
+cricket::PortAllocator* PortAllocatorFactory::CreatePortAllocator(
+ const std::vector<StunConfiguration>& stun,
+ const std::vector<TurnConfiguration>& turn) {
+ cricket::ServerAddresses stun_hosts;
+ typedef std::vector<StunConfiguration>::const_iterator StunIt;
+ for (StunIt stun_it = stun.begin(); stun_it != stun.end(); ++stun_it) {
+ stun_hosts.insert(stun_it->server);
+ }
+
+ scoped_ptr<cricket::BasicPortAllocator> allocator(
+ new cricket::BasicPortAllocator(
+ network_manager_.get(), socket_factory_.get(), stun_hosts));
+
+ for (size_t i = 0; i < turn.size(); ++i) {
+ cricket::RelayCredentials credentials(turn[i].username, turn[i].password);
+ cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
+ cricket::ProtocolType protocol;
+ if (cricket::StringToProto(turn[i].transport_type.c_str(), &protocol)) {
+ relay_server.ports.push_back(cricket::ProtocolAddress(
+ turn[i].server, protocol, turn[i].secure));
+ relay_server.credentials = credentials;
+ // First in the list gets highest priority.
+ relay_server.priority = static_cast<int>(turn.size() - i - 1);
+ allocator->AddRelay(relay_server);
+ } else {
+ LOG(LS_WARNING) << "Ignoring TURN server " << turn[i].server << ". "
+ << "Reason= Incorrect " << turn[i].transport_type
+ << " transport parameter.";
+ }
+ }
+ return allocator.release();
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/portallocatorfactory.h b/talk/app/webrtc/portallocatorfactory.h
new file mode 100644
index 0000000000..83376d0b84
--- /dev/null
+++ b/talk/app/webrtc/portallocatorfactory.h
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file defines the default implementation of
+// PortAllocatorFactoryInterface.
+// This implementation creates instances of cricket::HTTPPortAllocator and uses
+// the BasicNetworkManager and BasicPacketSocketFactory.
+
+#ifndef TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
+#define TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace cricket {
+class PortAllocator;
+}
+
+namespace rtc {
+class BasicNetworkManager;
+class BasicPacketSocketFactory;
+}
+
+namespace webrtc {
+
+class PortAllocatorFactory : public PortAllocatorFactoryInterface {
+ public:
+ static rtc::scoped_refptr<PortAllocatorFactoryInterface> Create(
+ rtc::Thread* worker_thread);
+
+ virtual cricket::PortAllocator* CreatePortAllocator(
+ const std::vector<StunConfiguration>& stun,
+ const std::vector<TurnConfiguration>& turn);
+
+ virtual void SetNetworkIgnoreMask(int network_ignore_mask);
+
+ protected:
+ explicit PortAllocatorFactory(rtc::Thread* worker_thread);
+ ~PortAllocatorFactory();
+
+ private:
+ rtc::scoped_ptr<rtc::BasicNetworkManager> network_manager_;
+ rtc::scoped_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/proxy.h b/talk/app/webrtc/proxy.h
new file mode 100644
index 0000000000..76a5c1eff2
--- /dev/null
+++ b/talk/app/webrtc/proxy.h
@@ -0,0 +1,391 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains Macros for creating proxies for webrtc MediaStream and
+// PeerConnection classes.
+
+//
+// Example usage:
+//
+// class TestInterface : public rtc::RefCountInterface {
+// public:
+// std::string FooA() = 0;
+// std::string FooB(bool arg1) const = 0;
+// std::string FooC(bool arg1)= 0;
+// };
+//
+// Note that return types can not be a const reference.
+//
+// class Test : public TestInterface {
+// ... implementation of the interface.
+// };
+//
+// BEGIN_PROXY_MAP(Test)
+// PROXY_METHOD0(std::string, FooA)
+// PROXY_CONSTMETHOD1(std::string, FooB, arg1)
+// PROXY_METHOD1(std::string, FooC, arg1)
+// END_PROXY()
+//
+// The proxy can be created using TestProxy::Create(Thread*, TestInterface*).
+
+#ifndef TALK_APP_WEBRTC_PROXY_H_
+#define TALK_APP_WEBRTC_PROXY_H_
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+template <typename R>
+class ReturnType {
+ public:
+ template<typename C, typename M>
+ void Invoke(C* c, M m) { r_ = (c->*m)(); }
+ template<typename C, typename M, typename T1>
+ void Invoke(C* c, M m, T1 a1) { r_ = (c->*m)(a1); }
+ template<typename C, typename M, typename T1, typename T2>
+ void Invoke(C* c, M m, T1 a1, T2 a2) { r_ = (c->*m)(a1, a2); }
+ template<typename C, typename M, typename T1, typename T2, typename T3>
+ void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { r_ = (c->*m)(a1, a2, a3); }
+ template<typename C, typename M, typename T1, typename T2, typename T3,
+ typename T4>
+ void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) {
+ r_ = (c->*m)(a1, a2, a3, a4);
+ }
+ template<typename C, typename M, typename T1, typename T2, typename T3,
+ typename T4, typename T5>
+ void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) {
+ r_ = (c->*m)(a1, a2, a3, a4, a5);
+ }
+
+ R value() { return r_; }
+
+ private:
+ R r_;
+};
+
+template <>
+class ReturnType<void> {
+ public:
+ template<typename C, typename M>
+ void Invoke(C* c, M m) { (c->*m)(); }
+ template<typename C, typename M, typename T1>
+ void Invoke(C* c, M m, T1 a1) { (c->*m)(a1); }
+ template<typename C, typename M, typename T1, typename T2>
+ void Invoke(C* c, M m, T1 a1, T2 a2) { (c->*m)(a1, a2); }
+ template<typename C, typename M, typename T1, typename T2, typename T3>
+ void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { (c->*m)(a1, a2, a3); }
+
+ void value() {}
+};
+
+namespace internal {
+
+class SynchronousMethodCall
+ : public rtc::MessageData,
+ public rtc::MessageHandler {
+ public:
+ explicit SynchronousMethodCall(rtc::MessageHandler* proxy)
+ : e_(), proxy_(proxy) {}
+ ~SynchronousMethodCall() {}
+
+ void Invoke(rtc::Thread* t) {
+ if (t->IsCurrent()) {
+ proxy_->OnMessage(NULL);
+ } else {
+ e_.reset(new rtc::Event(false, false));
+ t->Post(this, 0);
+ e_->Wait(rtc::Event::kForever);
+ }
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { proxy_->OnMessage(NULL); e_->Set(); }
+ rtc::scoped_ptr<rtc::Event> e_;
+ rtc::MessageHandler* proxy_;
+};
+
+} // namespace internal
+
+template <typename C, typename R>
+class MethodCall0 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)();
+ MethodCall0(C* c, Method m) : c_(c), m_(m) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+};
+
+template <typename C, typename R>
+class ConstMethodCall0 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)() const;
+ ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+};
+
+template <typename C, typename R, typename T1>
+class MethodCall1 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1);
+ MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(a1) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+};
+
+template <typename C, typename R, typename T1>
+class ConstMethodCall1 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1) const;
+ ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(a1) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+};
+
+template <typename C, typename R, typename T1, typename T2>
+class MethodCall2 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1, T2 a2);
+ MethodCall2(C* c, Method m, T1 a1, T2 a2) : c_(c), m_(m), a1_(a1), a2_(a2) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+ T2 a2_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3>
+class MethodCall3 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1, T2 a2, T3 a3);
+ MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3)
+ : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+ T2 a2_;
+ T3 a3_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3,
+ typename T4>
+class MethodCall4 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4);
+ MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4)
+ : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3), a4_(a4) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_, a4_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+ T2 a2_;
+ T3 a3_;
+ T4 a4_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3,
+ typename T4, typename T5>
+class MethodCall5 : public rtc::Message,
+ public rtc::MessageHandler {
+ public:
+ typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5);
+ MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5)
+ : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3), a4_(a4), a5_(a5) {}
+
+ R Marshal(rtc::Thread* t) {
+ internal::SynchronousMethodCall(this).Invoke(t);
+ return r_.value();
+ }
+
+ private:
+ void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_, a4_, a5_); }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ T1 a1_;
+ T2 a2_;
+ T3 a3_;
+ T4 a4_;
+ T5 a5_;
+};
+
+#define BEGIN_PROXY_MAP(c) \
+ class c##Proxy : public c##Interface { \
+ protected: \
+ typedef c##Interface C; \
+ c##Proxy(rtc::Thread* thread, C* c) : owner_thread_(thread), c_(c) {} \
+ ~c##Proxy() { \
+ MethodCall0<c##Proxy, void> call(this, &c##Proxy::Release_s); \
+ call.Marshal(owner_thread_); \
+ } \
+ \
+ public: \
+ static rtc::scoped_refptr<C> Create(rtc::Thread* thread, C* c) { \
+ return new rtc::RefCountedObject<c##Proxy>(thread, c); \
+ }
+
+#define PROXY_METHOD0(r, method) \
+ r method() override { \
+ MethodCall0<C, r> call(c_.get(), &C::method); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ ConstMethodCall0<C, r> call(c_.get(), &C::method); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_METHOD1(r, method, t1) \
+ r method(t1 a1) override { \
+ MethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_CONSTMETHOD1(r, method, t1) \
+ r method(t1 a1) const override { \
+ ConstMethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_METHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) override { \
+ MethodCall2<C, r, t1, t2> call(c_.get(), &C::method, a1, a2); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_METHOD3(r, method, t1, t2, t3) \
+ r method(t1 a1, t2 a2, t3 a3) override { \
+ MethodCall3<C, r, t1, t2, t3> call(c_.get(), &C::method, a1, a2, a3); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \
+ r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \
+ MethodCall4<C, r, t1, t2, t3, t4> call(c_.get(), &C::method, a1, a2, a3, \
+ a4); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \
+ r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \
+ MethodCall5<C, r, t1, t2, t3, t4, t5> call(c_.get(), &C::method, a1, a2, \
+ a3, a4, a5); \
+ return call.Marshal(owner_thread_); \
+ }
+
+#define END_PROXY() \
+ private:\
+ void Release_s() {\
+ c_ = NULL;\
+ }\
+ mutable rtc::Thread* owner_thread_;\
+ rtc::scoped_refptr<C> c_;\
+ };\
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_PROXY_H_
diff --git a/talk/app/webrtc/proxy_unittest.cc b/talk/app/webrtc/proxy_unittest.cc
new file mode 100644
index 0000000000..6fc89a5d10
--- /dev/null
+++ b/talk/app/webrtc/proxy_unittest.cc
@@ -0,0 +1,170 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/proxy.h"
+
+#include <string>
+
+#include "testing/base/public/gmock.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Exactly;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+namespace webrtc {
+
+// Interface used for testing here.
+class FakeInterface : public rtc::RefCountInterface {
+ public:
+ virtual void VoidMethod0() = 0;
+ virtual std::string Method0() = 0;
+ virtual std::string ConstMethod0() const = 0;
+ virtual std::string Method1(std::string s) = 0;
+ virtual std::string ConstMethod1(std::string s) const = 0;
+ virtual std::string Method2(std::string s1, std::string s2) = 0;
+
+ protected:
+ ~FakeInterface() {}
+};
+
+// Proxy for the test interface.
+BEGIN_PROXY_MAP(Fake)
+ PROXY_METHOD0(void, VoidMethod0)
+ PROXY_METHOD0(std::string, Method0)
+ PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+ PROXY_METHOD1(std::string, Method1, std::string)
+ PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+ PROXY_METHOD2(std::string, Method2, std::string, std::string)
+END_PROXY()
+
+// Implementation of the test interface.
+class Fake : public FakeInterface {
+ public:
+ static rtc::scoped_refptr<Fake> Create() {
+ return new rtc::RefCountedObject<Fake>();
+ }
+
+ MOCK_METHOD0(VoidMethod0, void());
+ MOCK_METHOD0(Method0, std::string());
+ MOCK_CONST_METHOD0(ConstMethod0, std::string());
+
+ MOCK_METHOD1(Method1, std::string(std::string));
+ MOCK_CONST_METHOD1(ConstMethod1, std::string(std::string));
+
+ MOCK_METHOD2(Method2, std::string(std::string, std::string));
+
+ protected:
+ Fake() {}
+ ~Fake() {}
+};
+
+class ProxyTest: public testing::Test {
+ public:
+ // Checks that the functions is called on the |signaling_thread_|.
+ void CheckThread() {
+ EXPECT_EQ(rtc::Thread::Current(), signaling_thread_.get());
+ }
+
+ protected:
+ virtual void SetUp() {
+ signaling_thread_.reset(new rtc::Thread());
+ ASSERT_TRUE(signaling_thread_->Start());
+ fake_ = Fake::Create();
+ fake_proxy_ = FakeProxy::Create(signaling_thread_.get(), fake_.get());
+ }
+
+ protected:
+ rtc::scoped_ptr<rtc::Thread> signaling_thread_;
+ rtc::scoped_refptr<FakeInterface> fake_proxy_;
+ rtc::scoped_refptr<Fake> fake_;
+};
+
+TEST_F(ProxyTest, VoidMethod0) {
+ EXPECT_CALL(*fake_, VoidMethod0())
+ .Times(Exactly(1))
+ .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckThread));
+ fake_proxy_->VoidMethod0();
+}
+
+TEST_F(ProxyTest, Method0) {
+ EXPECT_CALL(*fake_, Method0())
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ Return("Method0")));
+ EXPECT_EQ("Method0",
+ fake_proxy_->Method0());
+}
+
+TEST_F(ProxyTest, ConstMethod0) {
+ EXPECT_CALL(*fake_, ConstMethod0())
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ Return("ConstMethod0")));
+ EXPECT_EQ("ConstMethod0",
+ fake_proxy_->ConstMethod0());
+}
+
+TEST_F(ProxyTest, Method1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, Method1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ Return("Method1")));
+ EXPECT_EQ("Method1", fake_proxy_->Method1(arg1));
+}
+
+TEST_F(ProxyTest, ConstMethod1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, ConstMethod1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ Return("ConstMethod1")));
+ EXPECT_EQ("ConstMethod1", fake_proxy_->ConstMethod1(arg1));
+}
+
+TEST_F(ProxyTest, Method2) {
+ const std::string arg1 = "arg1";
+ const std::string arg2 = "arg2";
+ EXPECT_CALL(*fake_, Method2(arg1, arg2))
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ Return("Method2")));
+ EXPECT_EQ("Method2", fake_proxy_->Method2(arg1, arg2));
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/remoteaudiosource.cc b/talk/app/webrtc/remoteaudiosource.cc
new file mode 100644
index 0000000000..41f3d8798a
--- /dev/null
+++ b/talk/app/webrtc/remoteaudiosource.cc
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/remoteaudiosource.h"
+
+#include <algorithm>
+#include <functional>
+
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create() {
+ return new rtc::RefCountedObject<RemoteAudioSource>();
+}
+
+RemoteAudioSource::RemoteAudioSource() {
+}
+
+RemoteAudioSource::~RemoteAudioSource() {
+ ASSERT(audio_observers_.empty());
+}
+
+MediaSourceInterface::SourceState RemoteAudioSource::state() const {
+ return MediaSourceInterface::kLive;
+}
+
+void RemoteAudioSource::SetVolume(double volume) {
+ ASSERT(volume >= 0 && volume <= 10);
+ for (AudioObserverList::iterator it = audio_observers_.begin();
+ it != audio_observers_.end(); ++it) {
+ (*it)->OnSetVolume(volume);
+ }
+}
+
+void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {
+ ASSERT(observer != NULL);
+ ASSERT(std::find(audio_observers_.begin(), audio_observers_.end(),
+ observer) == audio_observers_.end());
+ audio_observers_.push_back(observer);
+}
+
+void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) {
+ ASSERT(observer != NULL);
+ audio_observers_.remove(observer);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/remoteaudiosource.h b/talk/app/webrtc/remoteaudiosource.h
new file mode 100644
index 0000000000..e49aca5684
--- /dev/null
+++ b/talk/app/webrtc/remoteaudiosource.h
@@ -0,0 +1,66 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_REMOTEAUDIOSOURCE_H_
+#define TALK_APP_WEBRTC_REMOTEAUDIOSOURCE_H_
+
+#include <list>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/notifier.h"
+
+namespace webrtc {
+
+using webrtc::AudioSourceInterface;
+
+// This class implements the audio source used by the remote audio track.
+class RemoteAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+ // Creates an instance of RemoteAudioSource.
+ static rtc::scoped_refptr<RemoteAudioSource> Create();
+
+ protected:
+ RemoteAudioSource();
+ virtual ~RemoteAudioSource();
+
+ private:
+ typedef std::list<AudioObserver*> AudioObserverList;
+
+ // MediaSourceInterface implementation.
+ MediaSourceInterface::SourceState state() const override;
+
+ // AudioSourceInterface implementation.
+ void SetVolume(double volume) override;
+ void RegisterAudioObserver(AudioObserver* observer) override;
+ void UnregisterAudioObserver(AudioObserver* observer) override;
+
+ AudioObserverList audio_observers_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_REMOTEAUDIOSOURCE_H_
diff --git a/talk/app/webrtc/remotevideocapturer.cc b/talk/app/webrtc/remotevideocapturer.cc
new file mode 100644
index 0000000000..b0c9f9fc08
--- /dev/null
+++ b/talk/app/webrtc/remotevideocapturer.cc
@@ -0,0 +1,95 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/remotevideocapturer.h"
+
+#include "talk/media/base/videoframe.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+RemoteVideoCapturer::RemoteVideoCapturer() {}
+
+RemoteVideoCapturer::~RemoteVideoCapturer() {}
+
+cricket::CaptureState RemoteVideoCapturer::Start(
+ const cricket::VideoFormat& capture_format) {
+ if (capture_state() == cricket::CS_RUNNING) {
+ LOG(LS_WARNING)
+ << "RemoteVideoCapturer::Start called when it's already started.";
+ return capture_state();
+ }
+
+ LOG(LS_INFO) << "RemoteVideoCapturer::Start";
+ SetCaptureFormat(&capture_format);
+ return cricket::CS_RUNNING;
+}
+
+void RemoteVideoCapturer::Stop() {
+ if (capture_state() == cricket::CS_STOPPED) {
+ LOG(LS_WARNING)
+ << "RemoteVideoCapturer::Stop called when it's already stopped.";
+ return;
+ }
+
+ LOG(LS_INFO) << "RemoteVideoCapturer::Stop";
+ SetCaptureFormat(NULL);
+ SetCaptureState(cricket::CS_STOPPED);
+}
+
+bool RemoteVideoCapturer::IsRunning() {
+ return capture_state() == cricket::CS_RUNNING;
+}
+
+bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+ if (!fourccs)
+ return false;
+ fourccs->push_back(cricket::FOURCC_I420);
+ return true;
+}
+
+bool RemoteVideoCapturer::GetBestCaptureFormat(
+ const cricket::VideoFormat& desired, cricket::VideoFormat* best_format) {
+ if (!best_format) {
+ return false;
+ }
+
+ // RemoteVideoCapturer does not support capability enumeration.
+ // Use the desired format as the best format.
+ best_format->width = desired.width;
+ best_format->height = desired.height;
+ best_format->fourcc = cricket::FOURCC_I420;
+ best_format->interval = desired.interval;
+ return true;
+}
+
+bool RemoteVideoCapturer::IsScreencast() const {
+ // TODO(ronghuawu): what about remote screencast stream.
+ return false;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/remotevideocapturer.h b/talk/app/webrtc/remotevideocapturer.h
new file mode 100644
index 0000000000..b5298d94ee
--- /dev/null
+++ b/talk/app/webrtc/remotevideocapturer.h
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_
+#define TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_
+
+#include <vector>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/media/base/videocapturer.h"
+#include "talk/media/base/videorenderer.h"
+
+namespace webrtc {
+
+// RemoteVideoCapturer implements a simple cricket::VideoCapturer which
+// gets decoded remote video frames from media channel.
+// It's used as the remote video source's VideoCapturer so that the remote video
+// can be used as a cricket::VideoCapturer and in that way a remote video stream
+// can implement the MediaStreamSourceInterface.
+class RemoteVideoCapturer : public cricket::VideoCapturer {
+ public:
+ RemoteVideoCapturer();
+ virtual ~RemoteVideoCapturer();
+
+ // cricket::VideoCapturer implementation.
+ cricket::CaptureState Start(
+ const cricket::VideoFormat& capture_format) override;
+ void Stop() override;
+ bool IsRunning() override;
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+ bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) override;
+ bool IsScreencast() const override;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(RemoteVideoCapturer);
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_
diff --git a/talk/app/webrtc/remotevideocapturer_unittest.cc b/talk/app/webrtc/remotevideocapturer_unittest.cc
new file mode 100644
index 0000000000..88277b61fc
--- /dev/null
+++ b/talk/app/webrtc/remotevideocapturer_unittest.cc
@@ -0,0 +1,132 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/remotevideocapturer.h"
+#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "webrtc/base/gunit.h"
+
+using cricket::CaptureState;
+using cricket::VideoCapturer;
+using cricket::VideoFormat;
+using cricket::VideoFormatPod;
+using cricket::VideoFrame;
+
+static const int kMaxWaitMs = 1000;
+static const VideoFormatPod kTestFormat =
+ {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
+
+class RemoteVideoCapturerTest : public testing::Test,
+ public sigslot::has_slots<> {
+ protected:
+ RemoteVideoCapturerTest()
+ : captured_frame_num_(0),
+ capture_state_(cricket::CS_STOPPED) {}
+
+ virtual void SetUp() {
+ capturer_.SignalStateChange.connect(
+ this, &RemoteVideoCapturerTest::OnStateChange);
+ capturer_.SignalVideoFrame.connect(
+ this, &RemoteVideoCapturerTest::OnVideoFrame);
+ }
+
+ ~RemoteVideoCapturerTest() {
+ capturer_.SignalStateChange.disconnect(this);
+ capturer_.SignalVideoFrame.disconnect(this);
+ }
+
+ int captured_frame_num() const {
+ return captured_frame_num_;
+ }
+
+ CaptureState capture_state() const {
+ return capture_state_;
+ }
+
+ webrtc::RemoteVideoCapturer capturer_;
+
+ private:
+ void OnStateChange(VideoCapturer* capturer,
+ CaptureState capture_state) {
+ EXPECT_EQ(&capturer_, capturer);
+ capture_state_ = capture_state;
+ }
+
+ void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) {
+ EXPECT_EQ(&capturer_, capturer);
+ ++captured_frame_num_;
+ }
+
+ int captured_frame_num_;
+ CaptureState capture_state_;
+};
+
+TEST_F(RemoteVideoCapturerTest, StartStop) {
+ // Start
+ EXPECT_TRUE(
+ capturer_.StartCapturing(VideoFormat(kTestFormat)));
+ EXPECT_TRUE_WAIT((cricket::CS_RUNNING == capture_state()), kMaxWaitMs);
+ EXPECT_EQ(VideoFormat(kTestFormat),
+ *capturer_.GetCaptureFormat());
+ EXPECT_TRUE(capturer_.IsRunning());
+
+ // Stop
+ capturer_.Stop();
+ EXPECT_TRUE_WAIT((cricket::CS_STOPPED == capture_state()), kMaxWaitMs);
+ EXPECT_TRUE(NULL == capturer_.GetCaptureFormat());
+}
+
+TEST_F(RemoteVideoCapturerTest, GetPreferredFourccs) {
+ EXPECT_FALSE(capturer_.GetPreferredFourccs(NULL));
+
+ std::vector<uint32_t> fourccs;
+ EXPECT_TRUE(capturer_.GetPreferredFourccs(&fourccs));
+ EXPECT_EQ(1u, fourccs.size());
+ EXPECT_EQ(cricket::FOURCC_I420, fourccs.at(0));
+}
+
+TEST_F(RemoteVideoCapturerTest, GetBestCaptureFormat) {
+ VideoFormat desired = VideoFormat(kTestFormat);
+ EXPECT_FALSE(capturer_.GetBestCaptureFormat(desired, NULL));
+
+ VideoFormat expected_format = VideoFormat(kTestFormat);
+ expected_format.fourcc = cricket::FOURCC_I420;
+ VideoFormat best_format;
+ EXPECT_TRUE(capturer_.GetBestCaptureFormat(desired, &best_format));
+ EXPECT_EQ(expected_format, best_format);
+}
+
+TEST_F(RemoteVideoCapturerTest, InputFrame) {
+ EXPECT_EQ(0, captured_frame_num());
+
+ cricket::WebRtcVideoFrame test_frame;
+ capturer_.SignalVideoFrame(&capturer_, &test_frame);
+ EXPECT_EQ(1, captured_frame_num());
+ capturer_.SignalVideoFrame(&capturer_, &test_frame);
+ EXPECT_EQ(2, captured_frame_num());
+}
diff --git a/talk/app/webrtc/rtpreceiver.cc b/talk/app/webrtc/rtpreceiver.cc
new file mode 100644
index 0000000000..b88554f0ac
--- /dev/null
+++ b/talk/app/webrtc/rtpreceiver.cc
@@ -0,0 +1,106 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/rtpreceiver.h"
+
+#include "talk/app/webrtc/videosourceinterface.h"
+
+namespace webrtc {
+
+AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
+ uint32_t ssrc,
+ AudioProviderInterface* provider)
+ : id_(track->id()),
+ track_(track),
+ ssrc_(ssrc),
+ provider_(provider),
+ cached_track_enabled_(track->enabled()) {
+ track_->RegisterObserver(this);
+ track_->GetSource()->RegisterAudioObserver(this);
+ Reconfigure();
+}
+
+AudioRtpReceiver::~AudioRtpReceiver() {
+ track_->GetSource()->UnregisterAudioObserver(this);
+ track_->UnregisterObserver(this);
+ Stop();
+}
+
+void AudioRtpReceiver::OnChanged() {
+ if (cached_track_enabled_ != track_->enabled()) {
+ cached_track_enabled_ = track_->enabled();
+ Reconfigure();
+ }
+}
+
+void AudioRtpReceiver::OnSetVolume(double volume) {
+ // When the track is disabled, the volume of the source, which is the
+ // corresponding WebRtc Voice Engine channel will be 0. So we do not allow
+ // setting the volume to the source when the track is disabled.
+ if (provider_ && track_->enabled())
+ provider_->SetAudioPlayoutVolume(ssrc_, volume);
+}
+
+void AudioRtpReceiver::Stop() {
+ // TODO(deadbeef): Need to do more here to fully stop receiving packets.
+ if (!provider_) {
+ return;
+ }
+ provider_->SetAudioPlayout(ssrc_, false);
+ provider_ = nullptr;
+}
+
+void AudioRtpReceiver::Reconfigure() {
+ if (!provider_) {
+ return;
+ }
+ provider_->SetAudioPlayout(ssrc_, track_->enabled());
+}
+
+VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track,
+ uint32_t ssrc,
+ VideoProviderInterface* provider)
+ : id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) {
+ provider_->SetVideoPlayout(ssrc_, true, track_->GetSource()->FrameInput());
+}
+
+VideoRtpReceiver::~VideoRtpReceiver() {
+ // Since cricket::VideoRenderer is not reference counted,
+ // we need to remove it from the provider before we are deleted.
+ Stop();
+}
+
+void VideoRtpReceiver::Stop() {
+ // TODO(deadbeef): Need to do more here to fully stop receiving packets.
+ if (!provider_) {
+ return;
+ }
+ provider_->SetVideoPlayout(ssrc_, false, nullptr);
+ provider_ = nullptr;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/rtpreceiver.h b/talk/app/webrtc/rtpreceiver.h
new file mode 100644
index 0000000000..a93ccbcbfe
--- /dev/null
+++ b/talk/app/webrtc/rtpreceiver.h
@@ -0,0 +1,104 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes that implement RtpReceiverInterface.
+// An RtpReceiver associates a MediaStreamTrackInterface with an underlying
+// transport (provided by AudioProviderInterface/VideoProviderInterface)
+
+#ifndef TALK_APP_WEBRTC_RTPRECEIVER_H_
+#define TALK_APP_WEBRTC_RTPRECEIVER_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "webrtc/base/basictypes.h"
+
+namespace webrtc {
+
+class AudioRtpReceiver : public ObserverInterface,
+ public AudioSourceInterface::AudioObserver,
+ public rtc::RefCountedObject<RtpReceiverInterface> {
+ public:
+ AudioRtpReceiver(AudioTrackInterface* track,
+ uint32_t ssrc,
+ AudioProviderInterface* provider);
+
+ virtual ~AudioRtpReceiver();
+
+ // ObserverInterface implementation
+ void OnChanged() override;
+
+ // AudioSourceInterface::AudioObserver implementation
+ void OnSetVolume(double volume) override;
+
+ // RtpReceiverInterface implementation
+ rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+ return track_.get();
+ }
+
+ std::string id() const override { return id_; }
+
+ void Stop() override;
+
+ private:
+ void Reconfigure();
+
+ std::string id_;
+ rtc::scoped_refptr<AudioTrackInterface> track_;
+ uint32_t ssrc_;
+ AudioProviderInterface* provider_;
+ bool cached_track_enabled_;
+};
+
+class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
+ public:
+ VideoRtpReceiver(VideoTrackInterface* track,
+ uint32_t ssrc,
+ VideoProviderInterface* provider);
+
+ virtual ~VideoRtpReceiver();
+
+ // RtpReceiverInterface implementation
+ rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+ return track_.get();
+ }
+
+ std::string id() const override { return id_; }
+
+ void Stop() override;
+
+ private:
+ std::string id_;
+ rtc::scoped_refptr<VideoTrackInterface> track_;
+ uint32_t ssrc_;
+ VideoProviderInterface* provider_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_RTPRECEIVER_H_
diff --git a/talk/app/webrtc/rtpreceiverinterface.h b/talk/app/webrtc/rtpreceiverinterface.h
new file mode 100644
index 0000000000..099699efc4
--- /dev/null
+++ b/talk/app/webrtc/rtpreceiverinterface.h
@@ -0,0 +1,66 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for RtpReceivers
+// http://w3c.github.io/webrtc-pc/#rtcrtpreceiver-interface
+
+#ifndef TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
+#define TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
+
+#include <string>
+
+#include "talk/app/webrtc/proxy.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class RtpReceiverInterface : public rtc::RefCountInterface {
+ public:
+ virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
+
+ // Not to be confused with "mid", this is a field we can temporarily use
+ // to uniquely identify a receiver until we implement Unified Plan SDP.
+ virtual std::string id() const = 0;
+
+ virtual void Stop() = 0;
+
+ protected:
+ virtual ~RtpReceiverInterface() {}
+};
+
+// Define proxy for RtpReceiverInterface.
+BEGIN_PROXY_MAP(RtpReceiver)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD0(void, Stop)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
diff --git a/talk/app/webrtc/rtpsender.cc b/talk/app/webrtc/rtpsender.cc
new file mode 100644
index 0000000000..3a78f4598a
--- /dev/null
+++ b/talk/app/webrtc/rtpsender.cc
@@ -0,0 +1,207 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/rtpsender.h"
+
+#include "talk/app/webrtc/localaudiosource.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+
+namespace webrtc {
+
+LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
+
+LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
+  rtc::CritScope lock(&lock_);
+  // Tell the downstream sink, if any, that no more data will arrive.
+  if (sink_)
+    sink_->OnClose();
+}
+
+// AudioTrackSinkInterface implementation: forwards a chunk of audio data
+// received from the local track to the registered renderer sink, if any.
+void LocalAudioSinkAdapter::OnData(const void* audio_data,
+                                   int bits_per_sample,
+                                   int sample_rate,
+                                   int number_of_channels,
+                                   size_t number_of_frames) {
+  rtc::CritScope lock(&lock_);
+  if (sink_) {
+    sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+                  number_of_frames);
+  }
+}
+
+// cricket::AudioRenderer implementation: installs (or, with nullptr, clears)
+// the sink that OnData() forwards to.
+void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
+  rtc::CritScope lock(&lock_);
+  // A sink may be set or cleared, but never silently replaced.
+  ASSERT(!sink || !sink_);
+  sink_ = sink;
+}
+
+// Starts sending |track| over |provider| using |ssrc|. The sender registers
+// itself as a track observer so enable/disable changes reach the provider.
+AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
+                               uint32_t ssrc,
+                               AudioProviderInterface* provider)
+    : id_(track->id()),
+      track_(track),
+      ssrc_(ssrc),
+      provider_(provider),
+      cached_track_enabled_(track->enabled()),
+      sink_adapter_(new LocalAudioSinkAdapter()) {
+  track_->RegisterObserver(this);
+  // Route the track's audio data through the adapter so it can be handed to
+  // the provider as a cricket::AudioRenderer (see Reconfigure()).
+  track_->AddSink(sink_adapter_.get());
+  Reconfigure();
+}
+
+AudioRtpSender::~AudioRtpSender() {
+  // Detach from the track before stopping so no callbacks arrive mid-teardown.
+  track_->RemoveSink(sink_adapter_.get());
+  track_->UnregisterObserver(this);
+  Stop();
+}
+
+// ObserverInterface implementation. Reconfigures the provider only when the
+// track's enabled state actually flipped; other track changes are ignored.
+void AudioRtpSender::OnChanged() {
+  bool enabled = track_->enabled();
+  if (enabled == cached_track_enabled_) {
+    return;
+  }
+  cached_track_enabled_ = enabled;
+  Reconfigure();
+}
+
+// Replaces the track being sent. Fails (returning false, leaving the current
+// track attached) if |track| is null or is not an audio track.
+bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+  // Guard against a null track; dereferencing it below would crash.
+  if (!track) {
+    LOG(LS_ERROR) << "SetTrack called on audio RtpSender with null track.";
+    return false;
+  }
+  if (track->kind() != "audio") {
+    LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind()
+                  << " track.";
+    return false;
+  }
+  AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);
+
+  // Detach from old track.
+  track_->RemoveSink(sink_adapter_.get());
+  track_->UnregisterObserver(this);
+
+  // Attach to new track.
+  track_ = audio_track;
+  cached_track_enabled_ = track_->enabled();
+  track_->RegisterObserver(this);
+  track_->AddSink(sink_adapter_.get());
+  Reconfigure();
+  return true;
+}
+
+// Stops sending on this sender's SSRC and severs the link to the provider.
+// Safe to call more than once; subsequent calls are no-ops.
+void AudioRtpSender::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop sending packets.
+  if (!provider_) {
+    return;
+  }
+  cricket::AudioOptions options;
+  provider_->SetAudioSend(ssrc_, false, options, nullptr);
+  provider_ = nullptr;
+}
+
+// Pushes the track's current state (enabled flag, source options, renderer)
+// down to the provider. No-op once Stop() has cleared |provider_|.
+void AudioRtpSender::Reconfigure() {
+  if (!provider_) {
+    return;
+  }
+  cricket::AudioOptions options;
+  if (track_->enabled() && track_->GetSource()) {
+    // TODO(xians): Remove this static_cast since we should be able to connect
+    // a remote audio track to peer connection.
+    options = static_cast<LocalAudioSource*>(track_->GetSource())->options();
+  }
+
+  // Use the renderer if the audio track has one, otherwise use the sink
+  // adapter owned by this class.
+  cricket::AudioRenderer* renderer =
+      track_->GetRenderer() ? track_->GetRenderer() : sink_adapter_.get();
+  ASSERT(renderer != nullptr);
+  provider_->SetAudioSend(ssrc_, track_->enabled(), options, renderer);
+}
+
+// Starts sending |track| over |provider| using |ssrc|, wiring the source's
+// capturer (when the track has a source) into the provider.
+VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
+                               uint32_t ssrc,
+                               VideoProviderInterface* provider)
+    : id_(track->id()),
+      track_(track),
+      ssrc_(ssrc),
+      provider_(provider),
+      cached_track_enabled_(track->enabled()) {
+  track_->RegisterObserver(this);
+  VideoSourceInterface* source = track_->GetSource();
+  if (source) {
+    provider_->SetCaptureDevice(ssrc_, source->GetVideoCapturer());
+  }
+  Reconfigure();
+}
+
+VideoRtpSender::~VideoRtpSender() {
+  // Detach from the track before stopping so no callbacks arrive mid-teardown.
+  track_->UnregisterObserver(this);
+  Stop();
+}
+
+// ObserverInterface implementation. Reconfigures the provider only when the
+// track's enabled state actually flipped; other track changes are ignored.
+void VideoRtpSender::OnChanged() {
+  bool enabled = track_->enabled();
+  if (enabled == cached_track_enabled_) {
+    return;
+  }
+  cached_track_enabled_ = enabled;
+  Reconfigure();
+}
+
+// Replaces the track being sent. Fails (returning false, leaving the current
+// track attached) if |track| is null or is not a video track.
+bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+  // Guard against a null track; dereferencing it below would crash.
+  if (!track) {
+    LOG(LS_ERROR) << "SetTrack called on video RtpSender with null track.";
+    return false;
+  }
+  if (track->kind() != "video") {
+    LOG(LS_ERROR) << "SetTrack called on video RtpSender with " << track->kind()
+                  << " track.";
+    return false;
+  }
+  VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);
+
+  // Detach from old track.
+  track_->UnregisterObserver(this);
+
+  // Attach to new track.
+  track_ = video_track;
+  cached_track_enabled_ = track_->enabled();
+  track_->RegisterObserver(this);
+  Reconfigure();
+  return true;
+}
+
+// Stops sending on this sender's SSRC, detaches the capture device, and
+// severs the link to the provider. Safe to call more than once.
+void VideoRtpSender::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop sending packets.
+  if (!provider_) {
+    return;
+  }
+  provider_->SetCaptureDevice(ssrc_, nullptr);
+  provider_->SetVideoSend(ssrc_, false, nullptr);
+  provider_ = nullptr;
+}
+
+// Pushes the track's current state (enabled flag and source options) down to
+// the provider. No-op once Stop() has cleared |provider_|.
+void VideoRtpSender::Reconfigure() {
+  if (!provider_) {
+    return;
+  }
+  const cricket::VideoOptions* options = nullptr;
+  VideoSourceInterface* source = track_->GetSource();
+  if (track_->enabled() && source) {
+    options = source->options();
+  }
+  provider_->SetVideoSend(ssrc_, track_->enabled(), options);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/rtpsender.h b/talk/app/webrtc/rtpsender.h
new file mode 100644
index 0000000000..3741909323
--- /dev/null
+++ b/talk/app/webrtc/rtpsender.h
@@ -0,0 +1,140 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes that implement RtpSenderInterface.
+// An RtpSender associates a MediaStreamTrackInterface with an underlying
+// transport (provided by AudioProviderInterface/VideoProviderInterface)
+
+#ifndef TALK_APP_WEBRTC_RTPSENDER_H_
+#define TALK_APP_WEBRTC_RTPSENDER_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/media/base/audiorenderer.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+// LocalAudioSinkAdapter receives data callback as a sink to the local
+// AudioTrack, and passes the data to the sink of AudioRenderer. It bridges
+// the two interfaces so AudioRtpSender can hand a track's audio to the
+// provider.
+class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
+                              public cricket::AudioRenderer {
+ public:
+  LocalAudioSinkAdapter();
+  virtual ~LocalAudioSinkAdapter();
+
+ private:
+  // AudioSinkInterface implementation.
+  void OnData(const void* audio_data,
+              int bits_per_sample,
+              int sample_rate,
+              int number_of_channels,
+              size_t number_of_frames) override;
+
+  // cricket::AudioRenderer implementation.
+  void SetSink(cricket::AudioRenderer::Sink* sink) override;
+
+  // Destination for forwarded audio; null when no sink is attached.
+  cricket::AudioRenderer::Sink* sink_;
+  // Critical section protecting |sink_|.
+  rtc::CriticalSection lock_;
+};
+
+// RtpSenderInterface implementation for a local audio track. Observes the
+// track so changes to its enabled state are propagated to the provider.
+class AudioRtpSender : public ObserverInterface,
+                       public rtc::RefCountedObject<RtpSenderInterface> {
+ public:
+  AudioRtpSender(AudioTrackInterface* track,
+                 uint32_t ssrc,
+                 AudioProviderInterface* provider);
+
+  virtual ~AudioRtpSender();
+
+  // ObserverInterface implementation
+  void OnChanged() override;
+
+  // RtpSenderInterface implementation
+  bool SetTrack(MediaStreamTrackInterface* track) override;
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  std::string id() const override { return id_; }
+
+  void Stop() override;
+
+ private:
+  // Pushes the track's current state down to |provider_|.
+  void Reconfigure();
+
+  std::string id_;
+  rtc::scoped_refptr<AudioTrackInterface> track_;
+  uint32_t ssrc_;
+  // Not owned; cleared by Stop().
+  AudioProviderInterface* provider_;
+  // Last enabled state observed, used to filter redundant OnChanged() calls.
+  bool cached_track_enabled_;
+
+  // Used to pass the data callback from the |track_| to the other end of
+  // cricket::AudioRenderer.
+  rtc::scoped_ptr<LocalAudioSinkAdapter> sink_adapter_;
+};
+
+// RtpSenderInterface implementation for a local video track. Observes the
+// track so changes to its enabled state are propagated to the provider.
+class VideoRtpSender : public ObserverInterface,
+                       public rtc::RefCountedObject<RtpSenderInterface> {
+ public:
+  VideoRtpSender(VideoTrackInterface* track,
+                 uint32_t ssrc,
+                 VideoProviderInterface* provider);
+
+  virtual ~VideoRtpSender();
+
+  // ObserverInterface implementation
+  void OnChanged() override;
+
+  // RtpSenderInterface implementation
+  bool SetTrack(MediaStreamTrackInterface* track) override;
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  std::string id() const override { return id_; }
+
+  void Stop() override;
+
+ private:
+  // Pushes the track's current state down to |provider_|.
+  void Reconfigure();
+
+  std::string id_;
+  rtc::scoped_refptr<VideoTrackInterface> track_;
+  uint32_t ssrc_;
+  // Not owned; cleared by Stop().
+  VideoProviderInterface* provider_;
+  // Last enabled state observed, used to filter redundant OnChanged() calls.
+  bool cached_track_enabled_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_RTPSENDER_H_
diff --git a/talk/app/webrtc/rtpsenderinterface.h b/talk/app/webrtc/rtpsenderinterface.h
new file mode 100644
index 0000000000..fca98f21db
--- /dev/null
+++ b/talk/app/webrtc/rtpsenderinterface.h
@@ -0,0 +1,70 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for RtpSenders
+// http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface
+
+#ifndef TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_
+#define TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_
+
+#include <string>
+
+#include "talk/app/webrtc/proxy.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+// Sending half of an RTP connection: pairs a local MediaStreamTrack with the
+// transport it is sent over.
+class RtpSenderInterface : public rtc::RefCountInterface {
+ public:
+  // Returns true if successful in setting the track.
+  // Fails if an audio track is set on a video RtpSender, or vice-versa.
+  virtual bool SetTrack(MediaStreamTrackInterface* track) = 0;
+  virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
+
+  // Not to be confused with "mid", this is a field we can temporarily use
+  // to uniquely identify a sender until we implement Unified Plan SDP.
+  virtual std::string id() const = 0;
+
+  // Stops sending media on this sender.
+  virtual void Stop() = 0;
+
+ protected:
+  // Ref-counted; destroyed via Release(), never deleted directly by callers.
+  virtual ~RtpSenderInterface() {}
+};
+
+// Define proxy for RtpSenderInterface.
+BEGIN_PROXY_MAP(RtpSender)
+PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD0(void, Stop)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_
diff --git a/talk/app/webrtc/rtpsenderreceiver_unittest.cc b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
new file mode 100644
index 0000000000..c9d7e008c3
--- /dev/null
+++ b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
@@ -0,0 +1,283 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/remoteaudiosource.h"
+#include "talk/app/webrtc/rtpreceiver.h"
+#include "talk/app/webrtc/rtpsender.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/base/fakevideocapturer.h"
+#include "talk/media/base/mediachannel.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/gunit.h"
+
+using ::testing::_;
+using ::testing::Exactly;
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoTrackId[] = "video_1";
+static const char kAudioTrackId[] = "audio_1";
+static const uint32_t kVideoSsrc = 98;
+static const uint32_t kAudioSsrc = 99;
+
+namespace webrtc {
+
+// Helper class to test RtpSender/RtpReceiver. Records the calls the
+// senders/receivers make on the audio provider.
+class MockAudioProvider : public AudioProviderInterface {
+ public:
+  virtual ~MockAudioProvider() {}
+  MOCK_METHOD2(SetAudioPlayout,
+               void(uint32_t ssrc,
+                    bool enable));
+  MOCK_METHOD4(SetAudioSend,
+               void(uint32_t ssrc,
+                    bool enable,
+                    const cricket::AudioOptions& options,
+                    cricket::AudioRenderer* renderer));
+  MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
+};
+
+// Helper class to test RtpSender/RtpReceiver. Records the calls the
+// senders/receivers make on the video provider.
+class MockVideoProvider : public VideoProviderInterface {
+ public:
+  virtual ~MockVideoProvider() {}
+  MOCK_METHOD2(SetCaptureDevice,
+               bool(uint32_t ssrc, cricket::VideoCapturer* camera));
+  MOCK_METHOD3(SetVideoPlayout,
+               void(uint32_t ssrc,
+                    bool enable,
+                    cricket::VideoRenderer* renderer));
+  MOCK_METHOD3(SetVideoSend,
+               void(uint32_t ssrc,
+                    bool enable,
+                    const cricket::VideoOptions* options));
+};
+
+// Minimal VideoSourceInterface backed by a cricket::FakeVideoCapturer so the
+// tests can construct video tracks without real capture hardware.
+class FakeVideoSource : public Notifier<VideoSourceInterface> {
+ public:
+  static rtc::scoped_refptr<FakeVideoSource> Create() {
+    return new rtc::RefCountedObject<FakeVideoSource>();
+  }
+  virtual cricket::VideoCapturer* GetVideoCapturer() { return &fake_capturer_; }
+  virtual void Stop() {}
+  virtual void Restart() {}
+  virtual void AddSink(cricket::VideoRenderer* output) {}
+  virtual void RemoveSink(cricket::VideoRenderer* output) {}
+  virtual SourceState state() const { return state_; }
+  virtual const cricket::VideoOptions* options() const { return &options_; }
+  virtual cricket::VideoRenderer* FrameInput() { return NULL; }
+
+ protected:
+  // Construct via Create(); always starts in the kLive state.
+  FakeVideoSource() : state_(kLive) {}
+  ~FakeVideoSource() {}
+
+ private:
+  cricket::FakeVideoCapturer fake_capturer_;
+  SourceState state_;
+  cricket::VideoOptions options_;
+};
+
+// Fixture that wires fake tracks and mock providers together so the
+// RtpSender/RtpReceiver classes can be exercised without a real media engine.
+class RtpSenderReceiverTest : public testing::Test {
+ public:
+  virtual void SetUp() {
+    // Every test starts with a stream holding one fake-capturer video track.
+    stream_ = MediaStream::Create(kStreamLabel1);
+    rtc::scoped_refptr<VideoSourceInterface> source(FakeVideoSource::Create());
+    video_track_ = VideoTrack::Create(kVideoTrackId, source);
+    EXPECT_TRUE(stream_->AddTrack(video_track_));
+  }
+
+  // Adds an audio track to the stream and builds a sender for it; expects
+  // the provider to be told to start sending.
+  void CreateAudioRtpSender() {
+    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+    audio_rtp_sender_ = new AudioRtpSender(stream_->GetAudioTracks()[0],
+                                           kAudioSsrc, &audio_provider_);
+  }
+
+  // Builds a sender for the video track made in SetUp(); expects the provider
+  // to receive both the capture device and a start-send call.
+  void CreateVideoRtpSender() {
+    EXPECT_CALL(video_provider_,
+                SetCaptureDevice(
+                    kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
+    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+    video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
+                                           kVideoSsrc, &video_provider_);
+  }
+
+  // Dropping the last reference destroys the sender, which must stop sending.
+  void DestroyAudioRtpSender() {
+    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _))
+        .Times(1);
+    audio_rtp_sender_ = nullptr;
+  }
+
+  void DestroyVideoRtpSender() {
+    EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL)).Times(1);
+    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+    video_rtp_sender_ = nullptr;
+  }
+
+  // Receiver-side equivalents of the helpers above.
+  void CreateAudioRtpReceiver() {
+    audio_track_ =
+        AudioTrack::Create(kAudioTrackId, RemoteAudioSource::Create().get());
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+    audio_rtp_receiver_ = new AudioRtpReceiver(stream_->GetAudioTracks()[0],
+                                               kAudioSsrc, &audio_provider_);
+  }
+
+  void CreateVideoRtpReceiver() {
+    EXPECT_CALL(video_provider_,
+                SetVideoPlayout(kVideoSsrc, true,
+                                video_track_->GetSource()->FrameInput()));
+    video_rtp_receiver_ = new VideoRtpReceiver(stream_->GetVideoTracks()[0],
+                                               kVideoSsrc, &video_provider_);
+  }
+
+  void DestroyAudioRtpReceiver() {
+    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+    audio_rtp_receiver_ = nullptr;
+  }
+
+  void DestroyVideoRtpReceiver() {
+    EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL));
+    video_rtp_receiver_ = nullptr;
+  }
+
+ protected:
+  MockAudioProvider audio_provider_;
+  MockVideoProvider video_provider_;
+  rtc::scoped_refptr<AudioRtpSender> audio_rtp_sender_;
+  rtc::scoped_refptr<VideoRtpSender> video_rtp_sender_;
+  rtc::scoped_refptr<AudioRtpReceiver> audio_rtp_receiver_;
+  rtc::scoped_refptr<VideoRtpReceiver> video_rtp_receiver_;
+  rtc::scoped_refptr<MediaStreamInterface> stream_;
+  rtc::scoped_refptr<VideoTrackInterface> video_track_;
+  rtc::scoped_refptr<AudioTrackInterface> audio_track_;
+};
+
+// Test that |audio_provider_| is notified when an audio track is associated
+// and disassociated with an AudioRtpSender.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpSender) {
+  CreateAudioRtpSender();
+  DestroyAudioRtpSender();
+}
+
+// Test that |video_provider_| is notified when a video track is associated and
+// disassociated with a VideoRtpSender.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpSender) {
+  CreateVideoRtpSender();
+  DestroyVideoRtpSender();
+}
+
+// Test that |audio_provider_| is notified when a remote audio track is
+// associated and disassociated with an AudioRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiver) {
+  CreateAudioRtpReceiver();
+  DestroyAudioRtpReceiver();
+}
+
+// Test that |video_provider_| is notified when a remote
+// video track is associated and disassociated with a VideoRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiver) {
+  CreateVideoRtpReceiver();
+  DestroyVideoRtpReceiver();
+}
+
+// Disabling/enabling a local audio track must toggle sending on the provider.
+TEST_F(RtpSenderReceiverTest, LocalAudioTrackDisable) {
+  CreateAudioRtpSender();
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _));
+  audio_track_->set_enabled(false);
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  audio_track_->set_enabled(true);
+
+  DestroyAudioRtpSender();
+}
+
+// Disabling/enabling a remote audio track must toggle playout on the provider.
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
+  CreateAudioRtpReceiver();
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+  audio_track_->set_enabled(false);
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+  audio_track_->set_enabled(true);
+
+  DestroyAudioRtpReceiver();
+}
+
+// Disabling/enabling a local video track must toggle sending on the provider.
+TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) {
+  CreateVideoRtpSender();
+
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _));
+  video_track_->set_enabled(false);
+
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpSender();
+}
+
+// NOTE(review): no provider expectations here — presumably toggling a remote
+// video track is not expected to reach the provider; confirm this is intended.
+TEST_F(RtpSenderReceiverTest, RemoteVideoTrackDisable) {
+  CreateVideoRtpReceiver();
+
+  video_track_->set_enabled(false);
+
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpReceiver();
+}
+
+// Volume changes on a remote audio track's source must reach the provider,
+// except while the track is disabled.
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
+  CreateAudioRtpReceiver();
+
+  double volume = 0.5;
+  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, volume));
+  audio_track_->GetSource()->SetVolume(volume);
+
+  // Disable the audio track, this should prevent setting the volume.
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+  audio_track_->set_enabled(false);
+  audio_track_->GetSource()->SetVolume(1.0);
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+  audio_track_->set_enabled(true);
+
+  double new_volume = 0.8;
+  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, new_volume));
+  audio_track_->GetSource()->SetVolume(new_volume);
+
+  DestroyAudioRtpReceiver();
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/sctputils.cc b/talk/app/webrtc/sctputils.cc
new file mode 100644
index 0000000000..2239599511
--- /dev/null
+++ b/talk/app/webrtc/sctputils.cc
@@ -0,0 +1,205 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/sctputils.h"
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+// Format defined at
+// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-01#section
+
+static const uint8_t DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
+static const uint8_t DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
+
+enum DataChannelOpenMessageChannelType {
+ DCOMCT_ORDERED_RELIABLE = 0x00,
+ DCOMCT_ORDERED_PARTIAL_RTXS = 0x01,
+ DCOMCT_ORDERED_PARTIAL_TIME = 0x02,
+ DCOMCT_UNORDERED_RELIABLE = 0x80,
+ DCOMCT_UNORDERED_PARTIAL_RTXS = 0x81,
+ DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
+};
+
+// Returns true if |payload| begins with the DATA_CHANNEL_OPEN message type.
+// Format defined at
+// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+bool IsOpenMessage(const rtc::Buffer& payload) {
+  rtc::ByteBuffer reader(payload);
+  uint8_t message_type;
+  if (!reader.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  return message_type == DATA_CHANNEL_OPEN_MESSAGE_TYPE;
+}
+
+// Parses a DATA_CHANNEL_OPEN message into |label| and |config|. Returns
+// false (leaving outputs in an unspecified state) on any malformed field.
+// Format defined at
+// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
+                                 std::string* label,
+                                 DataChannelInit* config) {
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  if (message_type != DATA_CHANNEL_OPEN_MESSAGE_TYPE) {
+    // Cast to int so the value is logged as a number rather than streamed
+    // as a (likely unprintable) raw character.
+    LOG(LS_WARNING) << "Data Channel OPEN message of unexpected type: "
+                    << static_cast<int>(message_type);
+    return false;
+  }
+
+  uint8_t channel_type;
+  if (!buffer.ReadUInt8(&channel_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message channel type.";
+    return false;
+  }
+
+  uint16_t priority;
+  if (!buffer.ReadUInt16(&priority)) {
+    LOG(LS_WARNING) << "Could not read OPEN message reliability priority.";
+    return false;
+  }
+  uint32_t reliability_param;
+  if (!buffer.ReadUInt32(&reliability_param)) {
+    LOG(LS_WARNING) << "Could not read OPEN message reliability param.";
+    return false;
+  }
+  uint16_t label_length;
+  if (!buffer.ReadUInt16(&label_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message label length.";
+    return false;
+  }
+  uint16_t protocol_length;
+  if (!buffer.ReadUInt16(&protocol_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message protocol length.";
+    return false;
+  }
+  if (!buffer.ReadString(label, static_cast<size_t>(label_length))) {
+    LOG(LS_WARNING) << "Could not read OPEN message label";
+    return false;
+  }
+  if (!buffer.ReadString(&config->protocol, protocol_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message protocol.";
+    return false;
+  }
+
+  // The high bit of the channel type distinguishes unordered delivery.
+  config->ordered = true;
+  switch (channel_type) {
+    case DCOMCT_UNORDERED_RELIABLE:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->ordered = false;
+  }
+
+  // The single reliability parameter is either a retransmit count or a
+  // lifetime in ms, depending on the channel type; -1 means "unset".
+  config->maxRetransmits = -1;
+  config->maxRetransmitTime = -1;
+  switch (channel_type) {
+    case DCOMCT_ORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+      config->maxRetransmits = reliability_param;
+      break;
+    case DCOMCT_ORDERED_PARTIAL_TIME:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->maxRetransmitTime = reliability_param;
+      break;
+  }
+  return true;
+}
+
+// Returns true if |payload| is a well-formed DATA_CHANNEL_ACK message.
+bool ParseDataChannelOpenAckMessage(const rtc::Buffer& payload) {
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN_ACK message type.";
+    return false;
+  }
+  if (message_type != DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE) {
+    // Cast to int so the value is logged as a number rather than streamed
+    // as a (likely unprintable) raw character.
+    LOG(LS_WARNING) << "Data Channel OPEN_ACK message of unexpected type: "
+                    << static_cast<int>(message_type);
+    return false;
+  }
+  return true;
+}
+
+// Serializes a DATA_CHANNEL_OPEN message for |label| and |config| into
+// |payload|. Always returns true.
+bool WriteDataChannelOpenMessage(const std::string& label,
+                                 const DataChannelInit& config,
+                                 rtc::Buffer* payload) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
+  // Map the ordered/partial-reliability settings onto a channel type code
+  // plus a single reliability parameter (retransmit count or lifetime).
+  uint8_t channel_type = 0;
+  uint32_t reliability_param = 0;
+  uint16_t priority = 0;
+  if (config.ordered) {
+    if (config.maxRetransmits > -1) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
+      reliability_param = config.maxRetransmits;
+    } else if (config.maxRetransmitTime > -1) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_TIME;
+      reliability_param = config.maxRetransmitTime;
+    } else {
+      channel_type = DCOMCT_ORDERED_RELIABLE;
+    }
+  } else {
+    if (config.maxRetransmits > -1) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_RTXS;
+      reliability_param = config.maxRetransmits;
+    } else if (config.maxRetransmitTime > -1) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_TIME;
+      reliability_param = config.maxRetransmitTime;
+    } else {
+      channel_type = DCOMCT_UNORDERED_RELIABLE;
+    }
+  }
+
+  // The fixed header (type, channel type, priority, reliability param and
+  // the two length fields) is 12 bytes; 20 leaves a little slack.
+  rtc::ByteBuffer buffer(
+      NULL, 20 + label.length() + config.protocol.length(),
+      rtc::ByteBuffer::ORDER_NETWORK);
+  buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE);
+  buffer.WriteUInt8(channel_type);
+  buffer.WriteUInt16(priority);
+  buffer.WriteUInt32(reliability_param);
+  buffer.WriteUInt16(static_cast<uint16_t>(label.length()));
+  buffer.WriteUInt16(static_cast<uint16_t>(config.protocol.length()));
+  buffer.WriteString(label);
+  buffer.WriteString(config.protocol);
+  payload->SetData(buffer.Data(), buffer.Length());
+  return true;
+}
+
+// Serializes a DATA_CHANNEL_ACK message (a single message-type octet) into
+// |payload|.
+void WriteDataChannelOpenAckMessage(rtc::Buffer* payload) {
+  rtc::ByteBuffer ack(rtc::ByteBuffer::ORDER_NETWORK);
+  ack.WriteUInt8(DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE);
+  payload->SetData(ack.Data(), ack.Length());
+}
+} // namespace webrtc
diff --git a/talk/app/webrtc/sctputils.h b/talk/app/webrtc/sctputils.h
new file mode 100644
index 0000000000..f16873c4c3
--- /dev/null
+++ b/talk/app/webrtc/sctputils.h
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_SCTPUTILS_H_
+#define TALK_APP_WEBRTC_SCTPUTILS_H_
+
+#include <string>
+
+#include "talk/app/webrtc/datachannelinterface.h"
+
+namespace rtc {
+class Buffer;
+} // namespace rtc
+
+namespace webrtc {
+struct DataChannelInit;
+
+// Read the message type and return true if it's an OPEN message.
+bool IsOpenMessage(const rtc::Buffer& payload);
+
+bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
+ std::string* label,
+ DataChannelInit* config);
+
+bool ParseDataChannelOpenAckMessage(const rtc::Buffer& payload);
+
+bool WriteDataChannelOpenMessage(const std::string& label,
+ const DataChannelInit& config,
+ rtc::Buffer* payload);
+
+void WriteDataChannelOpenAckMessage(rtc::Buffer* payload);
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_SCTPUTILS_H_
diff --git a/talk/app/webrtc/sctputils_unittest.cc b/talk/app/webrtc/sctputils_unittest.cc
new file mode 100644
index 0000000000..e0e203f5cd
--- /dev/null
+++ b/talk/app/webrtc/sctputils_unittest.cc
@@ -0,0 +1,178 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/sctputils.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/gunit.h"
+
+class SctpUtilsTest : public testing::Test {
+ public:
+ void VerifyOpenMessageFormat(const rtc::Buffer& packet,
+ const std::string& label,
+ const webrtc::DataChannelInit& config) {
+ uint8_t message_type;
+ uint8_t channel_type;
+ uint32_t reliability;
+ uint16_t priority;
+ uint16_t label_length;
+ uint16_t protocol_length;
+
+ rtc::ByteBuffer buffer(packet.data(), packet.length());
+ ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+ EXPECT_EQ(0x03, message_type);
+
+ ASSERT_TRUE(buffer.ReadUInt8(&channel_type));
+ if (config.ordered) {
+ EXPECT_EQ(config.maxRetransmits > -1 ?
+ 0x01 : (config.maxRetransmitTime > -1 ? 0x02 : 0),
+ channel_type);
+ } else {
+ EXPECT_EQ(config.maxRetransmits > -1 ?
+ 0x81 : (config.maxRetransmitTime > -1 ? 0x82 : 0x80),
+ channel_type);
+ }
+
+ ASSERT_TRUE(buffer.ReadUInt16(&priority));
+
+ ASSERT_TRUE(buffer.ReadUInt32(&reliability));
+ if (config.maxRetransmits > -1 || config.maxRetransmitTime > -1) {
+ EXPECT_EQ(config.maxRetransmits > -1 ?
+ config.maxRetransmits : config.maxRetransmitTime,
+ static_cast<int>(reliability));
+ }
+
+ ASSERT_TRUE(buffer.ReadUInt16(&label_length));
+ ASSERT_TRUE(buffer.ReadUInt16(&protocol_length));
+ EXPECT_EQ(label.size(), label_length);
+ EXPECT_EQ(config.protocol.size(), protocol_length);
+
+ std::string label_output;
+ ASSERT_TRUE(buffer.ReadString(&label_output, label_length));
+ EXPECT_EQ(label, label_output);
+ std::string protocol_output;
+ ASSERT_TRUE(buffer.ReadString(&protocol_output, protocol_length));
+ EXPECT_EQ(config.protocol, protocol_output);
+ }
+};
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithOrderedReliable) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.protocol = "y";
+
+ rtc::Buffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+ packet, &output_label, &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+ EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.ordered = false;
+ config.maxRetransmitTime = 10;
+ config.protocol = "y";
+
+ rtc::Buffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+ packet, &output_label, &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+ EXPECT_EQ(-1, output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.maxRetransmits = 10;
+ config.protocol = "y";
+
+ rtc::Buffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+ packet, &output_label, &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+ EXPECT_EQ(-1, output_config.maxRetransmitTime);
+}
+
+TEST_F(SctpUtilsTest, WriteParseAckMessage) {
+ rtc::Buffer packet;
+ webrtc::WriteDataChannelOpenAckMessage(&packet);
+
+ uint8_t message_type;
+ rtc::ByteBuffer buffer(packet.data(), packet.length());
+ ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+ EXPECT_EQ(0x02, message_type);
+
+ EXPECT_TRUE(webrtc::ParseDataChannelOpenAckMessage(packet));
+}
+
+TEST_F(SctpUtilsTest, TestIsOpenMessage) {
+  rtc::ByteBuffer open;
+  open.WriteUInt8(0x03);  // DATA_CHANNEL_OPEN message type.
+  EXPECT_TRUE(webrtc::IsOpenMessage(open));
+
+  rtc::ByteBuffer openAck;
+  openAck.WriteUInt8(0x02);  // DATA_CHANNEL_ACK must not count as OPEN.
+  EXPECT_FALSE(webrtc::IsOpenMessage(openAck));
+
+  rtc::ByteBuffer invalid;
+  invalid.WriteUInt8(0x01);  // Unknown message type.
+  EXPECT_FALSE(webrtc::IsOpenMessage(invalid));
+
+  rtc::ByteBuffer empty;
+  EXPECT_FALSE(webrtc::IsOpenMessage(empty));
+}
diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc
new file mode 100644
index 0000000000..347a84640c
--- /dev/null
+++ b/talk/app/webrtc/statscollector.cc
@@ -0,0 +1,945 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/statscollector.h"
+
+#include <utility>
+#include <vector>
+
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/session/media/channel.h"
+#include "webrtc/base/base64.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/timing.h"
+
+using rtc::scoped_ptr;
+
+namespace webrtc {
+namespace {
+
+// The following is the enum RTCStatsIceCandidateType from
+// http://w3c.github.io/webrtc-stats/#rtcstatsicecandidatetype-enum such that
+// our stats report for ice candidate type could conform to that.
+const char STATSREPORT_LOCAL_PORT_TYPE[] = "host";
+const char STATSREPORT_STUN_PORT_TYPE[] = "serverreflexive";
+const char STATSREPORT_PRFLX_PORT_TYPE[] = "peerreflexive";
+const char STATSREPORT_RELAY_PORT_TYPE[] = "relayed";
+
+// Strings used by the stats collector to report adapter types. This fits the
+// general style of http://w3c.github.io/webrtc-stats better than what
+// AdapterTypeToString does.
+const char* STATSREPORT_ADAPTER_TYPE_ETHERNET = "lan";
+const char* STATSREPORT_ADAPTER_TYPE_WIFI = "wlan";
+const char* STATSREPORT_ADAPTER_TYPE_WWAN = "wwan";
+const char* STATSREPORT_ADAPTER_TYPE_VPN = "vpn";
+const char* STATSREPORT_ADAPTER_TYPE_LOOPBACK = "loopback";
+
+template<typename ValueType>
+struct TypeForAdd {
+ const StatsReport::StatsValueName name;
+ const ValueType& value;
+};
+
+typedef TypeForAdd<bool> BoolForAdd;
+typedef TypeForAdd<float> FloatForAdd;
+typedef TypeForAdd<int64_t> Int64ForAdd;
+typedef TypeForAdd<int> IntForAdd;
+
+StatsReport::Id GetTransportIdFromProxy(const ProxyTransportMap& map,
+ const std::string& proxy) {
+ RTC_DCHECK(!proxy.empty());
+ auto found = map.find(proxy);
+ if (found == map.end()) {
+ return StatsReport::Id();
+ }
+
+ return StatsReport::NewComponentId(
+ found->second, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+}
+
+StatsReport* AddTrackReport(StatsCollection* reports,
+ const std::string& track_id) {
+ // Adds an empty track report.
+ StatsReport::Id id(
+ StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id));
+ StatsReport* report = reports->ReplaceOrAddNew(id);
+ report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+ return report;
+}
+
+template <class TrackVector>
+void CreateTrackReports(const TrackVector& tracks, StatsCollection* reports,
+ TrackIdMap& track_ids) {
+ for (const auto& track : tracks) {
+ const std::string& track_id = track->id();
+ StatsReport* report = AddTrackReport(reports, track_id);
+ RTC_DCHECK(report != nullptr);
+ track_ids[track_id] = report;
+ }
+}
+
+void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info,
+ StatsReport* report) {
+ report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+ report->AddInt64(StatsReport::kStatsValueNameBytesSent, info.bytes_sent);
+ report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms);
+}
+
+void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info,
+ StatsReport* report) {
+ report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+}
+
+void SetAudioProcessingStats(StatsReport* report, int signal_level,
+ bool typing_noise_detected, int echo_return_loss,
+ int echo_return_loss_enhancement, int echo_delay_median_ms,
+ float aec_quality_min, int echo_delay_std_ms) {
+ report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState,
+ typing_noise_detected);
+ report->AddFloat(StatsReport::kStatsValueNameEchoCancellationQualityMin,
+ aec_quality_min);
+ // Don't overwrite the previous signal level if it's not available now.
+ if (signal_level >= 0)
+ report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level);
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameEchoReturnLoss, echo_return_loss },
+ { StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+ echo_return_loss_enhancement },
+ { StatsReport::kStatsValueNameEchoDelayMedian, echo_delay_median_ms },
+ { StatsReport::kStatsValueNameEchoDelayStdDev, echo_delay_std_ms },
+ };
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+}
+
+void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
+ ExtractCommonReceiveProperties(info, report);
+ const FloatForAdd floats[] = {
+ { StatsReport::kStatsValueNameExpandRate, info.expand_rate },
+ { StatsReport::kStatsValueNameSecondaryDecodedRate,
+ info.secondary_decoded_rate },
+ { StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate },
+ { StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate },
+ { StatsReport::kStatsValueNamePreemptiveExpandRate,
+ info.preemptive_expand_rate },
+ };
+
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameAudioOutputLevel, info.audio_level },
+ { StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms },
+ { StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng },
+ { StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq },
+ { StatsReport::kStatsValueNameDecodingCTSG,
+ info.decoding_calls_to_silence_generator },
+ { StatsReport::kStatsValueNameDecodingNormal, info.decoding_normal },
+ { StatsReport::kStatsValueNameDecodingPLC, info.decoding_plc },
+ { StatsReport::kStatsValueNameDecodingPLCCNG, info.decoding_plc_cng },
+ { StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms },
+ { StatsReport::kStatsValueNameJitterReceived, info.jitter_ms },
+ { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+ { StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd },
+ { StatsReport::kStatsValueNamePreferredJitterBufferMs,
+ info.jitter_buffer_preferred_ms },
+ };
+
+ for (const auto& f : floats)
+ report->AddFloat(f.name, f.value);
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+
+ report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
+ info.bytes_rcvd);
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+}
+
+void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
+ ExtractCommonSendProperties(info, report);
+
+ SetAudioProcessingStats(report, info.audio_level, info.typing_noise_detected,
+ info.echo_return_loss, info.echo_return_loss_enhancement,
+ info.echo_delay_median_ms, info.aec_quality_min, info.echo_delay_std_ms);
+
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameJitterReceived, info.jitter_ms },
+ { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+ { StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
+ };
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+}
+
+void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
+ ExtractCommonReceiveProperties(info, report);
+ report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
+ info.bytes_rcvd);
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms },
+ { StatsReport::kStatsValueNameDecodeMs, info.decode_ms },
+ { StatsReport::kStatsValueNameFirsSent, info.firs_sent },
+ { StatsReport::kStatsValueNameFrameHeightReceived, info.frame_height },
+ { StatsReport::kStatsValueNameFrameRateDecoded, info.framerate_decoded },
+ { StatsReport::kStatsValueNameFrameRateOutput, info.framerate_output },
+ { StatsReport::kStatsValueNameFrameRateReceived, info.framerate_rcvd },
+ { StatsReport::kStatsValueNameFrameWidthReceived, info.frame_width },
+ { StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms },
+ { StatsReport::kStatsValueNameMaxDecodeMs, info.max_decode_ms },
+ { StatsReport::kStatsValueNameMinPlayoutDelayMs,
+ info.min_playout_delay_ms },
+ { StatsReport::kStatsValueNameNacksSent, info.nacks_sent },
+ { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+ { StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd },
+ { StatsReport::kStatsValueNamePlisSent, info.plis_sent },
+ { StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms },
+ { StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms },
+ };
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+}
+
+void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {
+ ExtractCommonSendProperties(info, report);
+
+ report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution,
+ (info.adapt_reason & 0x2) > 0);
+ report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution,
+ (info.adapt_reason & 0x1) > 0);
+ report->AddBoolean(StatsReport::kStatsValueNameViewLimitedResolution,
+ (info.adapt_reason & 0x4) > 0);
+
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameAdaptationChanges, info.adapt_changes },
+ { StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms },
+ { StatsReport::kStatsValueNameEncodeUsagePercent,
+ info.encode_usage_percent },
+ { StatsReport::kStatsValueNameFirsReceived, info.firs_rcvd },
+ { StatsReport::kStatsValueNameFrameHeightInput, info.input_frame_height },
+ { StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height },
+ { StatsReport::kStatsValueNameFrameRateInput, info.framerate_input },
+ { StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent },
+ { StatsReport::kStatsValueNameFrameWidthInput, info.input_frame_width },
+ { StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width },
+ { StatsReport::kStatsValueNameNacksReceived, info.nacks_rcvd },
+ { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+ { StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
+ { StatsReport::kStatsValueNamePlisReceived, info.plis_rcvd },
+ };
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+}
+
+void ExtractStats(const cricket::BandwidthEstimationInfo& info,
+ double stats_gathering_started,
+ PeerConnectionInterface::StatsOutputLevel level,
+ StatsReport* report) {
+ RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe);
+
+ report->set_timestamp(stats_gathering_started);
+ const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameAvailableSendBandwidth,
+ info.available_send_bandwidth },
+ { StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+ info.available_recv_bandwidth },
+ { StatsReport::kStatsValueNameTargetEncBitrate, info.target_enc_bitrate },
+ { StatsReport::kStatsValueNameActualEncBitrate, info.actual_enc_bitrate },
+ { StatsReport::kStatsValueNameRetransmitBitrate, info.retransmit_bitrate },
+ { StatsReport::kStatsValueNameTransmitBitrate, info.transmit_bitrate },
+ };
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+ report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay);
+}
+
+void ExtractRemoteStats(const cricket::MediaSenderInfo& info,
+ StatsReport* report) {
+ report->set_timestamp(info.remote_stats[0].timestamp);
+ // TODO(hta): Extract some stats here.
+}
+
+void ExtractRemoteStats(const cricket::MediaReceiverInfo& info,
+ StatsReport* report) {
+ report->set_timestamp(info.remote_stats[0].timestamp);
+ // TODO(hta): Extract some stats here.
+}
+
+// Template to extract stats from a data vector.
+// In order to use the template, the functions that are called from it,
+// ExtractStats and ExtractRemoteStats, must be defined and overloaded
+// for each type.
+template<typename T>
+void ExtractStatsFromList(const std::vector<T>& data,
+ const StatsReport::Id& transport_id,
+ StatsCollector* collector,
+ StatsReport::Direction direction) {
+ for (const auto& d : data) {
+ uint32_t ssrc = d.ssrc();
+ // Each track can have stats for both local and remote objects.
+ // TODO(hta): Handle the case of multiple SSRCs per object.
+ StatsReport* report = collector->PrepareReport(true, ssrc, transport_id,
+ direction);
+ if (report)
+ ExtractStats(d, report);
+
+ if (!d.remote_stats.empty()) {
+ report = collector->PrepareReport(false, ssrc, transport_id, direction);
+ if (report)
+ ExtractRemoteStats(d, report);
+ }
+ }
+}
+
+} // namespace
+
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type) {
+ if (candidate_type == cricket::LOCAL_PORT_TYPE) {
+ return STATSREPORT_LOCAL_PORT_TYPE;
+ }
+ if (candidate_type == cricket::STUN_PORT_TYPE) {
+ return STATSREPORT_STUN_PORT_TYPE;
+ }
+ if (candidate_type == cricket::PRFLX_PORT_TYPE) {
+ return STATSREPORT_PRFLX_PORT_TYPE;
+ }
+ if (candidate_type == cricket::RELAY_PORT_TYPE) {
+ return STATSREPORT_RELAY_PORT_TYPE;
+ }
+ RTC_DCHECK(false);
+ return "unknown";
+}
+
+const char* AdapterTypeToStatsType(rtc::AdapterType type) {
+ switch (type) {
+ case rtc::ADAPTER_TYPE_UNKNOWN:
+ return "unknown";
+ case rtc::ADAPTER_TYPE_ETHERNET:
+ return STATSREPORT_ADAPTER_TYPE_ETHERNET;
+ case rtc::ADAPTER_TYPE_WIFI:
+ return STATSREPORT_ADAPTER_TYPE_WIFI;
+ case rtc::ADAPTER_TYPE_CELLULAR:
+ return STATSREPORT_ADAPTER_TYPE_WWAN;
+ case rtc::ADAPTER_TYPE_VPN:
+ return STATSREPORT_ADAPTER_TYPE_VPN;
+ case rtc::ADAPTER_TYPE_LOOPBACK:
+ return STATSREPORT_ADAPTER_TYPE_LOOPBACK;
+ default:
+ RTC_DCHECK(false);
+ return "";
+ }
+}
+
+StatsCollector::StatsCollector(PeerConnection* pc)
+ : pc_(pc), stats_gathering_started_(0) {
+ RTC_DCHECK(pc_);
+}
+
+StatsCollector::~StatsCollector() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+}
+
+double StatsCollector::GetTimeNow() {
+ return rtc::Timing::WallTimeNow() * rtc::kNumMillisecsPerSec;
+}
+
+// Adds a MediaStream with tracks that can be used as a |selector| in a call
+// to GetStats.
+void StatsCollector::AddStream(MediaStreamInterface* stream) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ RTC_DCHECK(stream != NULL);
+
+ CreateTrackReports<AudioTrackVector>(stream->GetAudioTracks(),
+ &reports_, track_ids_);
+ CreateTrackReports<VideoTrackVector>(stream->GetVideoTracks(),
+ &reports_, track_ids_);
+}
+
+void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
+ uint32_t ssrc) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ RTC_DCHECK(audio_track != NULL);
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ for (const auto& track : local_audio_tracks_)
+ RTC_DCHECK(track.first != audio_track || track.second != ssrc);
+#endif
+
+ local_audio_tracks_.push_back(std::make_pair(audio_track, ssrc));
+
+ // Create the kStatsReportTypeTrack report for the new track if there is no
+ // report yet.
+ StatsReport::Id id(StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack,
+ audio_track->id()));
+ StatsReport* report = reports_.Find(id);
+ if (!report) {
+ report = reports_.InsertNew(id);
+ report->AddString(StatsReport::kStatsValueNameTrackId, audio_track->id());
+ }
+}
+
+void StatsCollector::RemoveLocalAudioTrack(AudioTrackInterface* audio_track,
+ uint32_t ssrc) {
+ RTC_DCHECK(audio_track != NULL);
+ local_audio_tracks_.erase(std::remove_if(local_audio_tracks_.begin(),
+ local_audio_tracks_.end(),
+ [audio_track, ssrc](const LocalAudioTrackVector::value_type& track) {
+ return track.first == audio_track && track.second == ssrc;
+ }));
+}
+
+void StatsCollector::GetStats(MediaStreamTrackInterface* track,
+ StatsReports* reports) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ RTC_DCHECK(reports != NULL);
+ RTC_DCHECK(reports->empty());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ if (!track) {
+ reports->reserve(reports_.size());
+ for (auto* r : reports_)
+ reports->push_back(r);
+ return;
+ }
+
+ StatsReport* report = reports_.Find(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeSession, pc_->session()->id()));
+ if (report)
+ reports->push_back(report);
+
+ report = reports_.Find(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeTrack, track->id()));
+
+ if (!report)
+ return;
+
+ reports->push_back(report);
+
+ std::string track_id;
+ for (const auto* r : reports_) {
+ if (r->type() != StatsReport::kStatsReportTypeSsrc)
+ continue;
+
+ const StatsReport::Value* v =
+ r->FindValue(StatsReport::kStatsValueNameTrackId);
+ if (v && v->string_val() == track->id())
+ reports->push_back(r);
+ }
+}
+
+void
+StatsCollector::UpdateStats(PeerConnectionInterface::StatsOutputLevel level) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ double time_now = GetTimeNow();
+ // Calls to UpdateStats() that occur less than kMinGatherStatsPeriod number of
+ // ms apart will be ignored.
+ const double kMinGatherStatsPeriod = 50;
+ if (stats_gathering_started_ != 0 &&
+ stats_gathering_started_ + kMinGatherStatsPeriod > time_now) {
+ return;
+ }
+ stats_gathering_started_ = time_now;
+
+ if (pc_->session()) {
+ // TODO(tommi): All of these hop over to the worker thread to fetch
+ // information. We could use an AsyncInvoker to run all of these and post
+ // the information back to the signaling thread where we can create and
+ // update stats reports. That would also clean up the threading story a bit
+ // since we'd be creating/updating the stats report objects consistently on
+ // the same thread (this class has no locks right now).
+ ExtractSessionInfo();
+ ExtractVoiceInfo();
+ ExtractVideoInfo(level);
+ ExtractDataInfo();
+ UpdateTrackReports();
+ }
+}
+
+StatsReport* StatsCollector::PrepareReport(
+ bool local,
+ uint32_t ssrc,
+ const StatsReport::Id& transport_id,
+ StatsReport::Direction direction) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ StatsReport::Id id(StatsReport::NewIdWithDirection(
+ local ? StatsReport::kStatsReportTypeSsrc
+ : StatsReport::kStatsReportTypeRemoteSsrc,
+ rtc::ToString<uint32_t>(ssrc), direction));
+ StatsReport* report = reports_.Find(id);
+
+ // Use the ID of the track that is currently mapped to the SSRC, if any.
+ std::string track_id;
+ if (!GetTrackIdBySsrc(ssrc, &track_id, direction)) {
+ if (!report) {
+ // The ssrc is not used by any track or existing report, return NULL
+ // in such case to indicate no report is prepared for the ssrc.
+ return NULL;
+ }
+
+ // The ssrc is not used by any existing track. Keeps the old track id
+ // since we want to report the stats for inactive ssrc.
+ const StatsReport::Value* v =
+ report->FindValue(StatsReport::kStatsValueNameTrackId);
+ if (v)
+ track_id = v->string_val();
+ }
+
+ if (!report)
+ report = reports_.InsertNew(id);
+
+ // FYI - for remote reports, the timestamp will be overwritten later.
+ report->set_timestamp(stats_gathering_started_);
+
+ report->AddInt64(StatsReport::kStatsValueNameSsrc, ssrc);
+ report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+ // Add the mapping of SSRC to transport.
+ report->AddId(StatsReport::kStatsValueNameTransportId, transport_id);
+ return report;
+}
+
+StatsReport* StatsCollector::AddOneCertificateReport(
+ const rtc::SSLCertificate* cert, const StatsReport* issuer) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ // TODO(bemasc): Move this computation to a helper class that caches these
+ // values to reduce CPU use in GetStats. This will require adding a fast
+ // SSLCertificate::Equals() method to detect certificate changes.
+
+ std::string digest_algorithm;
+ if (!cert->GetSignatureDigestAlgorithm(&digest_algorithm))
+ return nullptr;
+
+ rtc::scoped_ptr<rtc::SSLFingerprint> ssl_fingerprint(
+ rtc::SSLFingerprint::Create(digest_algorithm, cert));
+
+ // SSLFingerprint::Create can fail if the algorithm returned by
+ // SSLCertificate::GetSignatureDigestAlgorithm is not supported by the
+ // implementation of SSLCertificate::ComputeDigest. This currently happens
+ // with MD5- and SHA-224-signed certificates when linked to libNSS.
+ if (!ssl_fingerprint)
+ return nullptr;
+
+ std::string fingerprint = ssl_fingerprint->GetRfc4572Fingerprint();
+
+ rtc::Buffer der_buffer;
+ cert->ToDER(&der_buffer);
+ std::string der_base64;
+ rtc::Base64::EncodeFromArray(der_buffer.data(), der_buffer.size(),
+ &der_base64);
+
+ StatsReport::Id id(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeCertificate, fingerprint));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddString(StatsReport::kStatsValueNameFingerprint, fingerprint);
+ report->AddString(StatsReport::kStatsValueNameFingerprintAlgorithm,
+ digest_algorithm);
+ report->AddString(StatsReport::kStatsValueNameDer, der_base64);
+ if (issuer)
+ report->AddId(StatsReport::kStatsValueNameIssuerId, issuer->id());
+ return report;
+}
+
+StatsReport* StatsCollector::AddCertificateReports(
+ const rtc::SSLCertificate* cert) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ // Produces a chain of StatsReports representing this certificate and the rest
+ // of its chain, and adds those reports to |reports_|. The return value is
+ // the id of the leaf report. The provided cert must be non-null, so at least
+ // one report will always be provided and the returned string will never be
+ // empty.
+ RTC_DCHECK(cert != NULL);
+
+ StatsReport* issuer = nullptr;
+ rtc::scoped_ptr<rtc::SSLCertChain> chain;
+ if (cert->GetChain(chain.accept())) {
+ // This loop runs in reverse, i.e. from root to leaf, so that each
+ // certificate's issuer's report ID is known before the child certificate's
+ // report is generated. The root certificate does not have an issuer ID
+ // value.
+ for (ptrdiff_t i = chain->GetSize() - 1; i >= 0; --i) {
+ const rtc::SSLCertificate& cert_i = chain->Get(i);
+ issuer = AddOneCertificateReport(&cert_i, issuer);
+ }
+ }
+ // Add the leaf certificate.
+ return AddOneCertificateReport(cert, issuer);
+}
+
+StatsReport* StatsCollector::AddConnectionInfoReport(
+ const std::string& content_name, int component, int connection_id,
+ const StatsReport::Id& channel_report_id,
+ const cricket::ConnectionInfo& info) {
+ StatsReport::Id id(StatsReport::NewCandidatePairId(content_name, component,
+ connection_id));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+
+ const BoolForAdd bools[] = {
+ {StatsReport::kStatsValueNameActiveConnection, info.best_connection},
+ {StatsReport::kStatsValueNameReceiving, info.receiving},
+ {StatsReport::kStatsValueNameWritable, info.writable},
+ };
+ for (const auto& b : bools)
+ report->AddBoolean(b.name, b.value);
+
+ report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id);
+ report->AddId(StatsReport::kStatsValueNameLocalCandidateId,
+ AddCandidateReport(info.local_candidate, true)->id());
+ report->AddId(StatsReport::kStatsValueNameRemoteCandidateId,
+ AddCandidateReport(info.remote_candidate, false)->id());
+
+ const Int64ForAdd int64s[] = {
+ { StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes },
+ { StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes },
+ { StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets },
+ { StatsReport::kStatsValueNameRtt, info.rtt },
+ { StatsReport::kStatsValueNameSendPacketsDiscarded,
+ info.sent_discarded_packets },
+ };
+ for (const auto& i : int64s)
+ report->AddInt64(i.name, i.value);
+
+ report->AddString(StatsReport::kStatsValueNameLocalAddress,
+ info.local_candidate.address().ToString());
+ report->AddString(StatsReport::kStatsValueNameLocalCandidateType,
+ info.local_candidate.type());
+ report->AddString(StatsReport::kStatsValueNameRemoteAddress,
+ info.remote_candidate.address().ToString());
+ report->AddString(StatsReport::kStatsValueNameRemoteCandidateType,
+ info.remote_candidate.type());
+ report->AddString(StatsReport::kStatsValueNameTransportType,
+ info.local_candidate.protocol());
+
+ return report;
+}
+
+StatsReport* StatsCollector::AddCandidateReport(
+ const cricket::Candidate& candidate,
+ bool local) {
+ StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id()));
+ StatsReport* report = reports_.Find(id);
+ if (!report) {
+ report = reports_.InsertNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ if (local) {
+ report->AddString(StatsReport::kStatsValueNameCandidateNetworkType,
+ AdapterTypeToStatsType(candidate.network_type()));
+ }
+ report->AddString(StatsReport::kStatsValueNameCandidateIPAddress,
+ candidate.address().ipaddr().ToString());
+ report->AddString(StatsReport::kStatsValueNameCandidatePortNumber,
+ candidate.address().PortAsString());
+ report->AddInt(StatsReport::kStatsValueNameCandidatePriority,
+ candidate.priority());
+ report->AddString(StatsReport::kStatsValueNameCandidateType,
+ IceCandidateTypeToStatsType(candidate.type()));
+ report->AddString(StatsReport::kStatsValueNameCandidateTransportType,
+ candidate.protocol());
+ }
+
+ return report;
+}
+
+void StatsCollector::ExtractSessionInfo() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ // Extract information from the base session.
+ StatsReport::Id id(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeSession, pc_->session()->id()));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddBoolean(StatsReport::kStatsValueNameInitiator,
+ pc_->session()->initial_offerer());
+
+ SessionStats stats;
+ if (!pc_->session()->GetTransportStats(&stats)) {
+ return;
+ }
+
+ // Store the proxy map away for use in SSRC reporting.
+ // TODO(tommi): This shouldn't be necessary if we post the stats back to the
+ // signaling thread after fetching them on the worker thread, then just use
+ // the proxy map directly from the session stats.
+ // As is, if GetStats() failed, we could be using old (incorrect?) proxy
+ // data.
+ proxy_to_transport_ = stats.proxy_to_transport;
+
+ for (const auto& transport_iter : stats.transport_stats) {
+ // Attempt to get a copy of the certificates from the transport and
+ // expose them in stats reports. All channels in a transport share the
+ // same local and remote certificates.
+ //
+ StatsReport::Id local_cert_report_id, remote_cert_report_id;
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate;
+ if (pc_->session()->GetLocalCertificate(
+ transport_iter.second.transport_name, &certificate)) {
+ StatsReport* r = AddCertificateReports(&(certificate->ssl_certificate()));
+ if (r)
+ local_cert_report_id = r->id();
+ }
+
+ rtc::scoped_ptr<rtc::SSLCertificate> cert;
+ if (pc_->session()->GetRemoteSSLCertificate(
+ transport_iter.second.transport_name, cert.accept())) {
+ StatsReport* r = AddCertificateReports(cert.get());
+ if (r)
+ remote_cert_report_id = r->id();
+ }
+
+ for (const auto& channel_iter : transport_iter.second.channel_stats) {
+ StatsReport::Id id(StatsReport::NewComponentId(
+ transport_iter.second.transport_name, channel_iter.component));
+ StatsReport* channel_report = reports_.ReplaceOrAddNew(id);
+ channel_report->set_timestamp(stats_gathering_started_);
+ channel_report->AddInt(StatsReport::kStatsValueNameComponent,
+ channel_iter.component);
+ if (local_cert_report_id.get()) {
+ channel_report->AddId(StatsReport::kStatsValueNameLocalCertificateId,
+ local_cert_report_id);
+ }
+ if (remote_cert_report_id.get()) {
+ channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId,
+ remote_cert_report_id);
+ }
+ const std::string& srtp_cipher = channel_iter.srtp_cipher;
+ if (!srtp_cipher.empty()) {
+ channel_report->AddString(StatsReport::kStatsValueNameSrtpCipher,
+ srtp_cipher);
+ }
+ int ssl_cipher = channel_iter.ssl_cipher;
+ if (ssl_cipher &&
+ rtc::SSLStreamAdapter::GetSslCipherSuiteName(ssl_cipher).length()) {
+ channel_report->AddString(
+ StatsReport::kStatsValueNameDtlsCipher,
+ rtc::SSLStreamAdapter::GetSslCipherSuiteName(ssl_cipher));
+ }
+
+ int connection_id = 0;
+ for (const cricket::ConnectionInfo& info :
+ channel_iter.connection_infos) {
+ StatsReport* connection_report = AddConnectionInfoReport(
+ transport_iter.first, channel_iter.component, connection_id++,
+ channel_report->id(), info);
+ if (info.best_connection) {
+ channel_report->AddId(
+ StatsReport::kStatsValueNameSelectedCandidatePairId,
+ connection_report->id());
+ }
+ }
+ }
+ }
+}
+
+void StatsCollector::ExtractVoiceInfo() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ if (!pc_->session()->voice_channel()) {
+ return;
+ }
+ cricket::VoiceMediaInfo voice_info;
+ if (!pc_->session()->voice_channel()->GetStats(&voice_info)) {
+ LOG(LS_ERROR) << "Failed to get voice channel stats.";
+ return;
+ }
+
+ // TODO(tommi): The above code should run on the worker thread and post the
+ // results back to the signaling thread, where we can add data to the reports.
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ StatsReport::Id transport_id(GetTransportIdFromProxy(
+ proxy_to_transport_, pc_->session()->voice_channel()->content_name()));
+ if (!transport_id.get()) {
+ LOG(LS_ERROR) << "Failed to get transport name for proxy "
+ << pc_->session()->voice_channel()->content_name();
+ return;
+ }
+
+ ExtractStatsFromList(voice_info.receivers, transport_id, this,
+ StatsReport::kReceive);
+ ExtractStatsFromList(voice_info.senders, transport_id, this,
+ StatsReport::kSend);
+
+ UpdateStatsFromExistingLocalAudioTracks();
+}
+
+void StatsCollector::ExtractVideoInfo(
+ PeerConnectionInterface::StatsOutputLevel level) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ if (!pc_->session()->video_channel())
+ return;
+
+ cricket::VideoMediaInfo video_info;
+ if (!pc_->session()->video_channel()->GetStats(&video_info)) {
+ LOG(LS_ERROR) << "Failed to get video channel stats.";
+ return;
+ }
+
+ // TODO(tommi): The above code should run on the worker thread and post the
+ // results back to the signaling thread, where we can add data to the reports.
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ StatsReport::Id transport_id(GetTransportIdFromProxy(
+ proxy_to_transport_, pc_->session()->video_channel()->content_name()));
+ if (!transport_id.get()) {
+ LOG(LS_ERROR) << "Failed to get transport name for proxy "
+ << pc_->session()->video_channel()->content_name();
+ return;
+ }
+ ExtractStatsFromList(video_info.receivers, transport_id, this,
+ StatsReport::kReceive);
+ ExtractStatsFromList(video_info.senders, transport_id, this,
+ StatsReport::kSend);
+ if (video_info.bw_estimations.size() != 1) {
+ LOG(LS_ERROR) << "BWEs count: " << video_info.bw_estimations.size();
+ } else {
+ StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId());
+ StatsReport* report = reports_.FindOrAddNew(report_id);
+ ExtractStats(
+ video_info.bw_estimations[0], stats_gathering_started_, level, report);
+ }
+}
+
+void StatsCollector::ExtractDataInfo() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ for (const auto& dc : pc_->sctp_data_channels()) {
+ StatsReport::Id id(StatsReport::NewTypedIntId(
+ StatsReport::kStatsReportTypeDataChannel, dc->id()));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddString(StatsReport::kStatsValueNameLabel, dc->label());
+ report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+ report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol());
+ report->AddString(StatsReport::kStatsValueNameState,
+ DataChannelInterface::DataStateString(dc->state()));
+ }
+}
+
+StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type,
+ const std::string& id,
+ StatsReport::Direction direction) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc ||
+ type == StatsReport::kStatsReportTypeRemoteSsrc);
+ return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction));
+}
+
+void StatsCollector::UpdateStatsFromExistingLocalAudioTracks() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ // Loop through the existing local audio tracks.
+ for (const auto& it : local_audio_tracks_) {
+ AudioTrackInterface* track = it.first;
+ uint32_t ssrc = it.second;
+ StatsReport* report =
+ GetReport(StatsReport::kStatsReportTypeSsrc,
+ rtc::ToString<uint32_t>(ssrc), StatsReport::kSend);
+ if (report == NULL) {
+ // This can happen if a local audio track is added to a stream on the
+ // fly and the report has not been set up yet. Do nothing in this case.
+ LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc;
+ continue;
+ }
+
+ // The same ssrc can be used by both local and remote audio tracks.
+ const StatsReport::Value* v =
+ report->FindValue(StatsReport::kStatsValueNameTrackId);
+ if (!v || v->string_val() != track->id())
+ continue;
+
+ report->set_timestamp(stats_gathering_started_);
+ UpdateReportFromAudioTrack(track, report);
+ }
+}
+
+void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track,
+ StatsReport* report) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ RTC_DCHECK(track != NULL);
+
+ int signal_level = 0;
+ if (!track->GetSignalLevel(&signal_level))
+ signal_level = -1;
+
+ rtc::scoped_refptr<AudioProcessorInterface> audio_processor(
+ track->GetAudioProcessor());
+
+ AudioProcessorInterface::AudioProcessorStats stats;
+ if (audio_processor.get())
+ audio_processor->GetStats(&stats);
+
+ SetAudioProcessingStats(report, signal_level, stats.typing_noise_detected,
+ stats.echo_return_loss, stats.echo_return_loss_enhancement,
+ stats.echo_delay_median_ms, stats.aec_quality_min,
+ stats.echo_delay_std_ms);
+}
+
+bool StatsCollector::GetTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id,
+ StatsReport::Direction direction) {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+ if (direction == StatsReport::kSend) {
+ if (!pc_->session()->GetLocalTrackIdBySsrc(ssrc, track_id)) {
+ LOG(LS_WARNING) << "The SSRC " << ssrc
+ << " is not associated with a sending track";
+ return false;
+ }
+ } else {
+ RTC_DCHECK(direction == StatsReport::kReceive);
+ if (!pc_->session()->GetRemoteTrackIdBySsrc(ssrc, track_id)) {
+ LOG(LS_WARNING) << "The SSRC " << ssrc
+ << " is not associated with a receiving track";
+ return false;
+ }
+ }
+
+ return true;
+}
+
+void StatsCollector::UpdateTrackReports() {
+ RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ for (const auto& entry : track_ids_) {
+ StatsReport* report = entry.second;
+ report->set_timestamp(stats_gathering_started_);
+ }
+}
+
+void StatsCollector::ClearUpdateStatsCacheForTest() {
+ stats_gathering_started_ = 0;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/statscollector.h b/talk/app/webrtc/statscollector.h
new file mode 100644
index 0000000000..18a345d71d
--- /dev/null
+++ b/talk/app/webrtc/statscollector.h
@@ -0,0 +1,170 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains a class used for gathering statistics from an ongoing
+// libjingle PeerConnection.
+
+#ifndef TALK_APP_WEBRTC_STATSCOLLECTOR_H_
+#define TALK_APP_WEBRTC_STATSCOLLECTOR_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/statstypes.h"
+#include "talk/app/webrtc/webrtcsession.h"
+
+namespace webrtc {
+
+class PeerConnection;
+
+// Conversion function to convert candidate type string to the corresponding one
+// from enum RTCStatsIceCandidateType.
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type);
+
+// Conversion function to convert adapter type to a report string which is
+// more fitting to the general style of http://w3c.github.io/webrtc-stats.
+// This is only used by stats collector.
+const char* AdapterTypeToStatsType(rtc::AdapterType type);
+
+// A mapping between track ids and their StatsReport.
+typedef std::map<std::string, StatsReport*> TrackIdMap;
+
+class StatsCollector {
+ public:
+ // The caller is responsible for ensuring that the pc outlives the
+ // StatsCollector instance.
+ explicit StatsCollector(PeerConnection* pc);
+ virtual ~StatsCollector();
+
+ // Adds a MediaStream with tracks that can be used as a |selector| in a call
+ // to GetStats.
+ void AddStream(MediaStreamInterface* stream);
+
+ // Adds a local audio track that is used for getting some voice statistics.
+ void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
+
+  // Removes a local audio track that is used for getting some voice
+  // statistics.
+ void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
+
+ // Gather statistics from the session and store them for future use.
+ void UpdateStats(PeerConnectionInterface::StatsOutputLevel level);
+
+ // Gets a StatsReports of the last collected stats. Note that UpdateStats must
+ // be called before this function to get the most recent stats. |selector| is
+ // a track label or empty string. The most recent reports are stored in
+ // |reports|.
+ // TODO(tommi): Change this contract to accept a callback object instead
+ // of filling in |reports|. As is, there's a requirement that the caller
+ // uses |reports| immediately without allowing any async activity on
+ // the thread (message handling etc) and then discard the results.
+ void GetStats(MediaStreamTrackInterface* track,
+ StatsReports* reports);
+
+ // Prepare a local or remote SSRC report for the given ssrc. Used internally
+ // in the ExtractStatsFromList template.
+ StatsReport* PrepareReport(bool local,
+ uint32_t ssrc,
+ const StatsReport::Id& transport_id,
+ StatsReport::Direction direction);
+
+  // Method used by the unittest to force an update of stats, since calls to
+  // UpdateStats() that occur less than kMinGatherStatsPeriod ms apart will
+  // be ignored.
+ void ClearUpdateStatsCacheForTest();
+
+ private:
+ friend class StatsCollectorTest;
+
+ // Overridden in unit tests to fake timing.
+ virtual double GetTimeNow();
+
+ bool CopySelectedReports(const std::string& selector, StatsReports* reports);
+
+ // Helper method for AddCertificateReports.
+ StatsReport* AddOneCertificateReport(
+ const rtc::SSLCertificate* cert, const StatsReport* issuer);
+
+  // Helper method for creating IceCandidate report. |local| indicates
+  // whether this candidate is local or remote.
+ StatsReport* AddCandidateReport(const cricket::Candidate& candidate,
+ bool local);
+
+ // Adds a report for this certificate and every certificate in its chain, and
+ // returns the leaf certificate's report.
+ StatsReport* AddCertificateReports(const rtc::SSLCertificate* cert);
+
+ StatsReport* AddConnectionInfoReport(const std::string& content_name,
+ int component, int connection_id,
+ const StatsReport::Id& channel_report_id,
+ const cricket::ConnectionInfo& info);
+
+ void ExtractDataInfo();
+ void ExtractSessionInfo();
+ void ExtractVoiceInfo();
+ void ExtractVideoInfo(PeerConnectionInterface::StatsOutputLevel level);
+ void BuildSsrcToTransportId();
+ webrtc::StatsReport* GetReport(const StatsReport::StatsType& type,
+ const std::string& id,
+ StatsReport::Direction direction);
+
+ // Helper method to get stats from the local audio tracks.
+ void UpdateStatsFromExistingLocalAudioTracks();
+ void UpdateReportFromAudioTrack(AudioTrackInterface* track,
+ StatsReport* report);
+
+ // Helper method to get the id for the track identified by ssrc.
+ // |direction| tells if the track is for sending or receiving.
+ bool GetTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id,
+ StatsReport::Direction direction);
+
+ // Helper method to update the timestamp of track records.
+ void UpdateTrackReports();
+
+ // A collection for all of our stats reports.
+ StatsCollection reports_;
+ TrackIdMap track_ids_;
+ // Raw pointer to the peer connection the statistics are gathered from.
+ PeerConnection* const pc_;
+ double stats_gathering_started_;
+ ProxyTransportMap proxy_to_transport_;
+
+ // TODO(tommi): We appear to be holding on to raw pointers to reference
+ // counted objects? We should be using scoped_refptr here.
+ typedef std::vector<std::pair<AudioTrackInterface*, uint32_t> >
+ LocalAudioTrackVector;
+ LocalAudioTrackVector local_audio_tracks_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_STATSCOLLECTOR_H_
diff --git a/talk/app/webrtc/statscollector_unittest.cc b/talk/app/webrtc/statscollector_unittest.cc
new file mode 100644
index 0000000000..9121c691b1
--- /dev/null
+++ b/talk/app/webrtc/statscollector_unittest.cc
@@ -0,0 +1,1753 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+
+#include "talk/app/webrtc/statscollector.h"
+
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/mediastreamtrack.h"
+#include "talk/app/webrtc/test/fakedatachannelprovider.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/base/fakemediaengine.h"
+#include "talk/session/media/channelmanager.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/base64.h"
+#include "webrtc/base/fakesslidentity.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/network.h"
+#include "webrtc/p2p/base/faketransportcontroller.h"
+
+using rtc::scoped_ptr;
+using testing::_;
+using testing::DoAll;
+using testing::Field;
+using testing::Return;
+using testing::ReturnNull;
+using testing::ReturnRef;
+using testing::SetArgPointee;
+using webrtc::PeerConnectionInterface;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+
+namespace {
+// This value comes from openssl/tls1.h
+const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
+} // namespace
+
+namespace cricket {
+
+class ChannelManager;
+
+} // namespace cricket
+
+namespace webrtc {
+
+// Error return values
+const char kNotFound[] = "NOT FOUND";
+
+// Constant names for track identification.
+const char kLocalTrackId[] = "local_track_id";
+const char kRemoteTrackId[] = "remote_track_id";
+const uint32_t kSsrcOfTrack = 1234;
+
+class MockWebRtcSession : public webrtc::WebRtcSession {
+ public:
+ explicit MockWebRtcSession(webrtc::MediaControllerInterface* media_controller)
+ : WebRtcSession(media_controller,
+ rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ nullptr) {}
+ MOCK_METHOD0(voice_channel, cricket::VoiceChannel*());
+ MOCK_METHOD0(video_channel, cricket::VideoChannel*());
+  // Libjingle uses "local" for an outgoing track, and "remote" for an
+  // incoming track.
+ MOCK_METHOD2(GetLocalTrackIdBySsrc, bool(uint32_t, std::string*));
+ MOCK_METHOD2(GetRemoteTrackIdBySsrc, bool(uint32_t, std::string*));
+ MOCK_METHOD1(GetTransportStats, bool(SessionStats*));
+ MOCK_METHOD2(GetLocalCertificate,
+ bool(const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate));
+ MOCK_METHOD2(GetRemoteSSLCertificate,
+ bool(const std::string& transport_name,
+ rtc::SSLCertificate** cert));
+};
+
+// The factory isn't really used; it just satisfies the base PeerConnection.
+class FakePeerConnectionFactory
+ : public rtc::RefCountedObject<PeerConnectionFactory> {};
+
+class MockPeerConnection
+ : public rtc::RefCountedObject<webrtc::PeerConnection> {
+ public:
+ MockPeerConnection()
+ : rtc::RefCountedObject<webrtc::PeerConnection>(
+ new FakePeerConnectionFactory()) {}
+ MOCK_METHOD0(session, WebRtcSession*());
+ MOCK_CONST_METHOD0(sctp_data_channels,
+ const std::vector<rtc::scoped_refptr<DataChannel>>&());
+};
+
+class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
+ public:
+ MockVideoMediaChannel() :
+ cricket::FakeVideoMediaChannel(NULL, cricket::VideoOptions()) {}
+ MOCK_METHOD1(GetStats, bool(cricket::VideoMediaInfo*));
+};
+
+class MockVoiceMediaChannel : public cricket::FakeVoiceMediaChannel {
+ public:
+ MockVoiceMediaChannel() :
+ cricket::FakeVoiceMediaChannel(NULL, cricket::AudioOptions()) {}
+ MOCK_METHOD1(GetStats, bool(cricket::VoiceMediaInfo*));
+};
+
+class FakeAudioProcessor : public webrtc::AudioProcessorInterface {
+ public:
+ FakeAudioProcessor() {}
+ ~FakeAudioProcessor() {}
+
+ private:
+ void GetStats(AudioProcessorInterface::AudioProcessorStats* stats) override {
+ stats->typing_noise_detected = true;
+ stats->echo_return_loss = 2;
+ stats->echo_return_loss_enhancement = 3;
+ stats->echo_delay_median_ms = 4;
+ stats->aec_quality_min = 5.1f;
+ stats->echo_delay_std_ms = 6;
+ }
+};
+
+class FakeAudioTrack
+ : public webrtc::MediaStreamTrack<webrtc::AudioTrackInterface> {
+ public:
+ explicit FakeAudioTrack(const std::string& id)
+ : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(id),
+ processor_(new rtc::RefCountedObject<FakeAudioProcessor>()) {}
+ std::string kind() const override { return "audio"; }
+ webrtc::AudioSourceInterface* GetSource() const override { return NULL; }
+ void AddSink(webrtc::AudioTrackSinkInterface* sink) override {}
+ void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {}
+ bool GetSignalLevel(int* level) override {
+ *level = 1;
+ return true;
+ }
+ rtc::scoped_refptr<webrtc::AudioProcessorInterface> GetAudioProcessor()
+ override {
+ return processor_;
+ }
+
+ private:
+ rtc::scoped_refptr<FakeAudioProcessor> processor_;
+};
+
+bool GetValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ std::string* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (!v)
+ return false;
+ *value = v->ToString();
+ return true;
+}
+
+std::string ExtractStatsValue(const StatsReport::StatsType& type,
+ const StatsReports& reports,
+ StatsReport::StatsValueName name) {
+ for (const auto* r : reports) {
+ std::string ret;
+ if (r->type() == type && GetValue(r, name, &ret))
+ return ret;
+ }
+
+ return kNotFound;
+}
+
+StatsReport::Id TypedIdFromIdString(StatsReport::StatsType type,
+ const std::string& value) {
+ EXPECT_FALSE(value.empty());
+ StatsReport::Id id;
+ if (value.empty())
+ return id;
+
+ // This has assumptions about how the ID is constructed. As is, this is
+ // OK since this is for testing purposes only, but if we ever need this
+ // in production, we should add a generic method that does this.
+ size_t index = value.find('_');
+ EXPECT_NE(index, std::string::npos);
+ if (index == std::string::npos || index == (value.length() - 1))
+ return id;
+
+ id = StatsReport::NewTypedId(type, value.substr(index + 1));
+ EXPECT_EQ(id->ToString(), value);
+ return id;
+}
+
+StatsReport::Id IdFromCertIdString(const std::string& cert_id) {
+ return TypedIdFromIdString(StatsReport::kStatsReportTypeCertificate, cert_id);
+}
+
+// Finds the |n|-th report of type |type| in |reports|.
+// |n| starts from 1 for finding the first report.
+const StatsReport* FindNthReportByType(
+ const StatsReports& reports, const StatsReport::StatsType& type, int n) {
+ for (size_t i = 0; i < reports.size(); ++i) {
+ if (reports[i]->type() == type) {
+ n--;
+ if (n == 0)
+ return reports[i];
+ }
+ }
+ return nullptr;
+}
+
+const StatsReport* FindReportById(const StatsReports& reports,
+ const StatsReport::Id& id) {
+ for (const auto* r : reports) {
+ if (r->id()->Equals(id))
+ return r;
+ }
+ return nullptr;
+}
+
+std::string ExtractSsrcStatsValue(StatsReports reports,
+ StatsReport::StatsValueName name) {
+ return ExtractStatsValue(StatsReport::kStatsReportTypeSsrc, reports, name);
+}
+
+std::string ExtractBweStatsValue(StatsReports reports,
+ StatsReport::StatsValueName name) {
+ return ExtractStatsValue(
+ StatsReport::kStatsReportTypeBwe, reports, name);
+}
+
+std::string DerToPem(const std::string& der) {
+ return rtc::SSLIdentity::DerToPem(
+ rtc::kPemTypeCertificate,
+ reinterpret_cast<const unsigned char*>(der.c_str()),
+ der.length());
+}
+
+std::vector<std::string> DersToPems(
+ const std::vector<std::string>& ders) {
+ std::vector<std::string> pems(ders.size());
+ std::transform(ders.begin(), ders.end(), pems.begin(), DerToPem);
+ return pems;
+}
+
+void CheckCertChainReports(const StatsReports& reports,
+ const std::vector<std::string>& ders,
+ const StatsReport::Id& start_id) {
+ StatsReport::Id cert_id;
+ const StatsReport::Id* certificate_id = &start_id;
+ size_t i = 0;
+ while (true) {
+ const StatsReport* report = FindReportById(reports, *certificate_id);
+ ASSERT_TRUE(report != NULL);
+
+ std::string der_base64;
+ EXPECT_TRUE(GetValue(
+ report, StatsReport::kStatsValueNameDer, &der_base64));
+ std::string der = rtc::Base64::Decode(der_base64, rtc::Base64::DO_STRICT);
+ EXPECT_EQ(ders[i], der);
+
+ std::string fingerprint_algorithm;
+ EXPECT_TRUE(GetValue(
+ report,
+ StatsReport::kStatsValueNameFingerprintAlgorithm,
+ &fingerprint_algorithm));
+ // The digest algorithm for a FakeSSLCertificate is always SHA-1.
+ std::string sha_1_str = rtc::DIGEST_SHA_1;
+ EXPECT_EQ(sha_1_str, fingerprint_algorithm);
+
+ std::string fingerprint;
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameFingerprint,
+ &fingerprint));
+ EXPECT_FALSE(fingerprint.empty());
+
+ ++i;
+ std::string issuer_id;
+ if (!GetValue(report, StatsReport::kStatsValueNameIssuerId,
+ &issuer_id)) {
+ break;
+ }
+
+ cert_id = IdFromCertIdString(issuer_id);
+ certificate_id = &cert_id;
+ }
+ EXPECT_EQ(ders.size(), i);
+}
+
+// Verifies that every receiver-side voice stat in |report| matches the
+// corresponding field of |info|. Stats values are stored as strings in the
+// report, so expected values are stringified with rtc::ToString<>.
+void VerifyVoiceReceiverInfoReport(
+    const StatsReport* report,
+    const cricket::VoiceReceiverInfo& info) {
+  std::string value_in_report;
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameAudioOutputLevel, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.audio_level), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameBytesReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int64_t>(info.bytes_rcvd), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterBufferMs, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_buffer_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePreferredJitterBufferMs,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_buffer_preferred_ms),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCurrentDelayMs, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.delay_estimate_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameExpandRate, &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameSpeechExpandRate, &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.speech_expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAccelerateRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.accelerate_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreemptiveExpandRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.preemptive_expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDecodedRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.secondary_decoded_rate), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.packets_rcvd), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCTSG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_calls_to_silence_generator),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCTN, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_calls_to_neteq),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingNormal, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_normal), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingPLC, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_plc), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCNG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_cng), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingPLCCNG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_plc_cng), value_in_report);
+  // NOTE(review): only the *presence* of the codec name is checked here; the
+  // value is never compared against |info| -- confirm this is intentional.
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCodecName, &value_in_report));
+}
+
+
+// Verifies that every sender-side voice stat in |report| matches the
+// corresponding field of |sinfo|. Stats values are stored as strings in the
+// report, so expected values are stringified with rtc::ToString<>.
+void VerifyVoiceSenderInfoReport(const StatsReport* report,
+    const cricket::VoiceSenderInfo& sinfo) {
+  std::string value_in_report;
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCodecName, &value_in_report));
+  EXPECT_EQ(sinfo.codec_name, value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameBytesSent, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int64_t>(sinfo.bytes_sent), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsSent, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.packets_sent), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsLost, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.packets_lost), value_in_report);
+  // The RTT check was previously duplicated verbatim; one check suffices.
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameRtt, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.rtt_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.jitter_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoCancellationQualityMin,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(sinfo.aec_quality_min), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoDelayMedian, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_delay_median_ms),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoDelayStdDev, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_delay_std_ms),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoReturnLoss, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_return_loss),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_return_loss_enhancement),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameAudioInputLevel, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.audio_level), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameTypingNoiseState, &value_in_report));
+  // The typing-noise flag is reported as the literal strings "true"/"false".
+  std::string typing_detected = sinfo.typing_noise_detected ? "true" : "false";
+  EXPECT_EQ(typing_detected, value_in_report);
+}
+
+// Helper methods to avoid duplication of code.
+// Fills |voice_sender_info| with distinct, sequential dummy values so a later
+// verification step can detect any field that is copied to the wrong stat.
+void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info) {
+  voice_sender_info->add_ssrc(kSsrcOfTrack);
+  voice_sender_info->codec_name = "fake_codec";
+  voice_sender_info->bytes_sent = 100;
+  voice_sender_info->packets_sent = 101;
+  voice_sender_info->rtt_ms = 102;
+  voice_sender_info->fraction_lost = 103;
+  voice_sender_info->jitter_ms = 104;
+  voice_sender_info->packets_lost = 105;
+  voice_sender_info->ext_seqnum = 106;
+  voice_sender_info->audio_level = 107;
+  voice_sender_info->echo_return_loss = 108;
+  voice_sender_info->echo_return_loss_enhancement = 109;
+  voice_sender_info->echo_delay_median_ms = 110;
+  voice_sender_info->echo_delay_std_ms = 111;
+  voice_sender_info->aec_quality_min = 112.0f;
+  voice_sender_info->typing_noise_detected = false;
+}
+
+// Overwrites the audio level and audio-processor fields of
+// |voice_sender_info| with the values currently exposed by |audio_track|,
+// mirroring what the real stats collector does for local audio tracks.
+void UpdateVoiceSenderInfoFromAudioTrack(
+    FakeAudioTrack* audio_track, cricket::VoiceSenderInfo* voice_sender_info) {
+  audio_track->GetSignalLevel(&voice_sender_info->audio_level);
+  webrtc::AudioProcessorInterface::AudioProcessorStats audio_processor_stats;
+  audio_track->GetAudioProcessor()->GetStats(&audio_processor_stats);
+  voice_sender_info->typing_noise_detected =
+      audio_processor_stats.typing_noise_detected;
+  voice_sender_info->echo_return_loss = audio_processor_stats.echo_return_loss;
+  voice_sender_info->echo_return_loss_enhancement =
+      audio_processor_stats.echo_return_loss_enhancement;
+  voice_sender_info->echo_delay_median_ms =
+      audio_processor_stats.echo_delay_median_ms;
+  voice_sender_info->aec_quality_min = audio_processor_stats.aec_quality_min;
+  voice_sender_info->echo_delay_std_ms =
+      audio_processor_stats.echo_delay_std_ms;
+}
+
+// Fills |voice_receiver_info| with distinct, sequential dummy values so a
+// later verification step can detect any field copied to the wrong stat.
+void InitVoiceReceiverInfo(cricket::VoiceReceiverInfo* voice_receiver_info) {
+  voice_receiver_info->add_ssrc(kSsrcOfTrack);
+  voice_receiver_info->bytes_rcvd = 110;
+  voice_receiver_info->packets_rcvd = 111;
+  voice_receiver_info->fraction_lost = 113;
+  // packets_lost was previously assigned twice (112, then 114); the first
+  // assignment was a dead store, so only the final value is kept.
+  voice_receiver_info->packets_lost = 114;
+  voice_receiver_info->ext_seqnum = 115;
+  voice_receiver_info->jitter_ms = 116;
+  voice_receiver_info->jitter_buffer_ms = 117;
+  voice_receiver_info->jitter_buffer_preferred_ms = 118;
+  voice_receiver_info->delay_estimate_ms = 119;
+  voice_receiver_info->audio_level = 120;
+  voice_receiver_info->expand_rate = 121;
+  voice_receiver_info->speech_expand_rate = 122;
+  voice_receiver_info->secondary_decoded_rate = 123;
+  voice_receiver_info->accelerate_rate = 124;
+  voice_receiver_info->preemptive_expand_rate = 125;
+}
+
+// StatsCollector subclass that pins the clock to a fixed value so tests can
+// assert exact report timestamps.
+class StatsCollectorForTest : public webrtc::StatsCollector {
+ public:
+  explicit StatsCollectorForTest(PeerConnection* pc)
+      : StatsCollector(pc), time_now_(19477) {}
+
+  // Returns the fixed fake "now" (19477) instead of the wall clock.
+  double GetTimeNow() override {
+    return time_now_;
+  }
+
+ private:
+  double time_now_;
+};
+
+// Test fixture providing a mocked PeerConnection/WebRtcSession environment,
+// fake media streams/tracks, and helpers to drive StatsCollector and verify
+// the reports it produces.
+class StatsCollectorTest : public testing::Test {
+ protected:
+  StatsCollectorTest()
+    : media_engine_(new cricket::FakeMediaEngine()),
+      channel_manager_(
+          new cricket::ChannelManager(media_engine_, rtc::Thread::Current())),
+      media_controller_(
+          webrtc::MediaControllerInterface::Create(rtc::Thread::Current(),
+                                                   channel_manager_.get())),
+      session_(media_controller_.get()) {
+    // By default, we ignore session GetStats calls.
+    EXPECT_CALL(session_, GetTransportStats(_)).WillRepeatedly(Return(false));
+    // Add default returns for mock classes.
+    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(pc_, session()).WillRepeatedly(Return(&session_));
+    EXPECT_CALL(pc_, sctp_data_channels())
+        .WillRepeatedly(ReturnRef(data_channels_));
+  }
+
+  ~StatsCollectorTest() {}
+
+  // This creates a standard setup with a transport called "trspname"
+  // having one transport channel
+  // and the specified virtual connection name.
+  void InitSessionStats(const std::string& vc_name) {
+    const std::string kTransportName("trspname");
+    cricket::TransportStats transport_stats;
+    cricket::TransportChannelStats channel_stats;
+    channel_stats.component = 1;
+    transport_stats.transport_name = kTransportName;
+    transport_stats.channel_stats.push_back(channel_stats);
+
+    session_stats_.transport_stats[kTransportName] = transport_stats;
+    session_stats_.proxy_to_transport[vc_name] = kTransportName;
+  }
+
+  // Adds a outgoing video track with a given SSRC into the stats.
+  void AddOutgoingVideoTrackStats() {
+    stream_ = webrtc::MediaStream::Create("streamlabel");
+    track_= webrtc::VideoTrack::Create(kLocalTrackId, NULL);
+    stream_->AddTrack(track_);
+    EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillRepeatedly(DoAll(SetArgPointee<1>(kLocalTrackId), Return(true)));
+  }
+
+  // Adds a incoming video track with a given SSRC into the stats.
+  void AddIncomingVideoTrackStats() {
+    stream_ = webrtc::MediaStream::Create("streamlabel");
+    track_= webrtc::VideoTrack::Create(kRemoteTrackId, NULL);
+    stream_->AddTrack(track_);
+    EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillRepeatedly(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+  }
+
+  // Adds a outgoing audio track with a given SSRC into the stats.
+  // Reuses |stream_| if a previous helper already created one.
+  void AddOutgoingAudioTrackStats() {
+    if (stream_ == NULL)
+      stream_ = webrtc::MediaStream::Create("streamlabel");
+
+    audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(
+        kLocalTrackId);
+    stream_->AddTrack(audio_track_);
+    EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillOnce(DoAll(SetArgPointee<1>(kLocalTrackId), Return(true)));
+  }
+
+  // Adds a incoming audio track with a given SSRC into the stats.
+  // Reuses |stream_| if a previous helper already created one.
+  void AddIncomingAudioTrackStats() {
+    if (stream_ == NULL)
+      stream_ = webrtc::MediaStream::Create("streamlabel");
+
+    audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(
+        kRemoteTrackId);
+    stream_->AddTrack(audio_track_);
+    EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+  }
+
+  // Registers a fake data channel with the given label and id so it shows up
+  // in pc_.sctp_data_channels().
+  // NOTE(review): the |type| parameter is ignored and DCT_SCTP is always
+  // passed to DataChannel::Create -- confirm whether |type| should be
+  // forwarded instead.
+  void AddDataChannel(cricket::DataChannelType type,
+                      const std::string& label,
+                      int id) {
+    InternalDataChannelInit config;
+    config.id = id;
+
+    data_channels_.push_back(DataChannel::Create(
+        &data_channel_provider_, cricket::DCT_SCTP, label, config));
+  }
+
+  // Exposes the protected StatsCollector::AddCandidateReport to tests.
+  StatsReport* AddCandidateReport(StatsCollector* collector,
+                                  const cricket::Candidate& candidate,
+                                  bool local) {
+    return collector->AddCandidateReport(candidate, local);
+  }
+
+  // Drives a full stats update for an audio track and verifies the resulting
+  // ssrc report, both via the global GetStats(NULL, ...) path and via the
+  // per-track GetStats(track, ...) path. Exactly one of |voice_sender_info|
+  // and |voice_receiver_info| must be non-NULL.
+  void SetupAndVerifyAudioTrackStats(
+      FakeAudioTrack* audio_track,
+      webrtc::MediaStream* stream,
+      webrtc::StatsCollector* stats,
+      cricket::VoiceChannel* voice_channel,
+      const std::string& vc_name,
+      MockVoiceMediaChannel* media_channel,
+      cricket::VoiceSenderInfo* voice_sender_info,
+      cricket::VoiceReceiverInfo* voice_receiver_info,
+      cricket::VoiceMediaInfo* stats_read,
+      StatsReports* reports) {
+    // A track can't have both sender report and recv report at the same time
+    // for now, this might change in the future though.
+    ASSERT((voice_sender_info == NULL) ^ (voice_receiver_info == NULL));
+
+    // Instruct the session to return stats containing the transport channel.
+    InitSessionStats(vc_name);
+    EXPECT_CALL(session_, GetTransportStats(_))
+        .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                              Return(true)));
+
+    // Constructs an ssrc stats update.
+    if (voice_sender_info)
+      stats_read->senders.push_back(*voice_sender_info);
+    if (voice_receiver_info)
+      stats_read->receivers.push_back(*voice_receiver_info);
+
+    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(
+        Return(voice_channel));
+    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(*media_channel, GetStats(_))
+        .WillOnce(DoAll(SetArgPointee<0>(*stats_read), Return(true)));
+
+    stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+    // Clear the cache so the second GetStats call below re-collects.
+    stats->ClearUpdateStatsCacheForTest();
+    stats->GetStats(NULL, reports);
+
+    // Verify the existence of the track report.
+    const StatsReport* report = FindNthReportByType(
+        *reports, StatsReport::kStatsReportTypeSsrc, 1);
+    EXPECT_FALSE(report == NULL);
+    EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
+    std::string track_id = ExtractSsrcStatsValue(
+        *reports, StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    std::string ssrc_id = ExtractSsrcStatsValue(
+        *reports, StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+    // Verifies the values in the track report.
+    if (voice_sender_info) {
+      UpdateVoiceSenderInfoFromAudioTrack(audio_track, voice_sender_info);
+      VerifyVoiceSenderInfoReport(report, *voice_sender_info);
+    }
+    if (voice_receiver_info) {
+      VerifyVoiceReceiverInfoReport(report, *voice_receiver_info);
+    }
+
+    // Verify we get the same result by passing a track to GetStats().
+    StatsReports track_reports;  // returned values.
+    stats->GetStats(audio_track, &track_reports);
+    const StatsReport* track_report = FindNthReportByType(
+        track_reports, StatsReport::kStatsReportTypeSsrc, 1);
+    EXPECT_TRUE(track_report);
+    EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+    track_id = ExtractSsrcStatsValue(track_reports,
+                                     StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    ssrc_id = ExtractSsrcStatsValue(track_reports,
+                                    StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+    if (voice_sender_info)
+      VerifyVoiceSenderInfoReport(track_report, *voice_sender_info);
+    if (voice_receiver_info)
+      VerifyVoiceReceiverInfoReport(track_report, *voice_receiver_info);
+  }
+
+  // Feeds fake local/remote certificates and cipher info through the mocked
+  // session, then checks that the component report links to complete
+  // certificate-chain reports (|local_ders|/|remote_ders|, leaf first) and
+  // carries the expected DTLS and SRTP cipher values. Empty DER lists mean
+  // the corresponding certificate id must be absent.
+  void TestCertificateReports(const rtc::FakeSSLCertificate& local_cert,
+                              const std::vector<std::string>& local_ders,
+                              const rtc::FakeSSLCertificate& remote_cert,
+                              const std::vector<std::string>& remote_ders) {
+    StatsCollectorForTest stats(&pc_);
+
+    StatsReports reports;  // returned values.
+
+    // Fake stats to process.
+    cricket::TransportChannelStats channel_stats;
+    channel_stats.component = 1;
+    channel_stats.srtp_cipher = "the-srtp-cipher";
+    channel_stats.ssl_cipher = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+
+    cricket::TransportStats transport_stats;
+    transport_stats.transport_name = "audio";
+    transport_stats.channel_stats.push_back(channel_stats);
+
+    SessionStats session_stats;
+    session_stats.transport_stats[transport_stats.transport_name] =
+        transport_stats;
+
+    // Fake certificate to report
+    rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
+        rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::FakeSSLIdentity>(
+                                        new rtc::FakeSSLIdentity(local_cert))
+                                        .Pass()));
+
+    // Configure MockWebRtcSession
+    EXPECT_CALL(session_,
+                GetLocalCertificate(transport_stats.transport_name, _))
+        .WillOnce(DoAll(SetArgPointee<1>(local_certificate), Return(true)));
+    EXPECT_CALL(session_,
+                GetRemoteSSLCertificate(transport_stats.transport_name, _))
+        .WillOnce(
+            DoAll(SetArgPointee<1>(remote_cert.GetReference()), Return(true)));
+    EXPECT_CALL(session_, GetTransportStats(_))
+        .WillOnce(DoAll(SetArgPointee<0>(session_stats),
+                        Return(true)));
+
+    stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+    stats.GetStats(NULL, &reports);
+
+    const StatsReport* channel_report = FindNthReportByType(
+        reports, StatsReport::kStatsReportTypeComponent, 1);
+    EXPECT_TRUE(channel_report != NULL);
+
+    // Check local certificate chain.
+    std::string local_certificate_id = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameLocalCertificateId);
+    if (local_ders.size() > 0) {
+      EXPECT_NE(kNotFound, local_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(local_certificate_id));
+      CheckCertChainReports(reports, local_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, local_certificate_id);
+    }
+
+    // Check remote certificate chain.
+    std::string remote_certificate_id = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameRemoteCertificateId);
+    if (remote_ders.size() > 0) {
+      EXPECT_NE(kNotFound, remote_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(remote_certificate_id));
+      CheckCertChainReports(reports, remote_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, remote_certificate_id);
+    }
+
+    // Check negotiated ciphers.
+    std::string dtls_cipher = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameDtlsCipher);
+    EXPECT_EQ(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+                  TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
+              dtls_cipher);
+    std::string srtp_cipher = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameSrtpCipher);
+    EXPECT_EQ("the-srtp-cipher", srtp_cipher);
+  }
+
+  cricket::FakeMediaEngine* media_engine_;  // Owned by channel_manager_.
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
+  MockWebRtcSession session_;
+  MockPeerConnection pc_;
+  FakeDataChannelProvider data_channel_provider_;
+  SessionStats session_stats_;
+  rtc::scoped_refptr<webrtc::MediaStream> stream_;
+  rtc::scoped_refptr<webrtc::VideoTrack> track_;
+  rtc::scoped_refptr<FakeAudioTrack> audio_track_;
+  std::vector<rtc::scoped_refptr<DataChannel>> data_channels_;
+};
+
+// Verify that ExtractDataInfo populates reports.
+// A data-channel report must exist, be keyed by the channel's integer id,
+// and carry label/id/state values (protocol is expected empty here).
+TEST_F(StatsCollectorTest, ExtractDataInfo) {
+  const std::string label = "hacks";
+  const int id = 31337;
+  const std::string state = DataChannelInterface::DataStateString(
+      DataChannelInterface::DataState::kConnecting);
+
+  AddDataChannel(cricket::DCT_SCTP, label, id);
+  StatsCollectorForTest stats(&pc_);
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+
+  const StatsReport* report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeDataChannel, 1);
+
+  // The report id must be derived from the data channel's integer id.
+  StatsReport::Id reportId = StatsReport::NewTypedIntId(
+      StatsReport::kStatsReportTypeDataChannel, id);
+
+  EXPECT_TRUE(reportId->Equals(report->id()));
+
+  EXPECT_EQ(stats.GetTimeNow(), report->timestamp());
+  EXPECT_EQ(label, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                     reports,
+                                     StatsReport::kStatsValueNameLabel));
+  EXPECT_EQ(rtc::ToString<int64_t>(id),
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameDataChannelId));
+  EXPECT_EQ(state, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                     reports,
+                                     StatsReport::kStatsValueNameState));
+  EXPECT_EQ("", ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                  reports,
+                                  StatsReport::kStatsValueNameProtocol));
+}
+
+// This test verifies that 64-bit counters are passed successfully.
+// bytes_sent is deliberately larger than 0xFFFFFFFF to catch any 32-bit
+// truncation on the way from VideoSenderInfo to the string-valued report.
+TEST_F(StatsCollectorTest, BytesCounterHandles64Bits) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+  StatsReports reports;  // returned values.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  // The number of bytes must be larger than 0xFFFFFFFF for this test.
+  const int64_t kBytesSent = 12345678901234LL;
+  const std::string kBytesSentString("12345678901234");
+
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+                      Return(true)));
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  std::string result = ExtractSsrcStatsValue(reports,
+      StatsReport::kStatsValueNameBytesSent);
+  EXPECT_EQ(kBytesSentString, result);
+}
+
+// Test that BWE information is reported via stats.
+// Both an ssrc report (bytes sent) and a BWE report (target encoder bitrate)
+// must be produced from a single media-channel stats read.
+TEST_F(StatsCollectorTest, BandwidthEstimationInfoIsReported) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+
+  StatsReports reports;  // returned values.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  // Set up an SSRC just to test that we get both kinds of stats back: SSRC and
+  // BWE.
+  const int64_t kBytesSent = 12345678901234LL;
+  const std::string kBytesSentString("12345678901234");
+
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+  cricket::BandwidthEstimationInfo bwe;
+  const int kTargetEncBitrate = 123456;
+  const std::string kTargetEncBitrateString("123456");
+  bwe.target_enc_bitrate = kTargetEncBitrate;
+  stats_read.bw_estimations.push_back(bwe);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read), Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  std::string result = ExtractSsrcStatsValue(reports,
+      StatsReport::kStatsValueNameBytesSent);
+  EXPECT_EQ(kBytesSentString, result);
+  result = ExtractBweStatsValue(reports,
+      StatsReport::kStatsValueNameTargetEncBitrate);
+  EXPECT_EQ(kTargetEncBitrateString, result);
+}
+
+// This test verifies that an object of type "googSession" always
+// exists in the returned stats.
+TEST_F(StatsCollectorTest, SessionObjectExists) {
+  StatsCollectorForTest stats(&pc_);
+
+  StatsReports reports;  // returned values.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  // The session report must be present even with no media channels set up.
+  const StatsReport* session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 1);
+  EXPECT_FALSE(session_report == NULL);
+}
+
+// This test verifies that only one object of type "googSession" exists
+// in the returned stats.
+TEST_F(StatsCollectorTest, OnlyOneSessionObjectExists) {
+  StatsCollectorForTest stats(&pc_);
+
+  StatsReports reports;  // returned values.
+  // Two update passes must not produce a duplicate session report.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  const StatsReport* session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 1);
+  EXPECT_FALSE(session_report == NULL);
+  // Asking for a second session report must find nothing.
+  session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 2);
+  EXPECT_EQ(NULL, session_report);
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// without calling StatsCollector::UpdateStats.
+TEST_F(StatsCollectorTest, TrackObjectExistsWithoutUpdateStats) {
+  StatsCollectorForTest stats(&pc_);
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, "video", false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Verfies the existence of the track report.
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  // Without UpdateStats only the bare track report exists, with a zero
+  // timestamp.
+  EXPECT_EQ((size_t)1, reports.size());
+  EXPECT_EQ(StatsReport::kStatsReportTypeTrack, reports[0]->type());
+  EXPECT_EQ(0, reports[0]->timestamp());
+
+  std::string trackValue =
+      ExtractStatsValue(StatsReport::kStatsReportTypeTrack,
+                        reports,
+                        StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, trackValue);
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// when StatsCollector::UpdateStats is called with ssrc stats.
+TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Constructs an ssrc stats update.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  const int64_t kBytesSent = 12345678901234LL;
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+                      Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  // |reports| should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE((size_t)3, reports.size());
+  const StatsReport* track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+
+  // Get report for the specific |track|.
+  reports.clear();
+  stats.GetStats(track_, &reports);
+  // |reports| should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE((size_t)3, reports.size());
+  track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+  EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+
+  std::string ssrc_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+  std::string track_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, track_id);
+}
+
+// This test verifies that an SSRC object has the identifier of a Transport
+// stats object, and that this transport stats object exists in stats.
+TEST_F(StatsCollectorTest, TransportObjectLinkedFromSsrcObject) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Constructs an ssrc stats update.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  const int64_t kBytesSent = 12345678901234LL;
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                            Return(true)));
+
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  std::string transport_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeSsrc,
+      reports,
+      StatsReport::kStatsValueNameTransportId);
+  ASSERT_NE(kNotFound, transport_id);
+  // Transport id component ID will always be 1.
+  // This has assumptions about how the ID is constructed.  As is, this is
+  // OK since this is for testing purposes only, but if we ever need this
+  // in production, we should add a generic method that does this.
+  // Strip the type prefix (before the first '-') and the trailing component
+  // number (after the last '-') to recover the transport name.
+  size_t index = transport_id.find('-');
+  ASSERT_NE(std::string::npos, index);
+  std::string content = transport_id.substr(index + 1);
+  index = content.rfind('-');
+  ASSERT_NE(std::string::npos, index);
+  content = content.substr(0, index);
+  StatsReport::Id id(StatsReport::NewComponentId(content, 1));
+  ASSERT_EQ(transport_id, id->ToString());
+  const StatsReport* transport_report = FindReportById(reports, id);
+  ASSERT_FALSE(transport_report == NULL);
+}
+
+// This test verifies that a remote stats object will not be created for
+// an outgoing SSRC where remote stats are not returned.
+TEST_F(StatsCollectorTest, RemoteSsrcInfoIsAbsent) {
+  StatsCollectorForTest stats(&pc_);
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  // No remote stats were fed in, so no remote-ssrc report may appear.
+  const StatsReport* remote_report = FindNthReportByType(reports,
+      StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  EXPECT_TRUE(remote_report == NULL);
+}
+
+// This test verifies that a remote stats object will be created for
+// an outgoing SSRC where stats are returned.
+TEST_F(StatsCollectorTest, RemoteSsrcInfoIsPresent) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  // Ownership of |media_channel| is transferred to |video_channel|.
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Instruct the session to return stats containing the transport channel.
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  // Constructs an ssrc stats update.
+  cricket::VideoMediaInfo stats_read;
+
+  // Attach remote (RTCP-derived) stats to the sender's ssrc.
+  cricket::SsrcReceiverInfo remote_ssrc_stats;
+  remote_ssrc_stats.timestamp = 12345.678;
+  remote_ssrc_stats.ssrc = kSsrcOfTrack;
+  cricket::VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(kSsrcOfTrack);
+  video_sender_info.remote_stats.push_back(remote_ssrc_stats);
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                            Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+
+  const StatsReport* remote_report = FindNthReportByType(reports,
+      StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  EXPECT_FALSE(remote_report == NULL);
+  // The remote report keeps the timestamp carried in the remote stats.
+  EXPECT_EQ(12345.678, remote_report->timestamp());
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// when StatsCollector::UpdateStats is called with ssrc stats.
+TEST_F(StatsCollectorTest, ReportsFromRemoteTrack) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ const char kVideoChannelName[] = "video";
+ InitSessionStats(kVideoChannelName);
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+ Return(true)));
+
+ MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+ cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+ nullptr, kVideoChannelName, false);
+ AddIncomingVideoTrackStats();
+ stats.AddStream(stream_);
+
+ // Constructs an ssrc stats update.
+ cricket::VideoReceiverInfo video_receiver_info;
+ cricket::VideoMediaInfo stats_read;
+ const int64_t kNumOfPacketsConcealed = 54321;
+
+ // Construct a stats value to read.
+ video_receiver_info.add_ssrc(1234);
+ video_receiver_info.packets_concealed = kNumOfPacketsConcealed;
+ stats_read.receivers.push_back(video_receiver_info);
+
+ EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+ EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+ EXPECT_CALL(*media_channel, GetStats(_))
+ .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+ Return(true)));
+
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ StatsReports reports;
+ stats.GetStats(NULL, &reports);
+ // |reports| should contain at least one session report, one track report,
+ // and one ssrc report.
+ EXPECT_LE(static_cast<size_t>(3), reports.size());
+ const StatsReport* track_report = FindNthReportByType(
+ reports, StatsReport::kStatsReportTypeTrack, 1);
+ EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+
+ std::string ssrc_id = ExtractSsrcStatsValue(
+ reports, StatsReport::kStatsValueNameSsrc);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+ std::string track_id = ExtractSsrcStatsValue(
+ reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kRemoteTrackId, track_id);
+}
+
+// This test verifies the Ice Candidate report should contain the correct
+// information from local/remote candidates.
+TEST_F(StatsCollectorTest, IceCandidateReport) {
+ StatsCollectorForTest stats(&pc_);
+
+ StatsReports reports; // returned values.
+
+ const int local_port = 2000;
+ const char local_ip[] = "192.168.0.1";
+ const int remote_port = 2001;
+ const char remote_ip[] = "192.168.0.2";
+
+ rtc::SocketAddress local_address(local_ip, local_port);
+ rtc::SocketAddress remote_address(remote_ip, remote_port);
+ rtc::AdapterType network_type = rtc::ADAPTER_TYPE_ETHERNET;
+ uint32_t priority = 1000;
+
+ cricket::Candidate c;
+ ASSERT(c.id().length() > 0);
+ c.set_type(cricket::LOCAL_PORT_TYPE);
+ c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+ c.set_address(local_address);
+ c.set_priority(priority);
+ c.set_network_type(network_type);
+ std::string report_id = AddCandidateReport(&stats, c, true)->id()->ToString();
+ EXPECT_EQ("Cand-" + c.id(), report_id);
+
+ c = cricket::Candidate();
+ ASSERT(c.id().length() > 0);
+ c.set_type(cricket::PRFLX_PORT_TYPE);
+ c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+ c.set_address(remote_address);
+ c.set_priority(priority);
+ c.set_network_type(network_type);
+ report_id = AddCandidateReport(&stats, c, false)->id()->ToString();
+ EXPECT_EQ("Cand-" + c.id(), report_id);
+
+ stats.GetStats(NULL, &reports);
+
+ // Verify the local candidate report is populated correctly.
+ EXPECT_EQ(
+ local_ip,
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidateIPAddress));
+ EXPECT_EQ(
+ rtc::ToString<int>(local_port),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidatePortNumber));
+ EXPECT_EQ(
+ cricket::UDP_PROTOCOL_NAME,
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidateTransportType));
+ EXPECT_EQ(
+ rtc::ToString<int>(priority),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidatePriority));
+ EXPECT_EQ(
+ IceCandidateTypeToStatsType(cricket::LOCAL_PORT_TYPE),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidateType));
+ EXPECT_EQ(
+ AdapterTypeToStatsType(network_type),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+ StatsReport::kStatsValueNameCandidateNetworkType));
+
+ // Verify the remote candidate report is populated correctly.
+ EXPECT_EQ(remote_ip,
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+ reports,
+ StatsReport::kStatsValueNameCandidateIPAddress));
+ EXPECT_EQ(rtc::ToString<int>(remote_port),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+ reports,
+ StatsReport::kStatsValueNameCandidatePortNumber));
+ EXPECT_EQ(cricket::UDP_PROTOCOL_NAME,
+ ExtractStatsValue(
+ StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+ StatsReport::kStatsValueNameCandidateTransportType));
+ EXPECT_EQ(rtc::ToString<int>(priority),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+ reports,
+ StatsReport::kStatsValueNameCandidatePriority));
+ EXPECT_EQ(
+ IceCandidateTypeToStatsType(cricket::PRFLX_PORT_TYPE),
+ ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+ reports, StatsReport::kStatsValueNameCandidateType));
+ EXPECT_EQ(kNotFound,
+ ExtractStatsValue(
+ StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+ StatsReport::kStatsValueNameCandidateNetworkType));
+}
+
+// This test verifies that all chained certificates are correctly
+// reported
+TEST_F(StatsCollectorTest, ChainedCertificateReportsCreated) {
+ // Build local certificate chain.
+ std::vector<std::string> local_ders(5);
+ local_ders[0] = "These";
+ local_ders[1] = "are";
+ local_ders[2] = "some";
+ local_ders[3] = "der";
+ local_ders[4] = "values";
+ rtc::FakeSSLCertificate local_cert(DersToPems(local_ders));
+
+ // Build remote certificate chain
+ std::vector<std::string> remote_ders(4);
+ remote_ders[0] = "A";
+ remote_ders[1] = "non-";
+ remote_ders[2] = "intersecting";
+ remote_ders[3] = "set";
+ rtc::FakeSSLCertificate remote_cert(DersToPems(remote_ders));
+
+ TestCertificateReports(local_cert, local_ders, remote_cert, remote_ders);
+}
+
+// This test verifies that all certificates without chains are correctly
+// reported.
+TEST_F(StatsCollectorTest, ChainlessCertificateReportsCreated) {
+ // Build local certificate.
+ std::string local_der = "This is the local der.";
+ rtc::FakeSSLCertificate local_cert(DerToPem(local_der));
+
+ // Build remote certificate.
+ std::string remote_der = "This is somebody else's der.";
+ rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der));
+
+ TestCertificateReports(local_cert, std::vector<std::string>(1, local_der),
+ remote_cert, std::vector<std::string>(1, remote_der));
+}
+
+// This test verifies that the stats are generated correctly when no
+// transport is present.
+TEST_F(StatsCollectorTest, NoTransport) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ StatsReports reports; // returned values.
+
+ // Fake stats to process.
+ cricket::TransportChannelStats channel_stats;
+ channel_stats.component = 1;
+
+ cricket::TransportStats transport_stats;
+ transport_stats.transport_name = "audio";
+ transport_stats.channel_stats.push_back(channel_stats);
+
+ SessionStats session_stats;
+ session_stats.transport_stats[transport_stats.transport_name] =
+ transport_stats;
+
+ // Configure MockWebRtcSession
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillOnce(DoAll(SetArgPointee<0>(session_stats),
+ Return(true)));
+
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ stats.GetStats(NULL, &reports);
+
+ // Check that the local certificate is absent.
+ std::string local_certificate_id = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameLocalCertificateId);
+ ASSERT_EQ(kNotFound, local_certificate_id);
+
+ // Check that the remote certificate is absent.
+ std::string remote_certificate_id = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameRemoteCertificateId);
+ ASSERT_EQ(kNotFound, remote_certificate_id);
+
+ // Check that the negotiated ciphers are absent.
+ std::string dtls_cipher = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameDtlsCipher);
+ ASSERT_EQ(kNotFound, dtls_cipher);
+ std::string srtp_cipher = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameSrtpCipher);
+ ASSERT_EQ(kNotFound, srtp_cipher);
+}
+
+// This test verifies that the stats are generated correctly when the transport
+// does not have any certificates.
+TEST_F(StatsCollectorTest, NoCertificates) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ StatsReports reports; // returned values.
+
+ // Fake stats to process.
+ cricket::TransportChannelStats channel_stats;
+ channel_stats.component = 1;
+
+ cricket::TransportStats transport_stats;
+ transport_stats.transport_name = "audio";
+ transport_stats.channel_stats.push_back(channel_stats);
+
+ SessionStats session_stats;
+ session_stats.transport_stats[transport_stats.transport_name] =
+ transport_stats;
+
+ // Fake transport object.
+ rtc::scoped_ptr<cricket::FakeTransport> transport(
+ new cricket::FakeTransport(transport_stats.transport_name));
+
+ // Configure MockWebRtcSession
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillOnce(DoAll(SetArgPointee<0>(session_stats),
+ Return(true)));
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ stats.GetStats(NULL, &reports);
+
+ // Check that the local certificate is absent.
+ std::string local_certificate_id = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameLocalCertificateId);
+ ASSERT_EQ(kNotFound, local_certificate_id);
+
+ // Check that the remote certificate is absent.
+ std::string remote_certificate_id = ExtractStatsValue(
+ StatsReport::kStatsReportTypeComponent,
+ reports,
+ StatsReport::kStatsValueNameRemoteCertificateId);
+ ASSERT_EQ(kNotFound, remote_certificate_id);
+}
+
+// This test verifies that a remote certificate with an unsupported digest
+// algorithm is correctly ignored.
+TEST_F(StatsCollectorTest, UnsupportedDigestIgnored) {
+ // Build a local certificate.
+ std::string local_der = "This is the local der.";
+ rtc::FakeSSLCertificate local_cert(DerToPem(local_der));
+
+ // Build a remote certificate with an unsupported digest algorithm.
+ std::string remote_der = "This is somebody else's der.";
+ rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der));
+ remote_cert.set_digest_algorithm("foobar");
+
+ TestCertificateReports(local_cert, std::vector<std::string>(1, local_der),
+ remote_cert, std::vector<std::string>());
+}
+
+// This test verifies that a local stats object can get statistics via
+// AudioTrackInterface::GetStats() method.
+TEST_F(StatsCollectorTest, GetStatsFromLocalAudioTrack) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+ media_channel, nullptr, kVcName, false);
+ AddOutgoingAudioTrackStats();
+ stats.AddStream(stream_);
+ stats.AddLocalAudioTrack(audio_track_, kSsrcOfTrack);
+
+ cricket::VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+
+ cricket::VoiceMediaInfo stats_read;
+ StatsReports reports; // returned values.
+ SetupAndVerifyAudioTrackStats(
+ audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+ media_channel, &voice_sender_info, NULL, &stats_read, &reports);
+
+ // Verify that there is no remote report for the local audio track because
+ // we did not set it up.
+ const StatsReport* remote_report = FindNthReportByType(reports,
+ StatsReport::kStatsReportTypeRemoteSsrc, 1);
+ EXPECT_TRUE(remote_report == NULL);
+}
+
+// This test verifies that audio receive streams populate stats reports
+// correctly.
+TEST_F(StatsCollectorTest, GetStatsFromRemoteStream) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+ media_channel, nullptr, kVcName, false);
+ AddIncomingAudioTrackStats();
+ stats.AddStream(stream_);
+
+ cricket::VoiceReceiverInfo voice_receiver_info;
+ InitVoiceReceiverInfo(&voice_receiver_info);
+ voice_receiver_info.codec_name = "fake_codec";
+
+ cricket::VoiceMediaInfo stats_read;
+ StatsReports reports; // returned values.
+ SetupAndVerifyAudioTrackStats(
+ audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+ media_channel, NULL, &voice_receiver_info, &stats_read, &reports);
+}
+
+// This test verifies that a local stats object won't update its statistics
+// after a RemoveLocalAudioTrack() call.
+TEST_F(StatsCollectorTest, GetStatsAfterRemoveAudioStream) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+ media_channel, nullptr, kVcName, false);
+ AddOutgoingAudioTrackStats();
+ stats.AddStream(stream_);
+ stats.AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ // Instruct the session to return stats containing the transport channel.
+ InitSessionStats(kVcName);
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+ Return(true)));
+
+ stats.RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+ cricket::VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+
+ // Constructs an ssrc stats update.
+ cricket::VoiceMediaInfo stats_read;
+ stats_read.senders.push_back(voice_sender_info);
+
+ EXPECT_CALL(session_, voice_channel()).WillRepeatedly(Return(&voice_channel));
+ EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+ EXPECT_CALL(*media_channel, GetStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+ Return(true)));
+
+ StatsReports reports; // returned values.
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ stats.GetStats(NULL, &reports);
+
+ // The report will exist since we don't remove them in RemoveStream().
+ const StatsReport* report = FindNthReportByType(
+ reports, StatsReport::kStatsReportTypeSsrc, 1);
+ EXPECT_FALSE(report == NULL);
+ EXPECT_EQ(stats.GetTimeNow(), report->timestamp());
+ std::string track_id = ExtractSsrcStatsValue(
+ reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kLocalTrackId, track_id);
+ std::string ssrc_id = ExtractSsrcStatsValue(
+ reports, StatsReport::kStatsValueNameSsrc);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+ // Verifies the values in the track report, no value will be changed by the
+ // AudioTrackInterface::GetSignalValue() and
+ // AudioProcessorInterface::AudioProcessorStats::GetStats();
+ VerifyVoiceSenderInfoReport(report, voice_sender_info);
+}
+
+// This test verifies that when ongoing and incoming audio tracks are using
+// the same ssrc, they populate stats reports correctly.
+TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+ media_channel, nullptr, kVcName, false);
+
+ // Create a local stream with a local audio track and adds it to the stats.
+ AddOutgoingAudioTrackStats();
+ stats.AddStream(stream_);
+ stats.AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ // Create a remote stream with a remote audio track and adds it to the stats.
+ rtc::scoped_refptr<webrtc::MediaStream> remote_stream(
+ webrtc::MediaStream::Create("remotestreamlabel"));
+ rtc::scoped_refptr<FakeAudioTrack> remote_track(
+ new rtc::RefCountedObject<FakeAudioTrack>(kRemoteTrackId));
+ EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+ remote_stream->AddTrack(remote_track);
+ stats.AddStream(remote_stream);
+
+ // Instruct the session to return stats containing the transport channel.
+ InitSessionStats(kVcName);
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+ Return(true)));
+
+ cricket::VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+
+ // Some of the contents in |voice_sender_info| needs to be updated from the
+ // |audio_track_|.
+ UpdateVoiceSenderInfoFromAudioTrack(audio_track_.get(), &voice_sender_info);
+
+ cricket::VoiceReceiverInfo voice_receiver_info;
+ InitVoiceReceiverInfo(&voice_receiver_info);
+
+ // Constructs an ssrc stats update.
+ cricket::VoiceMediaInfo stats_read;
+ stats_read.senders.push_back(voice_sender_info);
+ stats_read.receivers.push_back(voice_receiver_info);
+
+ EXPECT_CALL(session_, voice_channel()).WillRepeatedly(Return(&voice_channel));
+ EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+ EXPECT_CALL(*media_channel, GetStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+ Return(true)));
+
+ StatsReports reports; // returned values.
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ // Get stats for the local track.
+ stats.GetStats(audio_track_.get(), &reports);
+ const StatsReport* track_report = FindNthReportByType(
+ reports, StatsReport::kStatsReportTypeSsrc, 1);
+ EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+ std::string track_id = ExtractSsrcStatsValue(
+ reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kLocalTrackId, track_id);
+ VerifyVoiceSenderInfoReport(track_report, voice_sender_info);
+
+ // Get stats for the remote track.
+ reports.clear();
+ stats.GetStats(remote_track.get(), &reports);
+ track_report = FindNthReportByType(reports,
+ StatsReport::kStatsReportTypeSsrc, 1);
+ EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+ track_id = ExtractSsrcStatsValue(reports,
+ StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kRemoteTrackId, track_id);
+ VerifyVoiceReceiverInfoReport(track_report, voice_receiver_info);
+}
+
+// This test verifies that when two outgoing audio tracks are using the same
+// ssrc at different times, they populate stats reports correctly.
+// TODO(xians): Figure out if it is possible to encapsulate the setup and
+// avoid duplication of code in test cases.
+TEST_F(StatsCollectorTest, TwoLocalTracksWithSameSsrc) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+ .WillRepeatedly(Return(false));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+ media_channel, nullptr, kVcName, false);
+
+ // Create a local stream with a local audio track and adds it to the stats.
+ AddOutgoingAudioTrackStats();
+ stats.AddStream(stream_);
+ stats.AddLocalAudioTrack(audio_track_, kSsrcOfTrack);
+
+ cricket::VoiceSenderInfo voice_sender_info;
+ voice_sender_info.add_ssrc(kSsrcOfTrack);
+
+ cricket::VoiceMediaInfo stats_read;
+ StatsReports reports; // returned values.
+ SetupAndVerifyAudioTrackStats(
+ audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+ media_channel, &voice_sender_info, NULL, &stats_read, &reports);
+
+ // Remove the previous audio track from the stream.
+ stream_->RemoveTrack(audio_track_.get());
+ stats.RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ // Create a new audio track and adds it to the stream and stats.
+ static const std::string kNewTrackId = "new_track_id";
+ rtc::scoped_refptr<FakeAudioTrack> new_audio_track(
+ new rtc::RefCountedObject<FakeAudioTrack>(kNewTrackId));
+ EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kNewTrackId), Return(true)));
+ stream_->AddTrack(new_audio_track);
+
+ stats.AddLocalAudioTrack(new_audio_track, kSsrcOfTrack);
+ stats.ClearUpdateStatsCacheForTest();
+ cricket::VoiceSenderInfo new_voice_sender_info;
+ InitVoiceSenderInfo(&new_voice_sender_info);
+ cricket::VoiceMediaInfo new_stats_read;
+ reports.clear();
+ SetupAndVerifyAudioTrackStats(
+ new_audio_track.get(), stream_.get(), &stats, &voice_channel, kVcName,
+ media_channel, &new_voice_sender_info, NULL, &new_stats_read, &reports);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/statstypes.cc b/talk/app/webrtc/statstypes.cc
new file mode 100644
index 0000000000..e45833c668
--- /dev/null
+++ b/talk/app/webrtc/statstypes.cc
@@ -0,0 +1,782 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/statstypes.h"
+
+#include <string.h>
+
+#include "webrtc/base/checks.h"
+
+// TODO(tommi): Could we have a static map of value name -> expected type
+// and use this to RTC_DCHECK on correct usage (somewhat strongly typed values)?
+// Alternatively, we could define the names+type in a separate document and
+// generate strongly typed inline C++ code that forces the correct type to be
+// used for a given name at compile time.
+
+using rtc::RefCountedObject;
+
+namespace webrtc {
+namespace {
+
+// The id of StatsReport of type kStatsReportTypeBwe.
+const char kStatsReportVideoBweId[] = "bweforvideo";
+
+// NOTE: These names need to be consistent with an external
+// specification (W3C Stats Identifiers).
+const char* InternalTypeToString(StatsReport::StatsType type) {
+ switch (type) {
+ case StatsReport::kStatsReportTypeSession:
+ return "googLibjingleSession";
+ case StatsReport::kStatsReportTypeBwe:
+ return "VideoBwe";
+ case StatsReport::kStatsReportTypeRemoteSsrc:
+ return "remoteSsrc";
+ case StatsReport::kStatsReportTypeSsrc:
+ return "ssrc";
+ case StatsReport::kStatsReportTypeTrack:
+ return "googTrack";
+ case StatsReport::kStatsReportTypeIceLocalCandidate:
+ return "localcandidate";
+ case StatsReport::kStatsReportTypeIceRemoteCandidate:
+ return "remotecandidate";
+ case StatsReport::kStatsReportTypeTransport:
+ return "transport";
+ case StatsReport::kStatsReportTypeComponent:
+ return "googComponent";
+ case StatsReport::kStatsReportTypeCandidatePair:
+ return "googCandidatePair";
+ case StatsReport::kStatsReportTypeCertificate:
+ return "googCertificate";
+ case StatsReport::kStatsReportTypeDataChannel:
+ return "datachannel";
+ }
+ RTC_DCHECK(false);
+ return nullptr;
+}
+
+class BandwidthEstimationId : public StatsReport::IdBase {
+ public:
+ BandwidthEstimationId()
+ : StatsReport::IdBase(StatsReport::kStatsReportTypeBwe) {}
+ std::string ToString() const override { return kStatsReportVideoBweId; }
+};
+
+class TypedId : public StatsReport::IdBase {
+ public:
+ TypedId(StatsReport::StatsType type, const std::string& id)
+ : StatsReport::IdBase(type), id_(id) {}
+
+ bool Equals(const IdBase& other) const override {
+ return IdBase::Equals(other) &&
+ static_cast<const TypedId&>(other).id_ == id_;
+ }
+
+ std::string ToString() const override {
+ return std::string(InternalTypeToString(type_)) + kSeparator + id_;
+ }
+
+ protected:
+ const std::string id_;
+};
+
+class TypedIntId : public StatsReport::IdBase {
+ public:
+ TypedIntId(StatsReport::StatsType type, int id)
+ : StatsReport::IdBase(type), id_(id) {}
+
+ bool Equals(const IdBase& other) const override {
+ return IdBase::Equals(other) &&
+ static_cast<const TypedIntId&>(other).id_ == id_;
+ }
+
+ std::string ToString() const override {
+ return std::string(InternalTypeToString(type_)) +
+ kSeparator +
+ rtc::ToString<int>(id_);
+ }
+
+ protected:
+ const int id_;
+};
+
+class IdWithDirection : public TypedId {
+ public:
+ IdWithDirection(StatsReport::StatsType type, const std::string& id,
+ StatsReport::Direction direction)
+ : TypedId(type, id), direction_(direction) {}
+
+ bool Equals(const IdBase& other) const override {
+ return TypedId::Equals(other) &&
+ static_cast<const IdWithDirection&>(other).direction_ == direction_;
+ }
+
+ std::string ToString() const override {
+ std::string ret(TypedId::ToString());
+ ret += kSeparator;
+ ret += direction_ == StatsReport::kSend ? "send" : "recv";
+ return ret;
+ }
+
+ private:
+ const StatsReport::Direction direction_;
+};
+
+class CandidateId : public TypedId {
+ public:
+ CandidateId(bool local, const std::string& id)
+ : TypedId(local ?
+ StatsReport::kStatsReportTypeIceLocalCandidate :
+ StatsReport::kStatsReportTypeIceRemoteCandidate,
+ id) {
+ }
+
+ std::string ToString() const override {
+ return "Cand-" + id_;
+ }
+};
+
+class ComponentId : public StatsReport::IdBase {
+ public:
+ ComponentId(const std::string& content_name, int component)
+ : ComponentId(StatsReport::kStatsReportTypeComponent, content_name,
+ component) {}
+
+ bool Equals(const IdBase& other) const override {
+ return IdBase::Equals(other) &&
+ static_cast<const ComponentId&>(other).component_ == component_ &&
+ static_cast<const ComponentId&>(other).content_name_ == content_name_;
+ }
+
+ std::string ToString() const override {
+ return ToString("Channel-");
+ }
+
+ protected:
+ ComponentId(StatsReport::StatsType type, const std::string& content_name,
+ int component)
+ : IdBase(type),
+ content_name_(content_name),
+ component_(component) {}
+
+ std::string ToString(const char* prefix) const {
+ std::string ret(prefix);
+ ret += content_name_;
+ ret += '-';
+ ret += rtc::ToString<>(component_);
+ return ret;
+ }
+
+ private:
+ const std::string content_name_;
+ const int component_;
+};
+
+class CandidatePairId : public ComponentId {
+ public:
+ CandidatePairId(const std::string& content_name, int component, int index)
+ : ComponentId(StatsReport::kStatsReportTypeCandidatePair, content_name,
+ component),
+ index_(index) {}
+
+ bool Equals(const IdBase& other) const override {
+ return ComponentId::Equals(other) &&
+ static_cast<const CandidatePairId&>(other).index_ == index_;
+ }
+
+ std::string ToString() const override {
+ std::string ret(ComponentId::ToString("Conn-"));
+ ret += '-';
+ ret += rtc::ToString<>(index_);
+ return ret;
+ }
+
+ private:
+ const int index_;
+};
+
+} // namespace
+
+StatsReport::IdBase::IdBase(StatsType type) : type_(type) {}
+StatsReport::IdBase::~IdBase() {}
+
+StatsReport::StatsType StatsReport::IdBase::type() const { return type_; }
+
+bool StatsReport::IdBase::Equals(const IdBase& other) const {
+ return other.type_ == type_;
+}
+
+StatsReport::Value::Value(StatsValueName name, int64_t value, Type int_type)
+ : name(name), type_(int_type) {
+ RTC_DCHECK(type_ == kInt || type_ == kInt64);
+ type_ == kInt ? value_.int_ = static_cast<int>(value) : value_.int64_ = value;
+}
+
+StatsReport::Value::Value(StatsValueName name, float f)
+ : name(name), type_(kFloat) {
+ value_.float_ = f;
+}
+
+StatsReport::Value::Value(StatsValueName name, const std::string& value)
+ : name(name), type_(kString) {
+ value_.string_ = new std::string(value);
+}
+
+StatsReport::Value::Value(StatsValueName name, const char* value)
+ : name(name), type_(kStaticString) {
+ value_.static_string_ = value;
+}
+
+StatsReport::Value::Value(StatsValueName name, bool b)
+ : name(name), type_(kBool) {
+ value_.bool_ = b;
+}
+
+StatsReport::Value::Value(StatsValueName name, const Id& value)
+ : name(name), type_(kId) {
+ value_.id_ = new Id(value);
+}
+
+StatsReport::Value::~Value() {
+ switch (type_) {
+ case kInt:
+ case kInt64:
+ case kFloat:
+ case kBool:
+ case kStaticString:
+ break;
+ case kString:
+ delete value_.string_;
+ break;
+ case kId:
+ delete value_.id_;
+ break;
+ }
+}
+
+// Compares name and value. Since a name maps 1:1 to a type, equal names
+// imply equal types (enforced by the DCHECK below).
+bool StatsReport::Value::Equals(const Value& other) const {
+  if (name != other.name)
+    return false;
+
+  // There's a 1:1 relation between a name and a type, so we don't have to
+  // check that.
+  RTC_DCHECK_EQ(type_, other.type_);
+
+  switch (type_) {
+    case kInt:
+      return value_.int_ == other.value_.int_;
+    case kInt64:
+      return value_.int64_ == other.value_.int64_;
+    case kFloat:
+      // Exact float compare is intentional here: values are considered
+      // changed unless bit-for-bit identical.
+      return value_.float_ == other.value_.float_;
+    case kStaticString: {
+      // Static strings are compared by pointer identity. In debug builds,
+      // verify that two distinct pointers never hold equal text (which
+      // would indicate a duplicated global constant).
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+      if (value_.static_string_ != other.value_.static_string_) {
+        RTC_DCHECK(strcmp(value_.static_string_, other.value_.static_string_) !=
+                   0)
+            << "Duplicate global?";
+      }
+#endif
+      return value_.static_string_ == other.value_.static_string_;
+    }
+    case kString:
+      return *value_.string_ == *other.value_.string_;
+    case kBool:
+      return value_.bool_ == other.value_.bool_;
+    case kId:
+      return (*value_.id_)->Equals(*other.value_.id_);
+  }
+  RTC_NOTREACHED();
+  return false;
+}
+
+// Value-only comparison operators (they ignore |name|). Each returns true
+// iff the stored type matches (with kString/kStaticString and kInt/kInt64
+// treated as compatible) and the payload is equal.
+
+bool StatsReport::Value::operator==(const std::string& value) const {
+  return (type_ == kString && value_.string_->compare(value) == 0) ||
+         (type_ == kStaticString && value.compare(value_.static_string_) == 0);
+}
+
+bool StatsReport::Value::operator==(const char* value) const {
+  if (type_ == kString)
+    return value_.string_->compare(value) == 0;
+  if (type_ != kStaticString)
+    return false;
+  // Debug-only check that equal static-string text always shares one
+  // pointer (catches duplicated global constants).
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+  if (value_.static_string_ != value)
+    RTC_DCHECK(strcmp(value_.static_string_, value) != 0)
+        << "Duplicate global?";
+#endif
+  return value == value_.static_string_;
+}
+
+// For kInt the comparison is done in int precision (the argument is
+// truncated), mirroring how the value was stored.
+bool StatsReport::Value::operator==(int64_t value) const {
+  return type_ == kInt ? value_.int_ == static_cast<int>(value) :
+      (type_ == kInt64 ? value_.int64_ == value : false);
+}
+
+bool StatsReport::Value::operator==(bool value) const {
+  return type_ == kBool && value_.bool_ == value;
+}
+
+bool StatsReport::Value::operator==(float value) const {
+  return type_ == kFloat && value_.float_ == value;
+}
+
+bool StatsReport::Value::operator==(const Id& value) const {
+  return type_ == kId && (*value_.id_)->Equals(value);
+}
+
+// Typed accessors. Each DCHECKs that the stored type matches; calling the
+// wrong getter on a mismatched value is a programming error.
+
+int StatsReport::Value::int_val() const {
+  RTC_DCHECK(type_ == kInt);
+  return value_.int_;
+}
+
+int64_t StatsReport::Value::int64_val() const {
+  RTC_DCHECK(type_ == kInt64);
+  return value_.int64_;
+}
+
+float StatsReport::Value::float_val() const {
+  RTC_DCHECK(type_ == kFloat);
+  return value_.float_;
+}
+
+const char* StatsReport::Value::static_string_val() const {
+  RTC_DCHECK(type_ == kStaticString);
+  return value_.static_string_;
+}
+
+const std::string& StatsReport::Value::string_val() const {
+  RTC_DCHECK(type_ == kString);
+  return *value_.string_;
+}
+
+bool StatsReport::Value::bool_val() const {
+  RTC_DCHECK(type_ == kBool);
+  return value_.bool_;
+}
+
+// Maps a StatsValueName enumerator to the attribute name surfaced in
+// getStats() output. Standard attributes use the W3C webrtc-stats names;
+// Google-specific ones carry a "goog" prefix. Returns nullptr (after a
+// DCHECK) for an unknown enumerator.
+const char* StatsReport::Value::display_name() const {
+  switch (name) {
+    case kStatsValueNameAudioOutputLevel:
+      return "audioOutputLevel";
+    case kStatsValueNameAudioInputLevel:
+      return "audioInputLevel";
+    case kStatsValueNameBytesSent:
+      return "bytesSent";
+    case kStatsValueNamePacketsSent:
+      return "packetsSent";
+    case kStatsValueNameBytesReceived:
+      return "bytesReceived";
+    case kStatsValueNameLabel:
+      return "label";
+    case kStatsValueNamePacketsReceived:
+      return "packetsReceived";
+    case kStatsValueNamePacketsLost:
+      return "packetsLost";
+    case kStatsValueNameProtocol:
+      return "protocol";
+    case kStatsValueNameTransportId:
+      return "transportId";
+    case kStatsValueNameSelectedCandidatePairId:
+      return "selectedCandidatePairId";
+    case kStatsValueNameSsrc:
+      return "ssrc";
+    case kStatsValueNameState:
+      return "state";
+    case kStatsValueNameDataChannelId:
+      return "datachannelid";
+
+    // 'goog' prefixed constants.
+    case kStatsValueNameAccelerateRate:
+      return "googAccelerateRate";
+    case kStatsValueNameActiveConnection:
+      return "googActiveConnection";
+    case kStatsValueNameActualEncBitrate:
+      return "googActualEncBitrate";
+    case kStatsValueNameAvailableReceiveBandwidth:
+      return "googAvailableReceiveBandwidth";
+    case kStatsValueNameAvailableSendBandwidth:
+      return "googAvailableSendBandwidth";
+    case kStatsValueNameAvgEncodeMs:
+      return "googAvgEncodeMs";
+    case kStatsValueNameBucketDelay:
+      return "googBucketDelay";
+    case kStatsValueNameBandwidthLimitedResolution:
+      return "googBandwidthLimitedResolution";
+
+    // Candidate related attributes. Values are taken from
+    // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*.
+    case kStatsValueNameCandidateIPAddress:
+      return "ipAddress";
+    case kStatsValueNameCandidateNetworkType:
+      return "networkType";
+    case kStatsValueNameCandidatePortNumber:
+      return "portNumber";
+    case kStatsValueNameCandidatePriority:
+      return "priority";
+    case kStatsValueNameCandidateTransportType:
+      return "transport";
+    case kStatsValueNameCandidateType:
+      return "candidateType";
+
+    case kStatsValueNameChannelId:
+      return "googChannelId";
+    case kStatsValueNameCodecName:
+      return "googCodecName";
+    case kStatsValueNameComponent:
+      return "googComponent";
+    case kStatsValueNameContentName:
+      return "googContentName";
+    case kStatsValueNameCpuLimitedResolution:
+      return "googCpuLimitedResolution";
+    case kStatsValueNameDecodingCTSG:
+      return "googDecodingCTSG";
+    case kStatsValueNameDecodingCTN:
+      return "googDecodingCTN";
+    case kStatsValueNameDecodingNormal:
+      return "googDecodingNormal";
+    case kStatsValueNameDecodingPLC:
+      return "googDecodingPLC";
+    case kStatsValueNameDecodingCNG:
+      return "googDecodingCNG";
+    case kStatsValueNameDecodingPLCCNG:
+      return "googDecodingPLCCNG";
+    case kStatsValueNameDer:
+      return "googDerBase64";
+    case kStatsValueNameDtlsCipher:
+      return "dtlsCipher";
+    case kStatsValueNameEchoCancellationQualityMin:
+      return "googEchoCancellationQualityMin";
+    case kStatsValueNameEchoDelayMedian:
+      return "googEchoCancellationEchoDelayMedian";
+    case kStatsValueNameEchoDelayStdDev:
+      return "googEchoCancellationEchoDelayStdDev";
+    case kStatsValueNameEchoReturnLoss:
+      return "googEchoCancellationReturnLoss";
+    case kStatsValueNameEchoReturnLossEnhancement:
+      return "googEchoCancellationReturnLossEnhancement";
+    case kStatsValueNameEncodeUsagePercent:
+      return "googEncodeUsagePercent";
+    case kStatsValueNameExpandRate:
+      return "googExpandRate";
+    case kStatsValueNameFingerprint:
+      return "googFingerprint";
+    case kStatsValueNameFingerprintAlgorithm:
+      return "googFingerprintAlgorithm";
+    case kStatsValueNameFirsReceived:
+      return "googFirsReceived";
+    case kStatsValueNameFirsSent:
+      return "googFirsSent";
+    case kStatsValueNameFrameHeightInput:
+      return "googFrameHeightInput";
+    case kStatsValueNameFrameHeightReceived:
+      return "googFrameHeightReceived";
+    case kStatsValueNameFrameHeightSent:
+      return "googFrameHeightSent";
+    case kStatsValueNameFrameRateReceived:
+      return "googFrameRateReceived";
+    case kStatsValueNameFrameRateDecoded:
+      return "googFrameRateDecoded";
+    case kStatsValueNameFrameRateOutput:
+      return "googFrameRateOutput";
+    case kStatsValueNameDecodeMs:
+      return "googDecodeMs";
+    case kStatsValueNameMaxDecodeMs:
+      return "googMaxDecodeMs";
+    case kStatsValueNameCurrentDelayMs:
+      return "googCurrentDelayMs";
+    case kStatsValueNameTargetDelayMs:
+      return "googTargetDelayMs";
+    case kStatsValueNameJitterBufferMs:
+      return "googJitterBufferMs";
+    case kStatsValueNameMinPlayoutDelayMs:
+      return "googMinPlayoutDelayMs";
+    case kStatsValueNameRenderDelayMs:
+      return "googRenderDelayMs";
+    case kStatsValueNameCaptureStartNtpTimeMs:
+      return "googCaptureStartNtpTimeMs";
+    case kStatsValueNameFrameRateInput:
+      return "googFrameRateInput";
+    case kStatsValueNameFrameRateSent:
+      return "googFrameRateSent";
+    case kStatsValueNameFrameWidthInput:
+      return "googFrameWidthInput";
+    case kStatsValueNameFrameWidthReceived:
+      return "googFrameWidthReceived";
+    case kStatsValueNameFrameWidthSent:
+      return "googFrameWidthSent";
+    case kStatsValueNameInitiator:
+      return "googInitiator";
+    case kStatsValueNameIssuerId:
+      return "googIssuerId";
+    case kStatsValueNameJitterReceived:
+      return "googJitterReceived";
+    case kStatsValueNameLocalAddress:
+      return "googLocalAddress";
+    case kStatsValueNameLocalCandidateId:
+      return "localCandidateId";
+    case kStatsValueNameLocalCandidateType:
+      return "googLocalCandidateType";
+    case kStatsValueNameLocalCertificateId:
+      return "localCertificateId";
+    case kStatsValueNameAdaptationChanges:
+      return "googAdaptationChanges";
+    case kStatsValueNameNacksReceived:
+      return "googNacksReceived";
+    case kStatsValueNameNacksSent:
+      return "googNacksSent";
+    case kStatsValueNamePreemptiveExpandRate:
+      return "googPreemptiveExpandRate";
+    case kStatsValueNamePlisReceived:
+      return "googPlisReceived";
+    case kStatsValueNamePlisSent:
+      return "googPlisSent";
+    case kStatsValueNamePreferredJitterBufferMs:
+      return "googPreferredJitterBufferMs";
+    // NOTE(review): kStatsValueNameReceiving maps to "googReadable", not a
+    // "Receiving" string — looks deliberate (legacy wire name); confirm.
+    case kStatsValueNameReceiving:
+      return "googReadable";
+    case kStatsValueNameRemoteAddress:
+      return "googRemoteAddress";
+    case kStatsValueNameRemoteCandidateId:
+      return "remoteCandidateId";
+    case kStatsValueNameRemoteCandidateType:
+      return "googRemoteCandidateType";
+    case kStatsValueNameRemoteCertificateId:
+      return "remoteCertificateId";
+    case kStatsValueNameRetransmitBitrate:
+      return "googRetransmitBitrate";
+    case kStatsValueNameRtt:
+      return "googRtt";
+    case kStatsValueNameSecondaryDecodedRate:
+      return "googSecondaryDecodedRate";
+    case kStatsValueNameSendPacketsDiscarded:
+      return "packetsDiscardedOnSend";
+    case kStatsValueNameSpeechExpandRate:
+      return "googSpeechExpandRate";
+    case kStatsValueNameSrtpCipher:
+      return "srtpCipher";
+    case kStatsValueNameTargetEncBitrate:
+      return "googTargetEncBitrate";
+    case kStatsValueNameTransmitBitrate:
+      return "googTransmitBitrate";
+    case kStatsValueNameTransportType:
+      return "googTransportType";
+    case kStatsValueNameTrackId:
+      return "googTrackId";
+    case kStatsValueNameTypingNoiseState:
+      return "googTypingNoiseState";
+    case kStatsValueNameViewLimitedResolution:
+      return "googViewLimitedResolution";
+    case kStatsValueNameWritable:
+      return "googWritable";
+    default:
+      RTC_DCHECK(false);
+      break;
+  }
+
+  return nullptr;
+}
+
+// Renders the stored value (not the name) as a string, dispatching on the
+// active union member.
+std::string StatsReport::Value::ToString() const {
+  switch (type_) {
+    case kInt:
+      return rtc::ToString(value_.int_);
+    case kInt64:
+      return rtc::ToString(value_.int64_);
+    case kFloat:
+      return rtc::ToString(value_.float_);
+    case kStaticString:
+      return std::string(value_.static_string_);
+    case kString:
+      return *value_.string_;
+    case kBool:
+      return value_.bool_ ? "true" : "false";
+    case kId:
+      return (*value_.id_)->ToString();
+  }
+  RTC_NOTREACHED();
+  return std::string();
+}
+
+// A report must always carry a non-null id.
+StatsReport::StatsReport(const Id& id) : id_(id), timestamp_(0.0) {
+  RTC_DCHECK(id_.get());
+}
+
+// Factory functions for the different flavors of report ids. Each wraps the
+// corresponding (file-local) id class in a ref-counted object.
+
+// static
+StatsReport::Id StatsReport::NewBandwidthEstimationId() {
+  return Id(new RefCountedObject<BandwidthEstimationId>());
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedId(StatsType type, const std::string& id) {
+  return Id(new RefCountedObject<TypedId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) {
+  return Id(new RefCountedObject<TypedIntId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewIdWithDirection(
+    StatsType type, const std::string& id, StatsReport::Direction direction) {
+  return Id(new RefCountedObject<IdWithDirection>(type, id, direction));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) {
+  return Id(new RefCountedObject<CandidateId>(local, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewComponentId(
+    const std::string& content_name, int component) {
+  return Id(new RefCountedObject<ComponentId>(content_name, component));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidatePairId(
+    const std::string& content_name, int component, int index) {
+  return Id(new RefCountedObject<CandidatePairId>(
+      content_name, component, index));
+}
+
+const char* StatsReport::TypeToString() const {
+  return InternalTypeToString(id_->type());
+}
+
+// The Add* helpers share one pattern: a new Value is stored only when the
+// name is absent or its current value differs, so re-reporting an unchanged
+// stat does not reallocate or churn the map.
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const std::string& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const char* value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddInt64(StatsReport::StatsValueName name, int64_t value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt64));
+}
+
+void StatsReport::AddInt(StatsReport::StatsValueName name, int value) {
+  const Value* found = FindValue(name);
+  // Widen to int64_t to select the operator==(int64_t) overload.
+  if (!found || !(*found == static_cast<int64_t>(value)))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt));
+}
+
+void StatsReport::AddFloat(StatsReport::StatsValueName name, float value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddBoolean(StatsReport::StatsValueName name, bool value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddId(StatsReport::StatsValueName name,
+                        const Id& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+// Returns the stored value for |name| or nullptr when absent.
+const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
+  Values::const_iterator it = values_.find(name);
+  return it == values_.end() ? nullptr : it->second.get();
+}
+
+// --- StatsCollection ---
+// All members DCHECK single-threaded use; the collection owns the raw
+// StatsReport pointers it holds and deletes them on destruction/replacement.
+
+StatsCollection::StatsCollection() {
+}
+
+StatsCollection::~StatsCollection() {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  for (auto* r : list_)
+    delete r;
+}
+
+StatsCollection::const_iterator StatsCollection::begin() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.begin();
+}
+
+StatsCollection::const_iterator StatsCollection::end() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.end();
+}
+
+size_t StatsCollection::size() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.size();
+}
+
+StatsReport* StatsCollection::InsertNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(Find(id) == nullptr);
+  StatsReport* report = new StatsReport(id);
+  list_.push_back(report);
+  return report;
+}
+
+StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  StatsReport* ret = Find(id);
+  return ret ? ret : InsertNew(id);
+}
+
+// Replaces an existing report with a fresh (empty) one carrying the same
+// id, or inserts a new report when none matches.
+StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(id.get());
+  Container::iterator it = std::find_if(list_.begin(), list_.end(),
+      [&id](const StatsReport* r)->bool { return r->id()->Equals(id); });
+  if (it != end()) {
+    // Reuse the existing (equal) id object rather than |id|.
+    StatsReport* report = new StatsReport((*it)->id());
+    delete *it;
+    *it = report;
+    return report;
+  }
+  return InsertNew(id);
+}
+
+// Looks for a report with the given |id|. If one is not found, NULL
+// will be returned.
+StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  Container::iterator it = std::find_if(list_.begin(), list_.end(),
+      [&id](const StatsReport* r)->bool { return r->id()->Equals(id); });
+  return it == list_.end() ? nullptr : *it;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/statstypes.h b/talk/app/webrtc/statstypes.h
new file mode 100644
index 0000000000..7fa9f3212d
--- /dev/null
+++ b/talk/app/webrtc/statstypes.h
@@ -0,0 +1,417 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains structures used for retrieving statistics from an ongoing
+// libjingle session.
+
+#ifndef TALK_APP_WEBRTC_STATSTYPES_H_
+#define TALK_APP_WEBRTC_STATSTYPES_H_
+
+#include <algorithm>
+#include <list>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/linked_ptr.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+// A single stats report: a typed, ref-counted id plus a timestamp and a map
+// of named values. Changes vs. the original: IdBase's single-argument
+// constructor is now explicit (it is a converting constructor otherwise),
+// and a redundant second `private:` specifier in Value was removed.
+class StatsReport {
+ public:
+  // Indicates whether a track is for sending or receiving.
+  // Used in reports for audio/video tracks.
+  enum Direction {
+    kSend = 0,
+    kReceive,
+  };
+
+  enum StatsType {
+    // StatsReport types.
+    // A StatsReport of |type| = "googSession" contains overall information
+    // about the thing libjingle calls a session (which may contain one
+    // or more RTP sessions.
+    kStatsReportTypeSession,
+
+    // A StatsReport of |type| = "googTransport" contains information
+    // about a libjingle "transport".
+    kStatsReportTypeTransport,
+
+    // A StatsReport of |type| = "googComponent" contains information
+    // about a libjingle "channel" (typically, RTP or RTCP for a transport).
+    // This is intended to be the same thing as an ICE "Component".
+    kStatsReportTypeComponent,
+
+    // A StatsReport of |type| = "googCandidatePair" contains information
+    // about a libjingle "connection" - a single source/destination port pair.
+    // This is intended to be the same thing as an ICE "candidate pair".
+    kStatsReportTypeCandidatePair,
+
+    // A StatsReport of |type| = "VideoBWE" is statistics for video Bandwidth
+    // Estimation, which is global per-session. The |id| field is "bweforvideo"
+    // (will probably change in the future).
+    kStatsReportTypeBwe,
+
+    // A StatsReport of |type| = "ssrc" is statistics for a specific rtp stream.
+    // The |id| field is the SSRC in decimal form of the rtp stream.
+    kStatsReportTypeSsrc,
+
+    // A StatsReport of |type| = "remoteSsrc" is statistics for a specific
+    // rtp stream, generated by the remote end of the connection.
+    kStatsReportTypeRemoteSsrc,
+
+    // A StatsReport of |type| = "googTrack" is statistics for a specific media
+    // track. The |id| field is the track id.
+    kStatsReportTypeTrack,
+
+    // A StatsReport of |type| = "localcandidate" or "remotecandidate" is
+    // attributes on a specific ICE Candidate. It links to its connection pair
+    // by candidate id. The string value is taken from
+    // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*.
+    kStatsReportTypeIceLocalCandidate,
+    kStatsReportTypeIceRemoteCandidate,
+
+    // A StatsReport of |type| = "googCertificate" contains an SSL certificate
+    // transmitted by one of the endpoints of this connection. The |id| is
+    // controlled by the fingerprint, and is used to identify the certificate in
+    // the Channel stats (as "googLocalCertificateId" or
+    // "googRemoteCertificateId") and in any child certificates (as
+    // "googIssuerId").
+    kStatsReportTypeCertificate,
+
+    // A StatsReport of |type| = "datachannel" with statistics for a
+    // particular DataChannel.
+    kStatsReportTypeDataChannel,
+  };
+
+  enum StatsValueName {
+    kStatsValueNameActiveConnection,
+    kStatsValueNameAudioInputLevel,
+    kStatsValueNameAudioOutputLevel,
+    kStatsValueNameBytesReceived,
+    kStatsValueNameBytesSent,
+    kStatsValueNameDataChannelId,
+    kStatsValueNamePacketsLost,
+    kStatsValueNamePacketsReceived,
+    kStatsValueNamePacketsSent,
+    kStatsValueNameProtocol,
+    kStatsValueNameReceiving,
+    kStatsValueNameSelectedCandidatePairId,
+    kStatsValueNameSsrc,
+    kStatsValueNameState,
+    kStatsValueNameTransportId,
+
+    // Internal StatsValue names.
+    kStatsValueNameAccelerateRate,
+    kStatsValueNameActualEncBitrate,
+    kStatsValueNameAdaptationChanges,
+    kStatsValueNameAvailableReceiveBandwidth,
+    kStatsValueNameAvailableSendBandwidth,
+    kStatsValueNameAvgEncodeMs,
+    kStatsValueNameBandwidthLimitedResolution,
+    kStatsValueNameBucketDelay,
+    kStatsValueNameCaptureStartNtpTimeMs,
+    kStatsValueNameCandidateIPAddress,
+    kStatsValueNameCandidateNetworkType,
+    kStatsValueNameCandidatePortNumber,
+    kStatsValueNameCandidatePriority,
+    kStatsValueNameCandidateTransportType,
+    kStatsValueNameCandidateType,
+    kStatsValueNameChannelId,
+    kStatsValueNameCodecName,
+    kStatsValueNameComponent,
+    kStatsValueNameContentName,
+    kStatsValueNameCpuLimitedResolution,
+    kStatsValueNameCurrentDelayMs,
+    kStatsValueNameDecodeMs,
+    kStatsValueNameDecodingCNG,
+    kStatsValueNameDecodingCTN,
+    kStatsValueNameDecodingCTSG,
+    kStatsValueNameDecodingNormal,
+    kStatsValueNameDecodingPLC,
+    kStatsValueNameDecodingPLCCNG,
+    kStatsValueNameDer,
+    kStatsValueNameDtlsCipher,
+    kStatsValueNameEchoCancellationQualityMin,
+    kStatsValueNameEchoDelayMedian,
+    kStatsValueNameEchoDelayStdDev,
+    kStatsValueNameEchoReturnLoss,
+    kStatsValueNameEchoReturnLossEnhancement,
+    kStatsValueNameEncodeUsagePercent,
+    kStatsValueNameExpandRate,
+    kStatsValueNameFingerprint,
+    kStatsValueNameFingerprintAlgorithm,
+    kStatsValueNameFirsReceived,
+    kStatsValueNameFirsSent,
+    kStatsValueNameFrameHeightInput,
+    kStatsValueNameFrameHeightReceived,
+    kStatsValueNameFrameHeightSent,
+    kStatsValueNameFrameRateDecoded,
+    kStatsValueNameFrameRateInput,
+    kStatsValueNameFrameRateOutput,
+    kStatsValueNameFrameRateReceived,
+    kStatsValueNameFrameRateSent,
+    kStatsValueNameFrameWidthInput,
+    kStatsValueNameFrameWidthReceived,
+    kStatsValueNameFrameWidthSent,
+    kStatsValueNameInitiator,
+    kStatsValueNameIssuerId,
+    kStatsValueNameJitterBufferMs,
+    kStatsValueNameJitterReceived,
+    kStatsValueNameLabel,
+    kStatsValueNameLocalAddress,
+    kStatsValueNameLocalCandidateId,
+    kStatsValueNameLocalCandidateType,
+    kStatsValueNameLocalCertificateId,
+    kStatsValueNameMaxDecodeMs,
+    kStatsValueNameMinPlayoutDelayMs,
+    kStatsValueNameNacksReceived,
+    kStatsValueNameNacksSent,
+    kStatsValueNamePlisReceived,
+    kStatsValueNamePlisSent,
+    kStatsValueNamePreemptiveExpandRate,
+    kStatsValueNamePreferredJitterBufferMs,
+    kStatsValueNameRemoteAddress,
+    kStatsValueNameRemoteCandidateId,
+    kStatsValueNameRemoteCandidateType,
+    kStatsValueNameRemoteCertificateId,
+    kStatsValueNameRenderDelayMs,
+    kStatsValueNameRetransmitBitrate,
+    kStatsValueNameRtt,
+    kStatsValueNameSecondaryDecodedRate,
+    kStatsValueNameSendPacketsDiscarded,
+    kStatsValueNameSpeechExpandRate,
+    kStatsValueNameSrtpCipher,
+    kStatsValueNameTargetDelayMs,
+    kStatsValueNameTargetEncBitrate,
+    kStatsValueNameTrackId,
+    kStatsValueNameTransmitBitrate,
+    kStatsValueNameTransportType,
+    kStatsValueNameTypingNoiseState,
+    kStatsValueNameViewLimitedResolution,
+    kStatsValueNameWritable,
+  };
+
+  // Abstract, ref-counted base for report ids. Concrete subclasses live in
+  // the .cc file and are created via the New*Id() factories below.
+  class IdBase : public rtc::RefCountInterface {
+   public:
+    ~IdBase() override;
+    StatsType type() const;
+
+    // Users of IdBase will be using the Id typedef, which is compatible with
+    // this Equals() function. It simply calls the protected (and overridden)
+    // Equals() method.
+    bool Equals(const rtc::scoped_refptr<IdBase>& other) const {
+      return Equals(*other.get());
+    }
+
+    virtual std::string ToString() const = 0;
+
+   protected:
+    // Protected since users of the IdBase type will be using the Id typedef.
+    virtual bool Equals(const IdBase& other) const;
+
+    // explicit: this would otherwise be a converting constructor from
+    // StatsType. Only meant for derived classes.
+    explicit IdBase(StatsType type);
+    const StatsType type_;
+
+    static const char kSeparator = '_';
+  };
+
+  typedef rtc::scoped_refptr<IdBase> Id;
+
+  // A named, typed value. Holds exactly one of the Type alternatives in a
+  // tagged union; kString and kId entries own their heap allocations.
+  struct Value {
+    enum Type {
+      kInt,           // int.
+      kInt64,         // int64_t.
+      kFloat,         // float.
+      kString,        // std::string
+      kStaticString,  // const char*.
+      kBool,          // bool.
+      kId,            // Id.
+    };
+
+    Value(StatsValueName name, int64_t value, Type int_type);
+    Value(StatsValueName name, float f);
+    Value(StatsValueName name, const std::string& value);
+    Value(StatsValueName name, const char* value);
+    Value(StatsValueName name, bool b);
+    Value(StatsValueName name, const Id& value);
+
+    ~Value();
+
+    // TODO(tommi): This compares name as well as value...
+    // I think we should only need to compare the value part and
+    // move the name part into a hash map.
+    bool Equals(const Value& other) const;
+
+    // Comparison operators. Return true iff the current instance is of the
+    // correct type and holds the same value. No conversion is performed so
+    // a string value of "123" is not equal to an int value of 123 and an int
+    // value of 123 is not equal to a float value of 123.0f.
+    // One exception to this is that types kInt and kInt64 can be compared and
+    // kString and kStaticString too.
+    bool operator==(const std::string& value) const;
+    bool operator==(const char* value) const;
+    bool operator==(int64_t value) const;
+    bool operator==(bool value) const;
+    bool operator==(float value) const;
+    bool operator==(const Id& value) const;
+
+    // Getters that allow getting the native value directly.
+    // The caller must know the type beforehand or else hit a check.
+    int int_val() const;
+    int64_t int64_val() const;
+    float float_val() const;
+    const char* static_string_val() const;
+    const std::string& string_val() const;
+    bool bool_val() const;
+    const Id& id_val() const;
+
+    // Returns the string representation of |name|.
+    const char* display_name() const;
+
+    // Converts the native value to a string representation of the value.
+    std::string ToString() const;
+
+    Type type() const { return type_; }
+
+    // TODO(tommi): Move |name| and |display_name| out of the Value struct.
+    const StatsValueName name;
+
+   private:
+    const Type type_;
+    // TODO(tommi): Use C++ 11 union and make value_ const.
+    union InternalType {
+      int int_;
+      int64_t int64_;
+      float float_;
+      bool bool_;
+      std::string* string_;
+      const char* static_string_;
+      Id* id_;
+    } value_;
+
+    RTC_DISALLOW_COPY_AND_ASSIGN(Value);
+  };
+
+  // TODO(tommi): Consider using a similar approach to how we store Ids using
+  // scoped_refptr for values.
+  typedef rtc::linked_ptr<Value> ValuePtr;
+  typedef std::map<StatsValueName, ValuePtr> Values;
+
+  // Ownership of |id| is passed to |this|.
+  explicit StatsReport(const Id& id);
+
+  // Factory functions for various types of stats IDs.
+  static Id NewBandwidthEstimationId();
+  static Id NewTypedId(StatsType type, const std::string& id);
+  static Id NewTypedIntId(StatsType type, int id);
+  static Id NewIdWithDirection(
+      StatsType type, const std::string& id, Direction direction);
+  static Id NewCandidateId(bool local, const std::string& id);
+  static Id NewComponentId(
+      const std::string& content_name, int component);
+  static Id NewCandidatePairId(
+      const std::string& content_name, int component, int index);
+
+  const Id& id() const { return id_; }
+  StatsType type() const { return id_->type(); }
+  double timestamp() const { return timestamp_; }
+  void set_timestamp(double t) { timestamp_ = t; }
+  bool empty() const { return values_.empty(); }
+  const Values& values() const { return values_; }
+
+  const char* TypeToString() const;
+
+  void AddString(StatsValueName name, const std::string& value);
+  void AddString(StatsValueName name, const char* value);
+  void AddInt64(StatsValueName name, int64_t value);
+  void AddInt(StatsValueName name, int value);
+  void AddFloat(StatsValueName name, float value);
+  void AddBoolean(StatsValueName name, bool value);
+  void AddId(StatsValueName name, const Id& value);
+
+  const Value* FindValue(StatsValueName name) const;
+
+ private:
+  // The unique identifier for this object.
+  // This is used as a key for this report in ordered containers,
+  // so it must never be changed.
+  const Id id_;
+  double timestamp_;  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  Values values_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport);
+};
+
+// Typedef for an array of const StatsReport pointers.
+// Ownership of the pointers held by this implementation is assumed to lie
+// elsewhere and lifetime guarantees are made by the implementation that uses
+// this type. In the StatsCollector, object ownership lies with the
+// StatsCollection class.
+// NOTE(review): uses std::vector — make sure <vector> is included directly
+// rather than relied on transitively.
+typedef std::vector<const StatsReport*> StatsReports;
+
+// A map from the report id to the report.
+// This class wraps an STL container and provides a limited set of
+// functionality in order to keep things simple.
+class StatsCollection {
+ public:
+  StatsCollection();
+  ~StatsCollection();  // Deletes all owned StatsReport objects.
+
+  typedef std::list<StatsReport*> Container;
+  typedef Container::iterator iterator;
+  typedef Container::const_iterator const_iterator;
+
+  const_iterator begin() const;
+  const_iterator end() const;
+  size_t size() const;
+
+  // Creates a new report object with |id| that does not already
+  // exist in the list of reports.
+  StatsReport* InsertNew(const StatsReport::Id& id);
+  StatsReport* FindOrAddNew(const StatsReport::Id& id);
+  // Replaces the matching report with a fresh empty one (same id), or
+  // inserts a new one when no report with |id| exists.
+  StatsReport* ReplaceOrAddNew(const StatsReport::Id& id);
+
+  // Looks for a report with the given |id|. If one is not found, NULL
+  // will be returned.
+  StatsReport* Find(const StatsReport::Id& id);
+
+ private:
+  Container list_;
+  // Guards single-thread usage; every member checks it in debug builds.
+  rtc::ThreadChecker thread_checker_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_STATSTYPES_H_
diff --git a/talk/app/webrtc/streamcollection.h b/talk/app/webrtc/streamcollection.h
new file mode 100644
index 0000000000..07a30a68c8
--- /dev/null
+++ b/talk/app/webrtc/streamcollection.h
@@ -0,0 +1,125 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_STREAMCOLLECTION_H_
+#define TALK_APP_WEBRTC_STREAMCOLLECTION_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+namespace webrtc {
+
+// Implementation of StreamCollection.
+class StreamCollection : public StreamCollectionInterface {
+ public:
+ static rtc::scoped_refptr<StreamCollection> Create() {
+ rtc::RefCountedObject<StreamCollection>* implementation =
+ new rtc::RefCountedObject<StreamCollection>();
+ return implementation;
+ }
+
+ static rtc::scoped_refptr<StreamCollection> Create(
+ StreamCollection* streams) {
+ rtc::RefCountedObject<StreamCollection>* implementation =
+ new rtc::RefCountedObject<StreamCollection>(streams);
+ return implementation;
+ }
+
+ virtual size_t count() {
+ return media_streams_.size();
+ }
+
+ virtual MediaStreamInterface* at(size_t index) {
+ return media_streams_.at(index);
+ }
+
+ virtual MediaStreamInterface* find(const std::string& label) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->label().compare(label) == 0) {
+ return (*it);
+ }
+ }
+ return NULL;
+ }
+
+ virtual MediaStreamTrackInterface* FindAudioTrack(
+ const std::string& id) {
+ for (size_t i = 0; i < media_streams_.size(); ++i) {
+ MediaStreamTrackInterface* track = media_streams_[i]->FindAudioTrack(id);
+ if (track) {
+ return track;
+ }
+ }
+ return NULL;
+ }
+
+ virtual MediaStreamTrackInterface* FindVideoTrack(
+ const std::string& id) {
+ for (size_t i = 0; i < media_streams_.size(); ++i) {
+ MediaStreamTrackInterface* track = media_streams_[i]->FindVideoTrack(id);
+ if (track) {
+ return track;
+ }
+ }
+ return NULL;
+ }
+
+ void AddStream(MediaStreamInterface* stream) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->label().compare(stream->label()) == 0)
+ return;
+ }
+ media_streams_.push_back(stream);
+ }
+
+ void RemoveStream(MediaStreamInterface* remove_stream) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->label().compare(remove_stream->label()) == 0) {
+ media_streams_.erase(it);
+ break;
+ }
+ }
+ }
+
+ protected:
+ StreamCollection() {}
+ explicit StreamCollection(StreamCollection* original)
+ : media_streams_(original->media_streams_) {
+ }
+ typedef std::vector<rtc::scoped_refptr<MediaStreamInterface> >
+ StreamVector;
+ StreamVector media_streams_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_STREAMCOLLECTION_H_
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule.cc b/talk/app/webrtc/test/fakeaudiocapturemodule.cc
new file mode 100644
index 0000000000..3564d28d25
--- /dev/null
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule.cc
@@ -0,0 +1,744 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/test/fakeaudiocapturemodule.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+// Audio sample value that is high enough that it doesn't occur naturally when
+// frames are being faked. E.g. NetEq will not generate this large sample value
+// unless it has received an audio frame containing a sample of this value.
+// Even simpler buffers would likely just contain audio sample values of 0.
+static const int kHighSampleValue = 10000;
+
+// Same value as src/modules/audio_device/main/source/audio_device_config.h in
+// https://code.google.com/p/webrtc/
+static const uint32_t kAdmMaxIdleTimeProcess = 1000;
+
+// Constants here are derived by running VoE using a real ADM.
+// The constants correspond to 10ms of mono audio at 44kHz.
+static const int kTimePerFrameMs = 10;
+static const uint8_t kNumberOfChannels = 1;
+static const int kSamplesPerSecond = 44000;
+static const int kTotalDelayMs = 0;
+static const int kClockDriftMs = 0;
+static const uint32_t kMaxVolume = 14392;
+
+enum {
+ MSG_START_PROCESS,
+ MSG_RUN_PROCESS,
+};
+
+FakeAudioCaptureModule::FakeAudioCaptureModule()
+ : last_process_time_ms_(0),
+ audio_callback_(nullptr),
+ recording_(false),
+ playing_(false),
+ play_is_initialized_(false),
+ rec_is_initialized_(false),
+ current_mic_level_(kMaxVolume),
+ started_(false),
+ next_frame_time_(0),
+ frames_received_(0) {
+}
+
+FakeAudioCaptureModule::~FakeAudioCaptureModule() {
+ if (process_thread_) {
+ process_thread_->Stop();
+ }
+}
+
+rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
+ rtc::scoped_refptr<FakeAudioCaptureModule> capture_module(
+ new rtc::RefCountedObject<FakeAudioCaptureModule>());
+ if (!capture_module->Initialize()) {
+ return nullptr;
+ }
+ return capture_module;
+}
+
+int FakeAudioCaptureModule::frames_received() const {
+ rtc::CritScope cs(&crit_);
+ return frames_received_;
+}
+
+int64_t FakeAudioCaptureModule::TimeUntilNextProcess() {
+ const uint32_t current_time = rtc::Time();
+ if (current_time < last_process_time_ms_) {
+ // TODO: wraparound could be handled more gracefully.
+ return 0;
+ }
+ const uint32_t elapsed_time = current_time - last_process_time_ms_;
+ if (kAdmMaxIdleTimeProcess < elapsed_time) {
+ return 0;
+ }
+ return kAdmMaxIdleTimeProcess - elapsed_time;
+}
+
+int32_t FakeAudioCaptureModule::Process() {
+ last_process_time_ms_ = rtc::Time();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::ActiveAudioLayer(
+ AudioLayer* /*audio_layer*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+webrtc::AudioDeviceModule::ErrorCode FakeAudioCaptureModule::LastError() const {
+ ASSERT(false);
+ return webrtc::AudioDeviceModule::kAdmErrNone;
+}
+
+int32_t FakeAudioCaptureModule::RegisterEventObserver(
+ webrtc::AudioDeviceObserver* /*event_callback*/) {
+ // Only used to report warnings and errors. This fake implementation won't
+ // generate any so discard this callback.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RegisterAudioCallback(
+ webrtc::AudioTransport* audio_callback) {
+ rtc::CritScope cs(&crit_callback_);
+ audio_callback_ = audio_callback;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Init() {
+ // Initialize is called by the factory method. Safe to ignore this Init call.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Terminate() {
+ // Clean up in the destructor. No action here, just success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Initialized() const {
+ ASSERT(false);
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::PlayoutDevices() {
+ ASSERT(false);
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::RecordingDevices() {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(uint16_t /*index*/) {
+ // No playout device, just playing from file. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(WindowsDeviceType /*device*/) {
+ if (play_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(uint16_t /*index*/) {
+ // No recording device, just dropping audio. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(
+ WindowsDeviceType /*device*/) {
+ if (rec_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutIsAvailable(bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitPlayout() {
+ play_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::PlayoutIsInitialized() const {
+ return play_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::RecordingIsAvailable(bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitRecording() {
+ rec_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::RecordingIsInitialized() const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartPlayout() {
+ if (!play_is_initialized_) {
+ return -1;
+ }
+ {
+ rtc::CritScope cs(&crit_);
+ playing_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopPlayout() {
+ bool start = false;
+ {
+ rtc::CritScope cs(&crit_);
+ playing_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Playing() const {
+ rtc::CritScope cs(&crit_);
+ return playing_;
+}
+
+int32_t FakeAudioCaptureModule::StartRecording() {
+ if (!rec_is_initialized_) {
+ return -1;
+ }
+ {
+ rtc::CritScope cs(&crit_);
+ recording_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRecording() {
+ bool start = false;
+ {
+ rtc::CritScope cs(&crit_);
+ recording_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Recording() const {
+ rtc::CritScope cs(&crit_);
+ return recording_;
+}
+
+int32_t FakeAudioCaptureModule::SetAGC(bool /*enable*/) {
+ // No AGC but not needed since audio is pregenerated. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::AGC() const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetWaveOutVolume(uint16_t /*volume_left*/,
+ uint16_t /*volume_right*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::WaveOutVolume(
+ uint16_t* /*volume_left*/,
+ uint16_t* /*volume_right*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitSpeaker() {
+ // No speaker, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::SpeakerIsInitialized() const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitMicrophone() {
+ // No microphone, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::MicrophoneIsInitialized() const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeIsAvailable(bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerVolume(uint32_t /*volume*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolume(uint32_t* /*volume*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxSpeakerVolume(
+ uint32_t* /*max_volume*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinSpeakerVolume(
+ uint32_t* /*min_volume*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeStepSize(
+ uint16_t* /*step_size*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable(
+ bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) {
+ rtc::CritScope cs(&crit_);
+ current_mic_level_ = volume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const {
+ rtc::CritScope cs(&crit_);
+ *volume = current_mic_level_;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxMicrophoneVolume(
+ uint32_t* max_volume) const {
+ *max_volume = kMaxVolume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinMicrophoneVolume(
+ uint32_t* /*min_volume*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeStepSize(
+ uint16_t* /*step_size*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMuteIsAvailable(bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerMute(bool /*enable*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMute(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMuteIsAvailable(bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneMute(bool /*enable*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMute(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneBoostIsAvailable(
+ bool* /*available*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneBoost(bool /*enable*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneBoost(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayoutIsAvailable(
+ bool* available) const {
+  // No playout device, just dropping audio. Stereo can be dropped just
+  // as easily as mono.
+ *available = true;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoPlayout(bool /*enable*/) {
+  // No playout device, just dropping audio. Stereo can be dropped just
+  // as easily as mono.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayout(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecordingIsAvailable(
+ bool* available) const {
+ // Keep thing simple. No stereo recording.
+ *available = false;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoRecording(bool enable) {
+ if (!enable) {
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecording(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingChannel(
+ const ChannelType channel) {
+ if (channel != AudioDeviceModule::kChannelBoth) {
+ // There is no right or left in mono. I.e. kChannelBoth should be used for
+ // mono.
+ ASSERT(false);
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingChannel(ChannelType* channel) const {
+ // Stereo recording not supported. However, WebRTC ADM returns kChannelBoth
+ // in that case. Do the same here.
+ *channel = AudioDeviceModule::kChannelBoth;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutBuffer(const BufferType /*type*/,
+ uint16_t /*size_ms*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutBuffer(BufferType* /*type*/,
+ uint16_t* /*size_ms*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDelay(uint16_t* delay_ms) const {
+ // No delay since audio frames are dropped.
+ *delay_ms = 0;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDelay(uint16_t* /*delay_ms*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::CPULoad(uint16_t* /*load*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartRawOutputFileRecording(
+ const char /*pcm_file_name_utf8*/[webrtc::kAdmMaxFileNameSize]) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRawOutputFileRecording() {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartRawInputFileRecording(
+ const char /*pcm_file_name_utf8*/[webrtc::kAdmMaxFileNameSize]) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRawInputFileRecording() {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingSampleRate(
+ const uint32_t /*samples_per_sec*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingSampleRate(
+ uint32_t* /*samples_per_sec*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutSampleRate(
+ const uint32_t /*samples_per_sec*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutSampleRate(
+ uint32_t* /*samples_per_sec*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::ResetAudioDevice() {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetLoudspeakerStatus(bool /*enable*/) {
+ ASSERT(false);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::GetLoudspeakerStatus(bool* /*enabled*/) const {
+ ASSERT(false);
+ return 0;
+}
+
+void FakeAudioCaptureModule::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case MSG_START_PROCESS:
+ StartProcessP();
+ break;
+ case MSG_RUN_PROCESS:
+ ProcessFrameP();
+ break;
+ default:
+ // All existing messages should be caught. Getting here should never
+ // happen.
+ ASSERT(false);
+ }
+}
+
+bool FakeAudioCaptureModule::Initialize() {
+ // Set the send buffer samples high enough that it would not occur on the
+ // remote side unless a packet containing a sample of that magnitude has been
+ // sent to it. Note that the audio processing pipeline will likely distort the
+ // original signal.
+ SetSendBuffer(kHighSampleValue);
+ last_process_time_ms_ = rtc::Time();
+ return true;
+}
+
+void FakeAudioCaptureModule::SetSendBuffer(int value) {
+ Sample* buffer_ptr = reinterpret_cast<Sample*>(send_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(send_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ buffer_ptr[i] = value;
+ }
+}
+
+void FakeAudioCaptureModule::ResetRecBuffer() {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+}
+
+bool FakeAudioCaptureModule::CheckRecBuffer(int value) {
+ const Sample* buffer_ptr = reinterpret_cast<const Sample*>(rec_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(rec_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ if (buffer_ptr[i] >= value) return true;
+ }
+ return false;
+}
+
+bool FakeAudioCaptureModule::ShouldStartProcessing() {
+ return recording_ || playing_;
+}
+
+void FakeAudioCaptureModule::UpdateProcessing(bool start) {
+ if (start) {
+ if (!process_thread_) {
+ process_thread_.reset(new rtc::Thread());
+ process_thread_->Start();
+ }
+ process_thread_->Post(this, MSG_START_PROCESS);
+ } else {
+ if (process_thread_) {
+ process_thread_->Stop();
+ process_thread_.reset(nullptr);
+ }
+ started_ = false;
+ }
+}
+
+void FakeAudioCaptureModule::StartProcessP() {
+ ASSERT(process_thread_->IsCurrent());
+ if (started_) {
+ // Already started.
+ return;
+ }
+ ProcessFrameP();
+}
+
+void FakeAudioCaptureModule::ProcessFrameP() {
+ ASSERT(process_thread_->IsCurrent());
+ if (!started_) {
+ next_frame_time_ = rtc::Time();
+ started_ = true;
+ }
+
+ {
+ rtc::CritScope cs(&crit_);
+ // Receive and send frames every kTimePerFrameMs.
+ if (playing_) {
+ ReceiveFrameP();
+ }
+ if (recording_) {
+ SendFrameP();
+ }
+ }
+
+ next_frame_time_ += kTimePerFrameMs;
+ const uint32_t current_time = rtc::Time();
+ const uint32_t wait_time =
+ (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
+ process_thread_->PostDelayed(wait_time, this, MSG_RUN_PROCESS);
+}
+
+void FakeAudioCaptureModule::ReceiveFrameP() {
+ ASSERT(process_thread_->IsCurrent());
+ {
+ rtc::CritScope cs(&crit_callback_);
+ if (!audio_callback_) {
+ return;
+ }
+ ResetRecBuffer();
+ size_t nSamplesOut = 0;
+ int64_t elapsed_time_ms = 0;
+ int64_t ntp_time_ms = 0;
+ if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond,
+ rec_buffer_, nSamplesOut,
+ &elapsed_time_ms, &ntp_time_ms) != 0) {
+ ASSERT(false);
+ }
+ ASSERT(nSamplesOut == kNumberSamples);
+ }
+ // The SetBuffer() function ensures that after decoding, the audio buffer
+ // should contain samples of similar magnitude (there is likely to be some
+ // distortion due to the audio pipeline). If one sample is detected to
+ // have the same or greater magnitude somewhere in the frame, an actual frame
+ // has been received from the remote side (i.e. faked frames are not being
+ // pulled).
+ if (CheckRecBuffer(kHighSampleValue)) {
+ rtc::CritScope cs(&crit_);
+ ++frames_received_;
+ }
+}
+
+void FakeAudioCaptureModule::SendFrameP() {
+ ASSERT(process_thread_->IsCurrent());
+ rtc::CritScope cs(&crit_callback_);
+ if (!audio_callback_) {
+ return;
+ }
+ bool key_pressed = false;
+ uint32_t current_mic_level = 0;
+ MicrophoneVolume(&current_mic_level);
+ if (audio_callback_->RecordedDataIsAvailable(send_buffer_, kNumberSamples,
+ kNumberBytesPerSample,
+ kNumberOfChannels,
+ kSamplesPerSecond, kTotalDelayMs,
+ kClockDriftMs, current_mic_level,
+ key_pressed,
+ current_mic_level) != 0) {
+ ASSERT(false);
+ }
+ SetMicrophoneVolume(current_mic_level);
+}
+
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule.h b/talk/app/webrtc/test/fakeaudiocapturemodule.h
new file mode 100644
index 0000000000..4284b9ed51
--- /dev/null
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule.h
@@ -0,0 +1,287 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This class implements an AudioCaptureModule that can be used to detect if
+// audio is being received properly if it is fed by another AudioCaptureModule
+// in some arbitrary audio pipeline where they are connected. It does not play
+// out or record any audio so it does not need access to any hardware and can
+// therefore be used in the gtest testing framework.
+
+// Note P postfix of a function indicates that it should only be called by the
+// processing thread.
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEAUDIOCAPTUREMODULE_H_
+#define TALK_APP_WEBRTC_TEST_FAKEAUDIOCAPTUREMODULE_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_device/include/audio_device.h"
+
+namespace rtc {
+class Thread;
+} // namespace rtc
+
+class FakeAudioCaptureModule
+ : public webrtc::AudioDeviceModule,
+ public rtc::MessageHandler {
+ public:
+ typedef uint16_t Sample;
+
+  // The values for the following constants have been derived by running VoE
+ // using a real ADM. The constants correspond to 10ms of mono audio at 44kHz.
+ static const size_t kNumberSamples = 440;
+ static const size_t kNumberBytesPerSample = sizeof(Sample);
+
+ // Creates a FakeAudioCaptureModule or returns NULL on failure.
+ static rtc::scoped_refptr<FakeAudioCaptureModule> Create();
+
+ // Returns the number of frames that have been successfully pulled by the
+ // instance. Note that correctly detecting success can only be done if the
+ // pulled frame was generated/pushed from a FakeAudioCaptureModule.
+ int frames_received() const;
+
+ // Following functions are inherited from webrtc::AudioDeviceModule.
+ // Only functions called by PeerConnection are implemented, the rest do
+ // nothing and return success. If a function is not expected to be called by
+ // PeerConnection an assertion is triggered if it is in fact called.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
+
+ ErrorCode LastError() const override;
+ int32_t RegisterEventObserver(
+ webrtc::AudioDeviceObserver* event_callback) override;
+
+ // Note: Calling this method from a callback may result in deadlock.
+ int32_t RegisterAudioCallback(
+ webrtc::AudioTransport* audio_callback) override;
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+ int32_t PlayoutIsAvailable(bool* available) override;
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+ int32_t RecordingIsAvailable(bool* available) override;
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ int32_t SetAGC(bool enable) override;
+ bool AGC() const override;
+
+ int32_t SetWaveOutVolume(uint16_t volume_left,
+ uint16_t volume_right) override;
+ int32_t WaveOutVolume(uint16_t* volume_left,
+ uint16_t* volume_right) const override;
+
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+
+ int32_t SpeakerVolumeIsAvailable(bool* available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t* volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t* max_volume) const override;
+ int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
+ int32_t SpeakerVolumeStepSize(uint16_t* step_size) const override;
+
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const override;
+ int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
+
+ int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
+ int32_t MicrophoneVolumeStepSize(uint16_t* step_size) const override;
+
+ int32_t SpeakerMuteIsAvailable(bool* available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool* enabled) const override;
+
+ int32_t MicrophoneMuteIsAvailable(bool* available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool* enabled) const override;
+
+ int32_t MicrophoneBoostIsAvailable(bool* available) override;
+ int32_t SetMicrophoneBoost(bool enable) override;
+ int32_t MicrophoneBoost(bool* enabled) const override;
+
+ int32_t StereoPlayoutIsAvailable(bool* available) const override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool* enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool* available) const override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool* enabled) const override;
+ int32_t SetRecordingChannel(const ChannelType channel) override;
+ int32_t RecordingChannel(ChannelType* channel) const override;
+
+ int32_t SetPlayoutBuffer(const BufferType type,
+ uint16_t size_ms = 0) override;
+ int32_t PlayoutBuffer(BufferType* type, uint16_t* size_ms) const override;
+ int32_t PlayoutDelay(uint16_t* delay_ms) const override;
+ int32_t RecordingDelay(uint16_t* delay_ms) const override;
+
+ int32_t CPULoad(uint16_t* load) const override;
+
+ int32_t StartRawOutputFileRecording(
+ const char pcm_file_name_utf8[webrtc::kAdmMaxFileNameSize]) override;
+ int32_t StopRawOutputFileRecording() override;
+ int32_t StartRawInputFileRecording(
+ const char pcm_file_name_utf8[webrtc::kAdmMaxFileNameSize]) override;
+ int32_t StopRawInputFileRecording() override;
+
+ int32_t SetRecordingSampleRate(const uint32_t samples_per_sec) override;
+ int32_t RecordingSampleRate(uint32_t* samples_per_sec) const override;
+ int32_t SetPlayoutSampleRate(const uint32_t samples_per_sec) override;
+ int32_t PlayoutSampleRate(uint32_t* samples_per_sec) const override;
+
+ int32_t ResetAudioDevice() override;
+ int32_t SetLoudspeakerStatus(bool enable) override;
+ int32_t GetLoudspeakerStatus(bool* enabled) const override;
+ virtual bool BuiltInAECIsAvailable() const { return false; }
+ virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
+ virtual bool BuiltInAGCIsAvailable() const { return false; }
+ virtual int32_t EnableBuiltInAGC(bool enable) { return -1; }
+ virtual bool BuiltInNSIsAvailable() const { return false; }
+ virtual int32_t EnableBuiltInNS(bool enable) { return -1; }
+ // End of functions inherited from webrtc::AudioDeviceModule.
+
+ // The following function is inherited from rtc::MessageHandler.
+ void OnMessage(rtc::Message* msg) override;
+
+ protected:
+ // The constructor is protected because the class needs to be created as a
+  // reference counted object (for memory management reasons). It could be
+ // exposed in which case the burden of proper instantiation would be put on
+ // the creator of a FakeAudioCaptureModule instance. To create an instance of
+ // this class use the Create(..) API.
+ explicit FakeAudioCaptureModule();
+ // The destructor is protected because it is reference counted and should not
+ // be deleted directly.
+ virtual ~FakeAudioCaptureModule();
+
+ private:
+ // Initializes the state of the FakeAudioCaptureModule. This API is called on
+ // creation by the Create() API.
+ bool Initialize();
+  // SetSendBuffer() sets all samples in send_buffer_ to |value|.
+ void SetSendBuffer(int value);
+ // Resets rec_buffer_. I.e., sets all rec_buffer_ samples to 0.
+ void ResetRecBuffer();
+  // Returns true if rec_buffer_ contains one or more samples greater than or
+ // equal to |value|.
+ bool CheckRecBuffer(int value);
+
+ // Returns true/false depending on if recording or playback has been
+ // enabled/started.
+ bool ShouldStartProcessing();
+
+ // Starts or stops the pushing and pulling of audio frames.
+ void UpdateProcessing(bool start);
+
+ // Starts the periodic calling of ProcessFrame() in a thread safe way.
+ void StartProcessP();
+  // Periodically called function that ensures that frames are pulled and pushed
+ // periodically if enabled/started.
+ void ProcessFrameP();
+ // Pulls frames from the registered webrtc::AudioTransport.
+ void ReceiveFrameP();
+ // Pushes frames to the registered webrtc::AudioTransport.
+ void SendFrameP();
+
+ // The time in milliseconds when Process() was last called or 0 if no call
+ // has been made.
+ uint32_t last_process_time_ms_;
+
+ // Callback for playout and recording.
+ webrtc::AudioTransport* audio_callback_;
+
+ bool recording_; // True when audio is being pushed from the instance.
+ bool playing_; // True when audio is being pulled by the instance.
+
+ bool play_is_initialized_; // True when the instance is ready to pull audio.
+ bool rec_is_initialized_; // True when the instance is ready to push audio.
+
+ // Input to and output from RecordedDataIsAvailable(..) makes it possible to
+ // modify the current mic level. The implementation does not care about the
+ // mic level so it just feeds back what it receives.
+ uint32_t current_mic_level_;
+
+ // next_frame_time_ is updated in a non-drifting manner to indicate the next
+ // wall clock time the next frame should be generated and received. started_
+ // ensures that next_frame_time_ can be initialized properly on first call.
+ bool started_;
+ uint32_t next_frame_time_;
+
+ rtc::scoped_ptr<rtc::Thread> process_thread_;
+
+ // Buffer for storing samples received from the webrtc::AudioTransport.
+ char rec_buffer_[kNumberSamples * kNumberBytesPerSample];
+ // Buffer for samples to send to the webrtc::AudioTransport.
+ char send_buffer_[kNumberSamples * kNumberBytesPerSample];
+
+ // Counter of frames received that have samples of high enough amplitude to
+ // indicate that the frames are not faked somewhere in the audio pipeline
+ // (e.g. by a jitter buffer).
+ int frames_received_;
+
+ // Protects variables that are accessed from process_thread_ and
+ // the main thread.
+ mutable rtc::CriticalSection crit_;
+ // Protects |audio_callback_| that is accessed from process_thread_ and
+ // the main thread.
+ rtc::CriticalSection crit_callback_;
+};
+
+#endif // TALK_APP_WEBRTC_TEST_FAKEAUDIOCAPTUREMODULE_H_
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc b/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
new file mode 100644
index 0000000000..e2dc12375b
--- /dev/null
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
@@ -0,0 +1,216 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/test/fakeaudiocapturemodule.h"
+
+#include <algorithm>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+
+using std::min;
+
+class FakeAdmTest : public testing::Test,
+ public webrtc::AudioTransport {
+ protected:
+ static const int kMsInSecond = 1000;
+
+ FakeAdmTest()
+ : push_iterations_(0),
+ pull_iterations_(0),
+ rec_buffer_bytes_(0) {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+ }
+
+ virtual void SetUp() {
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ EXPECT_TRUE(fake_audio_capture_module_.get() != NULL);
+ }
+
+ // Callbacks inherited from webrtc::AudioTransport.
+ // ADM is pushing data.
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
+ rtc::CritScope cs(&crit_);
+ rec_buffer_bytes_ = nSamples * nBytesPerSample;
+ if ((rec_buffer_bytes_ == 0) ||
+ (rec_buffer_bytes_ > FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample)) {
+ ADD_FAILURE();
+ return -1;
+ }
+ memcpy(rec_buffer_, audioSamples, rec_buffer_bytes_);
+ ++push_iterations_;
+ newMicLevel = currentMicLevel;
+ return 0;
+ }
+
+ // ADM is pulling data.
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
+ rtc::CritScope cs(&crit_);
+ ++pull_iterations_;
+ const size_t audio_buffer_size = nSamples * nBytesPerSample;
+ const size_t bytes_out = RecordedDataReceived() ?
+ CopyFromRecBuffer(audioSamples, audio_buffer_size):
+ GenerateZeroBuffer(audioSamples, audio_buffer_size);
+ nSamplesOut = bytes_out / nBytesPerSample;
+ *elapsed_time_ms = 0;
+ *ntp_time_ms = 0;
+ return 0;
+ }
+
+ int push_iterations() const {
+ rtc::CritScope cs(&crit_);
+ return push_iterations_;
+ }
+ int pull_iterations() const {
+ rtc::CritScope cs(&crit_);
+ return pull_iterations_;
+ }
+
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+
+ private:
+ bool RecordedDataReceived() const {
+ return rec_buffer_bytes_ != 0;
+ }
+ size_t GenerateZeroBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ memset(audio_buffer, 0, audio_buffer_size);
+ return audio_buffer_size;
+ }
+ size_t CopyFromRecBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ EXPECT_EQ(audio_buffer_size, rec_buffer_bytes_);
+ const size_t min_buffer_size = min(audio_buffer_size, rec_buffer_bytes_);
+ memcpy(audio_buffer, rec_buffer_, min_buffer_size);
+ return min_buffer_size;
+ }
+
+ mutable rtc::CriticalSection crit_;
+
+ int push_iterations_;
+ int pull_iterations_;
+
+ char rec_buffer_[FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample];
+ size_t rec_buffer_bytes_;
+};
+
+TEST_F(FakeAdmTest, TestProccess) {
+ // Next process call must be some time in the future (or now).
+ EXPECT_LE(0, fake_audio_capture_module_->TimeUntilNextProcess());
+ // Process call updates TimeUntilNextProcess() but there are no guarantees on
+  // timing so just check that Process can be called successfully.
+ EXPECT_LE(0, fake_audio_capture_module_->Process());
+}
+
+TEST_F(FakeAdmTest, PlayoutTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0,
+ fake_audio_capture_module_->StereoPlayoutIsAvailable(
+ &stereo_available));
+ EXPECT_TRUE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->Playing());
+
+ uint16_t delay_ms = 10;
+ EXPECT_EQ(0, fake_audio_capture_module_->PlayoutDelay(&delay_ms));
+ EXPECT_EQ(0, delay_ms);
+
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, push_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+}
+
+TEST_F(FakeAdmTest, RecordTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0, fake_audio_capture_module_->StereoRecordingIsAvailable(
+ &stereo_available));
+ EXPECT_FALSE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_TRUE(fake_audio_capture_module_->Recording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, pull_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+}
+
+TEST_F(FakeAdmTest, DuplexTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+}
diff --git a/talk/app/webrtc/test/fakeconstraints.h b/talk/app/webrtc/test/fakeconstraints.h
new file mode 100644
index 0000000000..8673d85097
--- /dev/null
+++ b/talk/app/webrtc/test/fakeconstraints.h
@@ -0,0 +1,133 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKECONSTRAINTS_H_
+#define TALK_APP_WEBRTC_TEST_FAKECONSTRAINTS_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+class FakeConstraints : public webrtc::MediaConstraintsInterface {
+ public:
+ FakeConstraints() { }
+ virtual ~FakeConstraints() { }
+
+ virtual const Constraints& GetMandatory() const {
+ return mandatory_;
+ }
+
+ virtual const Constraints& GetOptional() const {
+ return optional_;
+ }
+
+ template <class T>
+ void AddMandatory(const std::string& key, const T& value) {
+ mandatory_.push_back(Constraint(key, rtc::ToString<T>(value)));
+ }
+
+ template <class T>
+ void SetMandatory(const std::string& key, const T& value) {
+ std::string value_str;
+ if (mandatory_.FindFirst(key, &value_str)) {
+ for (Constraints::iterator iter = mandatory_.begin();
+ iter != mandatory_.end(); ++iter) {
+ if (iter->key == key) {
+ mandatory_.erase(iter);
+ break;
+ }
+ }
+ }
+ mandatory_.push_back(Constraint(key, rtc::ToString<T>(value)));
+ }
+
+ template <class T>
+ void AddOptional(const std::string& key, const T& value) {
+ optional_.push_back(Constraint(key, rtc::ToString<T>(value)));
+ }
+
+ void SetMandatoryMinAspectRatio(double ratio) {
+ SetMandatory(MediaConstraintsInterface::kMinAspectRatio, ratio);
+ }
+
+ void SetMandatoryMinWidth(int width) {
+ SetMandatory(MediaConstraintsInterface::kMinWidth, width);
+ }
+
+ void SetMandatoryMinHeight(int height) {
+ SetMandatory(MediaConstraintsInterface::kMinHeight, height);
+ }
+
+ void SetOptionalMaxWidth(int width) {
+ AddOptional(MediaConstraintsInterface::kMaxWidth, width);
+ }
+
+ void SetMandatoryMaxFrameRate(int frame_rate) {
+ SetMandatory(MediaConstraintsInterface::kMaxFrameRate, frame_rate);
+ }
+
+ void SetMandatoryReceiveAudio(bool enable) {
+ SetMandatory(MediaConstraintsInterface::kOfferToReceiveAudio, enable);
+ }
+
+ void SetMandatoryReceiveVideo(bool enable) {
+ SetMandatory(MediaConstraintsInterface::kOfferToReceiveVideo, enable);
+ }
+
+ void SetMandatoryUseRtpMux(bool enable) {
+ SetMandatory(MediaConstraintsInterface::kUseRtpMux, enable);
+ }
+
+ void SetMandatoryIceRestart(bool enable) {
+ SetMandatory(MediaConstraintsInterface::kIceRestart, enable);
+ }
+
+ void SetAllowRtpDataChannels() {
+ SetMandatory(MediaConstraintsInterface::kEnableRtpDataChannels, true);
+ SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, false);
+ }
+
+ void SetOptionalVAD(bool enable) {
+ AddOptional(MediaConstraintsInterface::kVoiceActivityDetection, enable);
+ }
+
+ void SetAllowDtlsSctpDataChannels() {
+ SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, true);
+ }
+
+ private:
+ Constraints mandatory_;
+ Constraints optional_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_FAKECONSTRAINTS_H_
diff --git a/talk/app/webrtc/test/fakedatachannelprovider.h b/talk/app/webrtc/test/fakedatachannelprovider.h
new file mode 100644
index 0000000000..ff44e585fe
--- /dev/null
+++ b/talk/app/webrtc/test/fakedatachannelprovider.h
@@ -0,0 +1,161 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEDATACHANNELPROVIDER_H_
+#define TALK_APP_WEBRTC_TEST_FAKEDATACHANNELPROVIDER_H_
+
+#include "talk/app/webrtc/datachannel.h"
+
+class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
+ public:
+ FakeDataChannelProvider()
+ : send_blocked_(false),
+ transport_available_(false),
+ ready_to_send_(false),
+ transport_error_(false) {}
+ virtual ~FakeDataChannelProvider() {}
+
+ bool SendData(const cricket::SendDataParams& params,
+ const rtc::Buffer& payload,
+ cricket::SendDataResult* result) override {
+ ASSERT(ready_to_send_ && transport_available_);
+ if (send_blocked_) {
+ *result = cricket::SDR_BLOCK;
+ return false;
+ }
+
+ if (transport_error_ || payload.size() == 0) {
+ *result = cricket::SDR_ERROR;
+ return false;
+ }
+
+ last_send_data_params_ = params;
+ return true;
+ }
+
+ bool ConnectDataChannel(webrtc::DataChannel* data_channel) override {
+ ASSERT(connected_channels_.find(data_channel) == connected_channels_.end());
+ if (!transport_available_) {
+ return false;
+ }
+ LOG(LS_INFO) << "DataChannel connected " << data_channel;
+ connected_channels_.insert(data_channel);
+ return true;
+ }
+
+ void DisconnectDataChannel(webrtc::DataChannel* data_channel) override {
+ ASSERT(connected_channels_.find(data_channel) != connected_channels_.end());
+ LOG(LS_INFO) << "DataChannel disconnected " << data_channel;
+ connected_channels_.erase(data_channel);
+ }
+
+ void AddSctpDataStream(int sid) override {
+ ASSERT(sid >= 0);
+ if (!transport_available_) {
+ return;
+ }
+ send_ssrcs_.insert(sid);
+ recv_ssrcs_.insert(sid);
+ }
+
+ void RemoveSctpDataStream(int sid) override {
+ ASSERT(sid >= 0);
+ send_ssrcs_.erase(sid);
+ recv_ssrcs_.erase(sid);
+ }
+
+ bool ReadyToSendData() const override { return ready_to_send_; }
+
+ // Set true to emulate the SCTP stream being blocked by congestion control.
+ void set_send_blocked(bool blocked) {
+ send_blocked_ = blocked;
+ if (!blocked) {
+ // Take a snapshot of the connected channels and check to see whether
+ // each value is still in connected_channels_ before calling
+ // OnChannelReady(). This avoids problems where the set gets modified
+ // in response to OnChannelReady().
+ for (webrtc::DataChannel *ch : std::set<webrtc::DataChannel*>(
+ connected_channels_.begin(), connected_channels_.end())) {
+ if (connected_channels_.count(ch)) {
+ ch->OnChannelReady(true);
+ }
+ }
+ }
+ }
+
+ // Set true to emulate the transport channel creation, e.g. after
+ // setLocalDescription/setRemoteDescription called with data content.
+ void set_transport_available(bool available) {
+ transport_available_ = available;
+ }
+
+ // Set true to emulate the transport ReadyToSendData signal when the transport
+ // becomes writable for the first time.
+ void set_ready_to_send(bool ready) {
+ ASSERT(transport_available_);
+ ready_to_send_ = ready;
+ if (ready) {
+ std::set<webrtc::DataChannel*>::iterator it;
+ for (it = connected_channels_.begin();
+ it != connected_channels_.end();
+ ++it) {
+ (*it)->OnChannelReady(true);
+ }
+ }
+ }
+
+ void set_transport_error() {
+ transport_error_ = true;
+ }
+
+ cricket::SendDataParams last_send_data_params() const {
+ return last_send_data_params_;
+ }
+
+ bool IsConnected(webrtc::DataChannel* data_channel) const {
+ return connected_channels_.find(data_channel) != connected_channels_.end();
+ }
+
+ bool IsSendStreamAdded(uint32_t stream) const {
+ return send_ssrcs_.find(stream) != send_ssrcs_.end();
+ }
+
+ bool IsRecvStreamAdded(uint32_t stream) const {
+ return recv_ssrcs_.find(stream) != recv_ssrcs_.end();
+ }
+
+ private:
+ cricket::SendDataParams last_send_data_params_;
+ bool send_blocked_;
+ bool transport_available_;
+ bool ready_to_send_;
+ bool transport_error_;
+ std::set<webrtc::DataChannel*> connected_channels_;
+ std::set<uint32_t> send_ssrcs_;
+ std::set<uint32_t> recv_ssrcs_;
+};
+#endif // TALK_APP_WEBRTC_TEST_FAKEDATACHANNELPROVIDER_H_
diff --git a/talk/app/webrtc/test/fakedtlsidentitystore.h b/talk/app/webrtc/test/fakedtlsidentitystore.h
new file mode 100644
index 0000000000..0f9bdb9e6c
--- /dev/null
+++ b/talk/app/webrtc/test/fakedtlsidentitystore.h
@@ -0,0 +1,143 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEDTLSIDENTITYSERVICE_H_
+#define TALK_APP_WEBRTC_TEST_FAKEDTLSIDENTITYSERVICE_H_
+
+#include <string>
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/base/rtccertificate.h"
+
+static const char kRSA_PRIVATE_KEY_PEM[] =
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n";
+
+static const char kCERT_PEM[] =
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n";
+
+class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
+ public rtc::MessageHandler {
+ public:
+ typedef rtc::TypedMessageData<rtc::scoped_refptr<
+ webrtc::DtlsIdentityRequestObserver> > MessageData;
+
+ FakeDtlsIdentityStore() : should_fail_(false) {}
+
+ void set_should_fail(bool should_fail) {
+ should_fail_ = should_fail;
+ }
+
+ void RequestIdentity(
+ rtc::KeyType key_type,
+ const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
+ observer) override {
+ // TODO(hbos): Should be able to generate KT_ECDSA too.
+ RTC_DCHECK(key_type == rtc::KT_RSA || should_fail_);
+ MessageData* msg = new MessageData(
+ rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>(observer));
+ rtc::Thread::Current()->Post(
+ this, should_fail_ ? MSG_FAILURE : MSG_SUCCESS, msg);
+ }
+
+ static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
+ std::string cert;
+ std::string key;
+ rtc::SSLIdentity::PemToDer("CERTIFICATE", kCERT_PEM, &cert);
+ rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", kRSA_PRIVATE_KEY_PEM, &key);
+
+ std::string pem_cert = rtc::SSLIdentity::DerToPem(
+ rtc::kPemTypeCertificate,
+ reinterpret_cast<const unsigned char*>(cert.data()),
+ cert.length());
+ std::string pem_key = rtc::SSLIdentity::DerToPem(
+ rtc::kPemTypeRsaPrivateKey,
+ reinterpret_cast<const unsigned char*>(key.data()),
+ key.length());
+ rtc::scoped_ptr<rtc::SSLIdentity> identity(
+ rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
+
+ return rtc::RTCCertificate::Create(identity.Pass());
+ }
+
+ private:
+ enum {
+ MSG_SUCCESS,
+ MSG_FAILURE,
+ };
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) {
+ MessageData* message_data = static_cast<MessageData*>(msg->pdata);
+ rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver> observer =
+ message_data->data();
+ switch (msg->message_id) {
+ case MSG_SUCCESS: {
+ std::string cert;
+ std::string key;
+ rtc::SSLIdentity::PemToDer("CERTIFICATE", kCERT_PEM, &cert);
+ rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", kRSA_PRIVATE_KEY_PEM,
+ &key);
+ observer->OnSuccess(cert, key);
+ break;
+ }
+ case MSG_FAILURE:
+ observer->OnFailure(0);
+ break;
+ }
+ delete message_data;
+ }
+
+ bool should_fail_;
+};
+
+#endif // TALK_APP_WEBRTC_TEST_FAKEDTLSIDENTITYSERVICE_H_
diff --git a/talk/app/webrtc/test/fakemediastreamsignaling.h b/talk/app/webrtc/test/fakemediastreamsignaling.h
new file mode 100644
index 0000000000..562c4ad306
--- /dev/null
+++ b/talk/app/webrtc/test/fakemediastreamsignaling.h
@@ -0,0 +1,140 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
+#define TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/videotrack.h"
+
+static const char kStream1[] = "stream1";
+static const char kVideoTrack1[] = "video1";
+static const char kAudioTrack1[] = "audio1";
+
+static const char kStream2[] = "stream2";
+static const char kVideoTrack2[] = "video2";
+static const char kAudioTrack2[] = "audio2";
+
+class FakeMediaStreamSignaling : public webrtc::MediaStreamSignaling,
+ public webrtc::MediaStreamSignalingObserver {
+ public:
+ explicit FakeMediaStreamSignaling(cricket::ChannelManager* channel_manager) :
+ webrtc::MediaStreamSignaling(rtc::Thread::Current(), this,
+ channel_manager) {
+ }
+
+ void SendAudioVideoStream1() {
+ ClearLocalStreams();
+ AddLocalStream(CreateStream(kStream1, kAudioTrack1, kVideoTrack1));
+ }
+
+ void SendAudioVideoStream2() {
+ ClearLocalStreams();
+ AddLocalStream(CreateStream(kStream2, kAudioTrack2, kVideoTrack2));
+ }
+
+ void SendAudioVideoStream1And2() {
+ ClearLocalStreams();
+ AddLocalStream(CreateStream(kStream1, kAudioTrack1, kVideoTrack1));
+ AddLocalStream(CreateStream(kStream2, kAudioTrack2, kVideoTrack2));
+ }
+
+ void SendNothing() {
+ ClearLocalStreams();
+ }
+
+ void UseOptionsAudioOnly() {
+ ClearLocalStreams();
+ AddLocalStream(CreateStream(kStream2, kAudioTrack2, ""));
+ }
+
+ void UseOptionsVideoOnly() {
+ ClearLocalStreams();
+ AddLocalStream(CreateStream(kStream2, "", kVideoTrack2));
+ }
+
+ void ClearLocalStreams() {
+ while (local_streams()->count() != 0) {
+ RemoveLocalStream(local_streams()->at(0));
+ }
+ }
+
+ // Implements MediaStreamSignalingObserver.
+ virtual void OnAddRemoteStream(webrtc::MediaStreamInterface* stream) {}
+ virtual void OnRemoveRemoteStream(webrtc::MediaStreamInterface* stream) {}
+ virtual void OnAddDataChannel(webrtc::DataChannelInterface* data_channel) {}
+ virtual void OnAddLocalAudioTrack(webrtc::MediaStreamInterface* stream,
+ webrtc::AudioTrackInterface* audio_track,
+ uint32_t ssrc) {}
+ virtual void OnAddLocalVideoTrack(webrtc::MediaStreamInterface* stream,
+ webrtc::VideoTrackInterface* video_track,
+ uint32_t ssrc) {}
+ virtual void OnAddRemoteAudioTrack(webrtc::MediaStreamInterface* stream,
+ webrtc::AudioTrackInterface* audio_track,
+ uint32_t ssrc) {}
+ virtual void OnAddRemoteVideoTrack(webrtc::MediaStreamInterface* stream,
+ webrtc::VideoTrackInterface* video_track,
+ uint32_t ssrc) {}
+ virtual void OnRemoveRemoteAudioTrack(
+ webrtc::MediaStreamInterface* stream,
+ webrtc::AudioTrackInterface* audio_track) {}
+ virtual void OnRemoveRemoteVideoTrack(
+ webrtc::MediaStreamInterface* stream,
+ webrtc::VideoTrackInterface* video_track) {}
+ virtual void OnRemoveLocalAudioTrack(webrtc::MediaStreamInterface* stream,
+ webrtc::AudioTrackInterface* audio_track,
+ uint32_t ssrc) {}
+ virtual void OnRemoveLocalVideoTrack(
+ webrtc::MediaStreamInterface* stream,
+ webrtc::VideoTrackInterface* video_track) {}
+ virtual void OnRemoveLocalStream(webrtc::MediaStreamInterface* stream) {}
+
+ private:
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> CreateStream(
+ const std::string& stream_label,
+ const std::string& audio_track_id,
+ const std::string& video_track_id) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+ webrtc::MediaStream::Create(stream_label));
+
+ if (!audio_track_id.empty()) {
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(audio_track_id, NULL));
+ stream->AddTrack(audio_track);
+ }
+
+ if (!video_track_id.empty()) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(video_track_id, NULL));
+ stream->AddTrack(video_track);
+ }
+ return stream;
+ }
+};
+
+#endif // TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
diff --git a/talk/app/webrtc/test/fakeperiodicvideocapturer.h b/talk/app/webrtc/test/fakeperiodicvideocapturer.h
new file mode 100644
index 0000000000..34e4278ba7
--- /dev/null
+++ b/talk/app/webrtc/test/fakeperiodicvideocapturer.h
@@ -0,0 +1,89 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// FakePeriodicVideoCapturer implements a fake cricket::VideoCapturer that
+// creates video frames periodically after it has been started.
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEPERIODICVIDEOCAPTURER_H_
+#define TALK_APP_WEBRTC_TEST_FAKEPERIODICVIDEOCAPTURER_H_
+
+#include "talk/media/base/fakevideocapturer.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer {
+ public:
+ FakePeriodicVideoCapturer() {
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 480,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 360,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(320, 240,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(160, 120,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ ResetSupportedFormats(formats);
+ };
+
+ virtual cricket::CaptureState Start(const cricket::VideoFormat& format) {
+ cricket::CaptureState state = FakeVideoCapturer::Start(format);
+ if (state != cricket::CS_FAILED) {
+ rtc::Thread::Current()->Post(this, MSG_CREATEFRAME);
+ }
+ return state;
+ }
+ virtual void Stop() {
+ rtc::Thread::Current()->Clear(this);
+ }
+ // Inherited from MessageHandler.
+ virtual void OnMessage(rtc::Message* msg) {
+ if (msg->message_id == MSG_CREATEFRAME) {
+ if (IsRunning()) {
+ CaptureFrame();
+ rtc::Thread::Current()->PostDelayed(static_cast<int>(
+ GetCaptureFormat()->interval / rtc::kNumNanosecsPerMillisec),
+ this, MSG_CREATEFRAME);
+ }
+ } else {
+ FakeVideoCapturer::OnMessage(msg);
+ }
+ }
+
+ private:
+ enum {
+ // Offset 0xFF to make sure this doesn't collide with base class messages.
+ MSG_CREATEFRAME = 0xFF
+ };
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_FAKEPERIODICVIDEOCAPTURER_H_
diff --git a/talk/app/webrtc/test/fakevideotrackrenderer.h b/talk/app/webrtc/test/fakevideotrackrenderer.h
new file mode 100644
index 0000000000..38b84a6aff
--- /dev/null
+++ b/talk/app/webrtc/test/fakevideotrackrenderer.h
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_FAKEVIDEOTRACKRENDERER_H_
+#define TALK_APP_WEBRTC_TEST_FAKEVIDEOTRACKRENDERER_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/media/base/fakevideorenderer.h"
+
+namespace webrtc {
+
+class FakeVideoTrackRenderer : public VideoRendererInterface {
+ public:
+ FakeVideoTrackRenderer(VideoTrackInterface* video_track)
+ : video_track_(video_track), last_frame_(NULL) {
+ video_track_->AddRenderer(this);
+ }
+ ~FakeVideoTrackRenderer() {
+ video_track_->RemoveRenderer(this);
+ }
+
+ virtual void RenderFrame(const cricket::VideoFrame* video_frame) override {
+ last_frame_ = const_cast<cricket::VideoFrame*>(video_frame);
+ if (!fake_renderer_.SetSize(static_cast<int>(video_frame->GetWidth()),
+ static_cast<int>(video_frame->GetHeight()),
+ 0)) {
+ return;
+ }
+
+ fake_renderer_.RenderFrame(video_frame);
+ }
+
+ int errors() const { return fake_renderer_.errors(); }
+ int width() const { return fake_renderer_.width(); }
+ int height() const { return fake_renderer_.height(); }
+ int num_rendered_frames() const {
+ return fake_renderer_.num_rendered_frames();
+ }
+ const cricket::VideoFrame* last_frame() const { return last_frame_; }
+
+ private:
+ cricket::FakeVideoRenderer fake_renderer_;
+ rtc::scoped_refptr<VideoTrackInterface> video_track_;
+
+ // Weak reference for frame pointer comparison only.
+ cricket::VideoFrame* last_frame_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_FAKEVIDEOTRACKRENDERER_H_
diff --git a/talk/app/webrtc/test/mockpeerconnectionobservers.h b/talk/app/webrtc/test/mockpeerconnectionobservers.h
new file mode 100644
index 0000000000..f1bdbee9f5
--- /dev/null
+++ b/talk/app/webrtc/test/mockpeerconnectionobservers.h
@@ -0,0 +1,243 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains mock implementations of observers used in PeerConnection.
+
+#ifndef TALK_APP_WEBRTC_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
+#define TALK_APP_WEBRTC_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
+
+#include <string>
+
+#include "talk/app/webrtc/datachannelinterface.h"
+
+namespace webrtc {
+
+class MockCreateSessionDescriptionObserver
+ : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ MockCreateSessionDescriptionObserver()
+ : called_(false),
+ result_(false) {}
+ virtual ~MockCreateSessionDescriptionObserver() {}
+ virtual void OnSuccess(SessionDescriptionInterface* desc) {
+ called_ = true;
+ result_ = true;
+ desc_.reset(desc);
+ }
+ virtual void OnFailure(const std::string& error) {
+ called_ = true;
+ result_ = false;
+ }
+ bool called() const { return called_; }
+ bool result() const { return result_; }
+ SessionDescriptionInterface* release_desc() {
+ return desc_.release();
+ }
+
+ private:
+ bool called_;
+ bool result_;
+ rtc::scoped_ptr<SessionDescriptionInterface> desc_;
+};
+
+class MockSetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ MockSetSessionDescriptionObserver()
+ : called_(false),
+ result_(false) {}
+ virtual ~MockSetSessionDescriptionObserver() {}
+ virtual void OnSuccess() {
+ called_ = true;
+ result_ = true;
+ }
+ virtual void OnFailure(const std::string& error) {
+ called_ = true;
+ result_ = false;
+ }
+ bool called() const { return called_; }
+ bool result() const { return result_; }
+
+ private:
+ bool called_;
+ bool result_;
+};
+
+class MockDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+ explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel)
+ : channel_(channel), received_message_count_(0) {
+ channel_->RegisterObserver(this);
+ state_ = channel_->state();
+ }
+ virtual ~MockDataChannelObserver() {
+ channel_->UnregisterObserver();
+ }
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {}
+
+ void OnStateChange() override { state_ = channel_->state(); }
+ void OnMessage(const DataBuffer& buffer) override {
+ last_message_.assign(buffer.data.data<char>(), buffer.data.size());
+ ++received_message_count_;
+ }
+
+ bool IsOpen() const { return state_ == DataChannelInterface::kOpen; }
+ const std::string& last_message() const { return last_message_; }
+ size_t received_message_count() const { return received_message_count_; }
+
+ private:
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
+ DataChannelInterface::DataState state_;
+ std::string last_message_;
+ size_t received_message_count_;
+};
+
+class MockStatsObserver : public webrtc::StatsObserver {
+ public:
+ MockStatsObserver() : called_(false), stats_() {}
+ virtual ~MockStatsObserver() {}
+
+ virtual void OnComplete(const StatsReports& reports) {
+ ASSERT(!called_);
+ called_ = true;
+ stats_.Clear();
+ stats_.number_of_reports = reports.size();
+ for (const auto* r : reports) {
+ if (r->type() == StatsReport::kStatsReportTypeSsrc) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAudioOutputLevel,
+ &stats_.audio_output_level);
+ GetIntValue(r, StatsReport::kStatsValueNameAudioInputLevel,
+ &stats_.audio_input_level);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesReceived,
+ &stats_.bytes_received);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesSent,
+ &stats_.bytes_sent);
+ } else if (r->type() == StatsReport::kStatsReportTypeBwe) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+ &stats_.available_receive_bandwidth);
+ } else if (r->type() == StatsReport::kStatsReportTypeComponent) {
+ stats_.timestamp = r->timestamp();
+ GetStringValue(r, StatsReport::kStatsValueNameDtlsCipher,
+ &stats_.dtls_cipher);
+ GetStringValue(r, StatsReport::kStatsValueNameSrtpCipher,
+ &stats_.srtp_cipher);
+ }
+ }
+ }
+
+ bool called() const { return called_; }
+ size_t number_of_reports() const { return stats_.number_of_reports; }
+ double timestamp() const { return stats_.timestamp; }
+
+ int AudioOutputLevel() const {
+ ASSERT(called_);
+ return stats_.audio_output_level;
+ }
+
+ int AudioInputLevel() const {
+ ASSERT(called_);
+ return stats_.audio_input_level;
+ }
+
+ int BytesReceived() const {
+ ASSERT(called_);
+ return stats_.bytes_received;
+ }
+
+ int BytesSent() const {
+ ASSERT(called_);
+ return stats_.bytes_sent;
+ }
+
+ int AvailableReceiveBandwidth() const {
+ ASSERT(called_);
+ return stats_.available_receive_bandwidth;
+ }
+
+ std::string DtlsCipher() const {
+ ASSERT(called_);
+ return stats_.dtls_cipher;
+ }
+
+ std::string SrtpCipher() const {
+ ASSERT(called_);
+ return stats_.srtp_cipher;
+ }
+
+ private:
+ bool GetIntValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ int* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v) {
+ // TODO(tommi): We should really just be using an int here :-/
+ *value = rtc::FromString<int>(v->ToString());
+ }
+ return v != nullptr;
+ }
+
+ bool GetStringValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ std::string* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v)
+ *value = v->ToString();
+ return v != nullptr;
+ }
+
+ bool called_;
+ struct {
+ void Clear() {
+ number_of_reports = 0;
+ timestamp = 0;
+ audio_output_level = 0;
+ audio_input_level = 0;
+ bytes_received = 0;
+ bytes_sent = 0;
+ available_receive_bandwidth = 0;
+ dtls_cipher.clear();
+ srtp_cipher.clear();
+ }
+
+ size_t number_of_reports;
+ double timestamp;
+ int audio_output_level;
+ int audio_input_level;
+ int bytes_received;
+ int bytes_sent;
+ int available_receive_bandwidth;
+ std::string dtls_cipher;
+ std::string srtp_cipher;
+ } stats_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
diff --git a/talk/app/webrtc/test/peerconnectiontestwrapper.cc b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
new file mode 100644
index 0000000000..2eb24d9700
--- /dev/null
+++ b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
@@ -0,0 +1,297 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
+#include "talk/app/webrtc/test/fakeperiodicvideocapturer.h"
+#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
+#include "talk/app/webrtc/test/peerconnectiontestwrapper.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "webrtc/base/gunit.h"
+
+static const char kStreamLabelBase[] = "stream_label";
+static const char kVideoTrackLabelBase[] = "video_track";
+static const char kAudioTrackLabelBase[] = "audio_track";
+static const int kMaxWait = 10000;
+static const int kTestAudioFrameCount = 3;
+static const int kTestVideoFrameCount = 3;
+
+using webrtc::FakeConstraints;
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::VideoTrackInterface;
+
+void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee) {
+ caller->SignalOnIceCandidateReady.connect(
+ callee, &PeerConnectionTestWrapper::AddIceCandidate);
+ callee->SignalOnIceCandidateReady.connect(
+ caller, &PeerConnectionTestWrapper::AddIceCandidate);
+
+ caller->SignalOnSdpReady.connect(
+ callee, &PeerConnectionTestWrapper::ReceiveOfferSdp);
+ callee->SignalOnSdpReady.connect(
+ caller, &PeerConnectionTestWrapper::ReceiveAnswerSdp);
+}
+
+PeerConnectionTestWrapper::PeerConnectionTestWrapper(const std::string& name)
+ : name_(name) {}
+
+PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {}
+
+bool PeerConnectionTestWrapper::CreatePc(
+ const MediaConstraintsInterface* constraints) {
+ allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
+ if (!allocator_factory_) {
+ return false;
+ }
+
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ if (fake_audio_capture_module_ == NULL) {
+ return false;
+ }
+
+ peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(),
+ fake_audio_capture_module_, NULL, NULL);
+ if (!peer_connection_factory_) {
+ return false;
+ }
+
+ // CreatePeerConnection with IceServers.
+ webrtc::PeerConnectionInterface::IceServers ice_servers;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = "stun:stun.l.google.com:19302";
+ ice_servers.push_back(ice_server);
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
+ rtc::SSLStreamAdapter::HaveDtlsSrtp() ?
+ new FakeDtlsIdentityStore() : nullptr);
+ peer_connection_ = peer_connection_factory_->CreatePeerConnection(
+ ice_servers, constraints, allocator_factory_.get(),
+ dtls_identity_store.Pass(), this);
+
+ return peer_connection_.get() != NULL;
+}
+
+rtc::scoped_refptr<webrtc::DataChannelInterface>
+PeerConnectionTestWrapper::CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init) {
+ return peer_connection_->CreateDataChannel(label, &init);
+}
+
+void PeerConnectionTestWrapper::OnAddStream(MediaStreamInterface* stream) {
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": OnAddStream";
+ // TODO(ronghuawu): support multiple streams.
+ if (stream->GetVideoTracks().size() > 0) {
+ renderer_.reset(new FakeVideoTrackRenderer(stream->GetVideoTracks()[0]));
+ }
+}
+
+void PeerConnectionTestWrapper::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ std::string sdp;
+ EXPECT_TRUE(candidate->ToString(&sdp));
+ // Give the user a chance to modify sdp for testing.
+ SignalOnIceCandidateCreated(&sdp);
+ SignalOnIceCandidateReady(candidate->sdp_mid(), candidate->sdp_mline_index(),
+ sdp);
+}
+
+void PeerConnectionTestWrapper::OnDataChannel(
+ webrtc::DataChannelInterface* data_channel) {
+ SignalOnDataChannel(data_channel);
+}
+
+void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+ // This callback should take the ownership of |desc|.
+ rtc::scoped_ptr<SessionDescriptionInterface> owned_desc(desc);
+ std::string sdp;
+ EXPECT_TRUE(desc->ToString(&sdp));
+
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": " << desc->type() << " sdp created: " << sdp;
+
+ // Give the user a chance to modify sdp for testing.
+ SignalOnSdpCreated(&sdp);
+
+ SetLocalDescription(desc->type(), sdp);
+
+ SignalOnSdpReady(sdp);
+}
+
+void PeerConnectionTestWrapper::CreateOffer(
+ const MediaConstraintsInterface* constraints) {
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": CreateOffer.";
+ peer_connection_->CreateOffer(this, constraints);
+}
+
+void PeerConnectionTestWrapper::CreateAnswer(
+ const MediaConstraintsInterface* constraints) {
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": CreateAnswer.";
+ peer_connection_->CreateAnswer(this, constraints);
+}
+
+void PeerConnectionTestWrapper::ReceiveOfferSdp(const std::string& sdp) {
+ SetRemoteDescription(SessionDescriptionInterface::kOffer, sdp);
+ CreateAnswer(NULL);
+}
+
+void PeerConnectionTestWrapper::ReceiveAnswerSdp(const std::string& sdp) {
+ SetRemoteDescription(SessionDescriptionInterface::kAnswer, sdp);
+}
+
+void PeerConnectionTestWrapper::SetLocalDescription(const std::string& type,
+ const std::string& sdp) {
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetLocalDescription " << type << " " << sdp;
+
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockSetSessionDescriptionObserver>());
+ peer_connection_->SetLocalDescription(
+ observer, webrtc::CreateSessionDescription(type, sdp, NULL));
+}
+
+void PeerConnectionTestWrapper::SetRemoteDescription(const std::string& type,
+ const std::string& sdp) {
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetRemoteDescription " << type << " " << sdp;
+
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+ observer(new rtc::RefCountedObject<
+ MockSetSessionDescriptionObserver>());
+ peer_connection_->SetRemoteDescription(
+ observer, webrtc::CreateSessionDescription(type, sdp, NULL));
+}
+
+void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& candidate) {
+ rtc::scoped_ptr<webrtc::IceCandidateInterface> owned_candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+ EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
+}
+
+void PeerConnectionTestWrapper::WaitForCallEstablished() {
+ WaitForConnection();
+ WaitForAudio();
+ WaitForVideo();
+}
+
+void PeerConnectionTestWrapper::WaitForConnection() {
+ EXPECT_TRUE_WAIT(CheckForConnection(), kMaxWait);
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Connected.";
+}
+
+bool PeerConnectionTestWrapper::CheckForConnection() {
+ return (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionConnected) ||
+ (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionCompleted);
+}
+
+void PeerConnectionTestWrapper::WaitForAudio() {
+ EXPECT_TRUE_WAIT(CheckForAudio(), kMaxWait);
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough audio frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForAudio() {
+ return (fake_audio_capture_module_->frames_received() >=
+ kTestAudioFrameCount);
+}
+
+void PeerConnectionTestWrapper::WaitForVideo() {
+ EXPECT_TRUE_WAIT(CheckForVideo(), kMaxWait);
+ LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough video frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForVideo() {
+ if (!renderer_) {
+ return false;
+ }
+ return (renderer_->num_rendered_frames() >= kTestVideoFrameCount);
+}
+
+void PeerConnectionTestWrapper::GetAndAddUserMedia(
+ bool audio, const webrtc::FakeConstraints& audio_constraints,
+ bool video, const webrtc::FakeConstraints& video_constraints) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ GetUserMedia(audio, audio_constraints, video, video_constraints);
+ EXPECT_TRUE(peer_connection_->AddStream(stream));
+}
+
+rtc::scoped_refptr<webrtc::MediaStreamInterface>
+ PeerConnectionTestWrapper::GetUserMedia(
+ bool audio, const webrtc::FakeConstraints& audio_constraints,
+ bool video, const webrtc::FakeConstraints& video_constraints) {
+ std::string label = kStreamLabelBase +
+ rtc::ToString<int>(
+ static_cast<int>(peer_connection_->local_streams()->count()));
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ peer_connection_factory_->CreateLocalMediaStream(label);
+
+ if (audio) {
+ FakeConstraints constraints = audio_constraints;
+ // Disable highpass filter so that we can get all the test audio frames.
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kHighpassFilter, false);
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(&constraints);
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase,
+ source));
+ stream->AddTrack(audio_track);
+ }
+
+ if (video) {
+ // Set max frame rate to 10fps to reduce the risk of the tests being flaky.
+ FakeConstraints constraints = video_constraints;
+ constraints.SetMandatoryMaxFrameRate(10);
+
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ peer_connection_factory_->CreateVideoSource(
+ new webrtc::FakePeriodicVideoCapturer(), &constraints);
+ std::string videotrack_label = label + kVideoTrackLabelBase;
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ peer_connection_factory_->CreateVideoTrack(videotrack_label, source));
+
+ stream->AddTrack(video_track);
+ }
+ return stream;
+}
diff --git a/talk/app/webrtc/test/peerconnectiontestwrapper.h b/talk/app/webrtc/test/peerconnectiontestwrapper.h
new file mode 100644
index 0000000000..b65426326f
--- /dev/null
+++ b/talk/app/webrtc/test/peerconnectiontestwrapper.h
@@ -0,0 +1,122 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_PEERCONNECTIONTESTWRAPPER_H_
+#define TALK_APP_WEBRTC_TEST_PEERCONNECTIONTESTWRAPPER_H_
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/test/fakeaudiocapturemodule.h"
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
+#include "webrtc/base/sigslot.h"
+
+namespace webrtc {
+class DtlsIdentityStoreInterface;
+class PortAllocatorFactoryInterface;
+}
+
+class PeerConnectionTestWrapper
+ : public webrtc::PeerConnectionObserver,
+ public webrtc::CreateSessionDescriptionObserver,
+ public sigslot::has_slots<> {
+ public:
+ static void Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee);
+
+ explicit PeerConnectionTestWrapper(const std::string& name);
+ virtual ~PeerConnectionTestWrapper();
+
+ bool CreatePc(const webrtc::MediaConstraintsInterface* constraints);
+
+ rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init);
+
+ // Implements PeerConnectionObserver.
+ virtual void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {}
+ virtual void OnStateChange(
+ webrtc::PeerConnectionObserver::StateType state_changed) {}
+ virtual void OnAddStream(webrtc::MediaStreamInterface* stream);
+ virtual void OnRemoveStream(webrtc::MediaStreamInterface* stream) {}
+ virtual void OnDataChannel(webrtc::DataChannelInterface* data_channel);
+ virtual void OnRenegotiationNeeded() {}
+ virtual void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) {}
+ virtual void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) {}
+ virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate);
+ virtual void OnIceComplete() {}
+
+ // Implements CreateSessionDescriptionObserver.
+ virtual void OnSuccess(webrtc::SessionDescriptionInterface* desc);
+ virtual void OnFailure(const std::string& error) {}
+
+ void CreateOffer(const webrtc::MediaConstraintsInterface* constraints);
+ void CreateAnswer(const webrtc::MediaConstraintsInterface* constraints);
+ void ReceiveOfferSdp(const std::string& sdp);
+ void ReceiveAnswerSdp(const std::string& sdp);
+ void AddIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
+ const std::string& candidate);
+ void WaitForCallEstablished();
+ void WaitForConnection();
+ void WaitForAudio();
+ void WaitForVideo();
+ void GetAndAddUserMedia(
+ bool audio, const webrtc::FakeConstraints& audio_constraints,
+ bool video, const webrtc::FakeConstraints& video_constraints);
+
+ // sigslots
+ sigslot::signal1<std::string*> SignalOnIceCandidateCreated;
+ sigslot::signal3<const std::string&,
+ int,
+ const std::string&> SignalOnIceCandidateReady;
+ sigslot::signal1<std::string*> SignalOnSdpCreated;
+ sigslot::signal1<const std::string&> SignalOnSdpReady;
+ sigslot::signal1<webrtc::DataChannelInterface*> SignalOnDataChannel;
+
+ private:
+ void SetLocalDescription(const std::string& type, const std::string& sdp);
+ void SetRemoteDescription(const std::string& type, const std::string& sdp);
+ bool CheckForConnection();
+ bool CheckForAudio();
+ bool CheckForVideo();
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> GetUserMedia(
+ bool audio, const webrtc::FakeConstraints& audio_constraints,
+ bool video, const webrtc::FakeConstraints& video_constraints);
+
+ std::string name_;
+ rtc::scoped_refptr<webrtc::PortAllocatorFactoryInterface>
+ allocator_factory_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
+};
+
+#endif // TALK_APP_WEBRTC_TEST_PEERCONNECTIONTESTWRAPPER_H_
diff --git a/talk/app/webrtc/test/testsdpstrings.h b/talk/app/webrtc/test/testsdpstrings.h
new file mode 100644
index 0000000000..e27c9a2f88
--- /dev/null
+++ b/talk/app/webrtc/test/testsdpstrings.h
@@ -0,0 +1,147 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains SDP strings used for testing.
+
+#ifndef TALK_APP_WEBRTC_TEST_TESTSDPSTRINGS_H_
+#define TALK_APP_WEBRTC_TEST_TESTSDPSTRINGS_H_
+
+namespace webrtc {
+
+// SDP offer string from a Nightly Firefox build.
+static const char kFireFoxSdpOffer[] =
+ "v=0\r\n"
+ "o=Mozilla-SIPUA 23551 0 IN IP4 0.0.0.0\r\n"
+ "s=SIP Call\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:"
+ "BF:1A:56:65:7D:F4:03:AD:7E:77:43:2A:29:EC:93\r\n"
+ "m=audio 36993 RTP/SAVPF 109 0 8 101\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtpmap:109 opus/48000/2\r\n"
+ "a=ptime:20\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:101 telephone-event/8000\r\n"
+ "a=fmtp:101 0-15\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 61725 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 58798 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 58122 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 36993 typ srflx raddr "
+ "10.0.254.2 rport 58122\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 55025 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 63576 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 50962 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 41028 typ srflx raddr"
+ " 10.0.254.2 rport 50962\r\n"
+ "m=video 38826 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 62017 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 59741 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 62652 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 38826 typ srflx raddr"
+ " 10.0.254.2 rport 62652\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 63440 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 51847 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 58890 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 33611 typ srflx raddr"
+ " 10.0.254.2 rport 58890\r\n"
+#ifdef HAVE_SCTP
+ "m=application 45536 SCTP/DTLS 5000\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=fmtp:5000 protocol=webrtc-datachannel;streams=16\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 60248 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 55925 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 65268 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 45536 typ srflx raddr"
+ " 10.0.254.2 rport 65268\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 49162 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 59635 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 61232 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 45468 typ srflx raddr"
+ " 10.0.254.2 rport 61232\r\n"
+#endif
+ ;
+
+// Audio SDP with a limited set of audio codecs.
+static const char kAudioSdp[] =
+ "v=0\r\n"
+ "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+ "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+ "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+ "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+ "a=mid:audio\r\n";
+
+static const char kAudioSdpWithUnsupportedCodecs[] =
+ "v=0\r\n"
+ "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 18 110 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:18 WeirdCodec1/8000\r\n"
+ "a=rtpmap:110 WeirdCodec2/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+ "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+ "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+ "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+ "a=mid:audio\r\n";
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_TESTSDPSTRINGS_H_
diff --git a/talk/app/webrtc/umametrics.h b/talk/app/webrtc/umametrics.h
new file mode 100644
index 0000000000..14fac962f4
--- /dev/null
+++ b/talk/app/webrtc/umametrics.h
@@ -0,0 +1,128 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains enums related to IPv4/IPv6 metrics.
+
+#ifndef TALK_APP_WEBRTC_UMAMETRICS_H_
+#define TALK_APP_WEBRTC_UMAMETRICS_H_
+
+namespace webrtc {
+
+// Used to specify which enum counter type we're incrementing in
+// MetricsObserverInterface::IncrementEnumCounter.
+enum PeerConnectionEnumCounterType {
+ kEnumCounterAddressFamily,
+ // For the next 2 counters, we track them separately based on the "first hop"
+ // protocol used by the local candidate. "First hop" means the local candidate
+ // type in the case of non-TURN candidates, and the protocol used to connect
+ // to the TURN server in the case of TURN candidates.
+ kEnumCounterIceCandidatePairTypeUdp,
+ kEnumCounterIceCandidatePairTypeTcp,
+
+ kEnumCounterAudioSrtpCipher,
+ kEnumCounterAudioSslCipher,
+ kEnumCounterVideoSrtpCipher,
+ kEnumCounterVideoSslCipher,
+ kEnumCounterDataSrtpCipher,
+ kEnumCounterDataSslCipher,
+ kPeerConnectionEnumCounterMax
+};
+
+// Currently this contains information related to WebRTC network/transport
+// information.
+
+// The difference between PeerConnectionEnumCounter and
+// PeerConnectionMetricsName is that the "EnumCounter" is only counting the
+// occurrences of events, while "Name" has a value associated with it which is
+// used to form a histogram.
+
+// This enum is backed by Chromium's histograms.xml,
+// chromium/src/tools/metrics/histograms/histograms.xml
+// Existing values cannot be re-ordered and new enums must be added
+// before kBoundary.
+enum PeerConnectionAddressFamilyCounter {
+ kPeerConnection_IPv4,
+ kPeerConnection_IPv6,
+ kBestConnections_IPv4,
+ kBestConnections_IPv6,
+ kPeerConnectionAddressFamilyCounter_Max,
+};
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+#define kBoundary kPeerConnectionAddressFamilyCounter_Max
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+typedef PeerConnectionAddressFamilyCounter PeerConnectionUMAMetricsCounter;
+
+// This enum defines types for UMA samples, which will have a range.
+enum PeerConnectionMetricsName {
+ kNetworkInterfaces_IPv4, // Number of IPv4 interfaces.
+ kNetworkInterfaces_IPv6, // Number of IPv6 interfaces.
+ kTimeToConnect, // In milliseconds.
+ kLocalCandidates_IPv4, // Number of IPv4 local candidates.
+ kLocalCandidates_IPv6, // Number of IPv6 local candidates.
+ kPeerConnectionMetricsName_Max
+};
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+typedef PeerConnectionMetricsName PeerConnectionUMAMetricsName;
+
+// The IceCandidatePairType has the format of
+// <local_candidate_type>_<remote_candidate_type>. It is recorded based on the
+// type of candidate pair used when the PeerConnection first goes to a completed
+// state. When BUNDLE is enabled, only the first transport gets recorded.
+enum IceCandidatePairType {
+ // HostHost is deprecated. It was replaced with the set of types at the bottom
+ // to report private or public host IP address.
+ kIceCandidatePairHostHost,
+ kIceCandidatePairHostSrflx,
+ kIceCandidatePairHostRelay,
+ kIceCandidatePairHostPrflx,
+ kIceCandidatePairSrflxHost,
+ kIceCandidatePairSrflxSrflx,
+ kIceCandidatePairSrflxRelay,
+ kIceCandidatePairSrflxPrflx,
+ kIceCandidatePairRelayHost,
+ kIceCandidatePairRelaySrflx,
+ kIceCandidatePairRelayRelay,
+ kIceCandidatePairRelayPrflx,
+ kIceCandidatePairPrflxHost,
+ kIceCandidatePairPrflxSrflx,
+ kIceCandidatePairPrflxRelay,
+
+ // The following 4 types tell whether local and remote hosts have private or
+ // public IP addresses.
+ kIceCandidatePairHostPrivateHostPrivate,
+ kIceCandidatePairHostPrivateHostPublic,
+ kIceCandidatePairHostPublicHostPrivate,
+ kIceCandidatePairHostPublicHostPublic,
+ kIceCandidatePairMax
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_UMAMETRICS_H_
diff --git a/talk/app/webrtc/videosource.cc b/talk/app/webrtc/videosource.cc
new file mode 100644
index 0000000000..b33f5f9e13
--- /dev/null
+++ b/talk/app/webrtc/videosource.cc
@@ -0,0 +1,469 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/videosource.h"
+
+#include <vector>
+#include <cstdlib>
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/session/media/channelmanager.h"
+
+using cricket::CaptureState;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+
+namespace {
+
+const double kRoundingTruncation = 0.0005;
+
+enum {
+ MSG_VIDEOCAPTURESTATECONNECT,
+ MSG_VIDEOCAPTURESTATEDISCONNECT,
+ MSG_VIDEOCAPTURESTATECHANGE,
+};
+
+// Default resolution. If no constraint is specified, this is the resolution we
+// will use.
+static const cricket::VideoFormatPod kDefaultFormat =
+ {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
+
+// List of formats used if the camera doesn't support capability enumeration.
+static const cricket::VideoFormatPod kVideoFormats[] = {
+ {1920, 1080, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {1280, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {960, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {640, 360, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {320, 240, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+ {320, 180, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY}
+};
+
+MediaSourceInterface::SourceState
+GetReadyState(cricket::CaptureState state) {
+ switch (state) {
+ case cricket::CS_STARTING:
+ return MediaSourceInterface::kInitializing;
+ case cricket::CS_RUNNING:
+ return MediaSourceInterface::kLive;
+ case cricket::CS_FAILED:
+ case cricket::CS_NO_DEVICE:
+ case cricket::CS_STOPPED:
+ return MediaSourceInterface::kEnded;
+ case cricket::CS_PAUSED:
+ return MediaSourceInterface::kMuted;
+ default:
+ ASSERT(false && "GetReadyState unknown state");
+ }
+ return MediaSourceInterface::kEnded;
+}
+
+void SetUpperLimit(int new_limit, int* original_limit) {
+ if (*original_limit < 0 || new_limit < *original_limit)
+ *original_limit = new_limit;
+}
+
+// Updates |format_upper_limit| from |constraint|.
+// If constraint.maxFoo is smaller than format_upper_limit.foo,
+// set format_upper_limit.foo to constraint.maxFoo.
+void SetUpperLimitFromConstraint(
+ const MediaConstraintsInterface::Constraint& constraint,
+ cricket::VideoFormat* format_upper_limit) {
+ if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
+ int value = rtc::FromString<int>(constraint.value);
+ SetUpperLimit(value, &(format_upper_limit->width));
+ } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
+ int value = rtc::FromString<int>(constraint.value);
+ SetUpperLimit(value, &(format_upper_limit->height));
+ }
+}
+
+// Fills |format_out| with the max width and height allowed by |constraints|.
+void FromConstraintsForScreencast(
+ const MediaConstraintsInterface::Constraints& constraints,
+ cricket::VideoFormat* format_out) {
+ typedef MediaConstraintsInterface::Constraints::const_iterator
+ ConstraintsIterator;
+
+ cricket::VideoFormat upper_limit(-1, -1, 0, 0);
+ for (ConstraintsIterator constraints_it = constraints.begin();
+ constraints_it != constraints.end(); ++constraints_it)
+ SetUpperLimitFromConstraint(*constraints_it, &upper_limit);
+
+ if (upper_limit.width >= 0)
+ format_out->width = upper_limit.width;
+ if (upper_limit.height >= 0)
+ format_out->height = upper_limit.height;
+}
+
+// Returns true if |constraint| is fulfilled. |format_out| can differ from
+// |format_in| if the format is changed by the constraint. I.e., the frame rate
+// can be changed by setting maxFrameRate.
+bool NewFormatWithConstraints(
+ const MediaConstraintsInterface::Constraint& constraint,
+ const cricket::VideoFormat& format_in,
+ bool mandatory,
+ cricket::VideoFormat* format_out) {
+ ASSERT(format_out != NULL);
+ *format_out = format_in;
+
+ if (constraint.key == MediaConstraintsInterface::kMinWidth) {
+ int value = rtc::FromString<int>(constraint.value);
+ return (value <= format_in.width);
+ } else if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
+ int value = rtc::FromString<int>(constraint.value);
+ return (value >= format_in.width);
+ } else if (constraint.key == MediaConstraintsInterface::kMinHeight) {
+ int value = rtc::FromString<int>(constraint.value);
+ return (value <= format_in.height);
+ } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
+ int value = rtc::FromString<int>(constraint.value);
+ return (value >= format_in.height);
+ } else if (constraint.key == MediaConstraintsInterface::kMinFrameRate) {
+ int value = rtc::FromString<int>(constraint.value);
+ return (value <= cricket::VideoFormat::IntervalToFps(format_in.interval));
+ } else if (constraint.key == MediaConstraintsInterface::kMaxFrameRate) {
+ int value = rtc::FromString<int>(constraint.value);
+ if (value == 0) {
+ if (mandatory) {
+ // TODO(ronghuawu): Convert the constraint value to float when sub-1fps
+ // is supported by the capturer.
+ return false;
+ } else {
+ value = 1;
+ }
+ }
+ if (value <= cricket::VideoFormat::IntervalToFps(format_in.interval))
+ format_out->interval = cricket::VideoFormat::FpsToInterval(value);
+ return true;
+ } else if (constraint.key == MediaConstraintsInterface::kMinAspectRatio) {
+ double value = rtc::FromString<double>(constraint.value);
+ // The aspect ratio in |constraint.value| has been converted to a string and
+ // back to a double, so it may have a rounding error.
+ // E.g if the value 1/3 is converted to a string, the string will not have
+ // infinite length.
+ // We add a margin of 0.0005 which is high enough to detect the same aspect
+ // ratio but small enough to avoid matching wrong aspect ratios.
+ double ratio = static_cast<double>(format_in.width) / format_in.height;
+ return (value <= ratio + kRoundingTruncation);
+ } else if (constraint.key == MediaConstraintsInterface::kMaxAspectRatio) {
+ double value = rtc::FromString<double>(constraint.value);
+ double ratio = static_cast<double>(format_in.width) / format_in.height;
+ // Subtract 0.0005 to avoid rounding problems. Same as above.
+ const double kRoundingTruncation = 0.0005;
+ return (value >= ratio - kRoundingTruncation);
+ } else if (constraint.key == MediaConstraintsInterface::kNoiseReduction) {
+ // These are actually options, not constraints, so they can be satisfied
+ // regardless of the format.
+ return true;
+ }
+ LOG(LS_WARNING) << "Found unknown MediaStream constraint. Name:"
+ << constraint.key << " Value:" << constraint.value;
+ return false;
+}
+
+// Removes cricket::VideoFormats from |formats| that don't meet |constraint|.
+void FilterFormatsByConstraint(
+ const MediaConstraintsInterface::Constraint& constraint,
+ bool mandatory,
+ std::vector<cricket::VideoFormat>* formats) {
+ std::vector<cricket::VideoFormat>::iterator format_it =
+ formats->begin();
+ while (format_it != formats->end()) {
+ // Modify the format_it to fulfill the constraint if possible.
+ // Delete it otherwise.
+ if (!NewFormatWithConstraints(constraint, (*format_it),
+ mandatory, &(*format_it))) {
+ format_it = formats->erase(format_it);
+ } else {
+ ++format_it;
+ }
+ }
+}
+
+// Returns a vector of cricket::VideoFormat that best match |constraints|.
+std::vector<cricket::VideoFormat> FilterFormats(
+ const MediaConstraintsInterface::Constraints& mandatory,
+ const MediaConstraintsInterface::Constraints& optional,
+ const std::vector<cricket::VideoFormat>& supported_formats) {
+ typedef MediaConstraintsInterface::Constraints::const_iterator
+ ConstraintsIterator;
+ std::vector<cricket::VideoFormat> candidates = supported_formats;
+
+ for (ConstraintsIterator constraints_it = mandatory.begin();
+ constraints_it != mandatory.end(); ++constraints_it)
+ FilterFormatsByConstraint(*constraints_it, true, &candidates);
+
+ if (candidates.size() == 0)
+ return candidates;
+
+ // Ok - all mandatory checked and we still have a candidate.
+ // Let's try filtering using the optional constraints.
+ for (ConstraintsIterator constraints_it = optional.begin();
+ constraints_it != optional.end(); ++constraints_it) {
+ std::vector<cricket::VideoFormat> current_candidates = candidates;
+ FilterFormatsByConstraint(*constraints_it, false, &current_candidates);
+ if (current_candidates.size() > 0) {
+ candidates = current_candidates;
+ }
+ }
+
+ // We have done as good as we can to filter the supported resolutions.
+ return candidates;
+}
+
+// Find the format that best matches the default video size.
+// Constraints are optional and since the performance of a video call
+// might be bad due to bitrate limitations, CPU, and camera performance,
+// it is better to select a resolution that is as close as possible to our
+// default and still meets the constraints.
+const cricket::VideoFormat& GetBestCaptureFormat(
+ const std::vector<cricket::VideoFormat>& formats) {
+ ASSERT(formats.size() > 0);
+
+ int default_area = kDefaultFormat.width * kDefaultFormat.height;
+
+ std::vector<cricket::VideoFormat>::const_iterator it = formats.begin();
+ std::vector<cricket::VideoFormat>::const_iterator best_it = formats.begin();
+ int best_diff_area = std::abs(default_area - it->width * it->height);
+ int64_t best_diff_interval = kDefaultFormat.interval;
+ for (; it != formats.end(); ++it) {
+ int diff_area = std::abs(default_area - it->width * it->height);
+ int64_t diff_interval = std::abs(kDefaultFormat.interval - it->interval);
+ if (diff_area < best_diff_area ||
+ (diff_area == best_diff_area && diff_interval < best_diff_interval)) {
+ best_diff_area = diff_area;
+ best_diff_interval = diff_interval;
+ best_it = it;
+ }
+ }
+ return *best_it;
+}
+
+// Set |option| to the highest-priority value of |key| in the constraints.
+// Return false if the key is mandatory, and the value is invalid.
+bool ExtractOption(const MediaConstraintsInterface* all_constraints,
+ const std::string& key, cricket::Settable<bool>* option) {
+ size_t mandatory = 0;
+ bool value;
+ if (FindConstraint(all_constraints, key, &value, &mandatory)) {
+ option->Set(value);
+ return true;
+ }
+
+ return mandatory == 0;
+}
+
+// Search |all_constraints| for known video options. Apply all options that are
+// found with valid values, and return false if any mandatory video option was
+// found with an invalid value.
+bool ExtractVideoOptions(const MediaConstraintsInterface* all_constraints,
+ cricket::VideoOptions* options) {
+ bool all_valid = true;
+
+ all_valid &= ExtractOption(all_constraints,
+ MediaConstraintsInterface::kNoiseReduction,
+ &(options->video_noise_reduction));
+
+ return all_valid;
+}
+
+class FrameInputWrapper : public cricket::VideoRenderer {
+ public:
+ explicit FrameInputWrapper(cricket::VideoCapturer* capturer)
+ : capturer_(capturer) {
+ ASSERT(capturer_ != NULL);
+ }
+
+ virtual ~FrameInputWrapper() {}
+
+ // VideoRenderer implementation.
+ bool SetSize(int width, int height, int reserved) override { return true; }
+
+ bool RenderFrame(const cricket::VideoFrame* frame) override {
+ if (!capturer_->IsRunning()) {
+ return true;
+ }
+
+ // This signal will be made on media engine render thread. The clients
+ // of this signal should have no assumptions on what thread this signal
+    // comes from.
+ capturer_->SignalVideoFrame(capturer_, frame);
+ return true;
+ }
+
+ private:
+ cricket::VideoCapturer* capturer_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(FrameInputWrapper);
+};
+
+} // anonymous namespace
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoSource> VideoSource::Create(
+ cricket::ChannelManager* channel_manager,
+ cricket::VideoCapturer* capturer,
+ const webrtc::MediaConstraintsInterface* constraints) {
+ ASSERT(channel_manager != NULL);
+ ASSERT(capturer != NULL);
+ rtc::scoped_refptr<VideoSource> source(
+ new rtc::RefCountedObject<VideoSource>(channel_manager,
+ capturer));
+ source->Initialize(constraints);
+ return source;
+}
+
+VideoSource::VideoSource(cricket::ChannelManager* channel_manager,
+ cricket::VideoCapturer* capturer)
+ : channel_manager_(channel_manager),
+ video_capturer_(capturer),
+ state_(kInitializing) {
+ channel_manager_->SignalVideoCaptureStateChange.connect(
+ this, &VideoSource::OnStateChange);
+}
+
+VideoSource::~VideoSource() {
+ channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+ channel_manager_->SignalVideoCaptureStateChange.disconnect(this);
+}
+
+void VideoSource::Initialize(
+ const webrtc::MediaConstraintsInterface* constraints) {
+
+ std::vector<cricket::VideoFormat> formats =
+ channel_manager_->GetSupportedFormats(video_capturer_.get());
+ if (formats.empty()) {
+ if (video_capturer_->IsScreencast()) {
+ // The screen capturer can accept any resolution and we will derive the
+ // format from the constraints if any.
+ // Note that this only affects tab capturing, not desktop capturing,
+ // since the desktop capturer does not respect the VideoFormat passed in.
+ formats.push_back(cricket::VideoFormat(kDefaultFormat));
+ } else {
+ // The VideoCapturer implementation doesn't support capability
+ // enumeration. We need to guess what the camera supports.
+ for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
+ formats.push_back(cricket::VideoFormat(kVideoFormats[i]));
+ }
+ }
+ }
+
+ if (constraints) {
+ MediaConstraintsInterface::Constraints mandatory_constraints =
+ constraints->GetMandatory();
+ MediaConstraintsInterface::Constraints optional_constraints;
+ optional_constraints = constraints->GetOptional();
+
+ if (video_capturer_->IsScreencast()) {
+ // Use the maxWidth and maxHeight allowed by constraints for screencast.
+ FromConstraintsForScreencast(mandatory_constraints, &(formats[0]));
+ }
+
+ formats = FilterFormats(mandatory_constraints, optional_constraints,
+ formats);
+ }
+
+ if (formats.size() == 0) {
+ LOG(LS_WARNING) << "Failed to find a suitable video format.";
+ SetState(kEnded);
+ return;
+ }
+
+ cricket::VideoOptions options;
+ if (!ExtractVideoOptions(constraints, &options)) {
+ LOG(LS_WARNING) << "Could not satisfy mandatory options.";
+ SetState(kEnded);
+ return;
+ }
+ options_.SetAll(options);
+
+ format_ = GetBestCaptureFormat(formats);
+ // Start the camera with our best guess.
+  // TODO(perkj): Should we try again with another format if it turns out that
+ // the camera doesn't produce frames with the correct format? Or will
+  // cricket::VideoCapturer be able to re-scale / crop to the requested
+ // resolution?
+ if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+ SetState(kEnded);
+ return;
+ }
+ // Initialize hasn't succeeded until a successful state change has occurred.
+}
+
+cricket::VideoRenderer* VideoSource::FrameInput() {
+ // Defer creation of frame_input_ until it's needed, e.g. the local video
+ // sources will never need it.
+ if (!frame_input_) {
+ frame_input_.reset(new FrameInputWrapper(video_capturer_.get()));
+ }
+ return frame_input_.get();
+}
+
+void VideoSource::Stop() {
+ channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+}
+
+void VideoSource::Restart() {
+ if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+ SetState(kEnded);
+ return;
+ }
+ for(cricket::VideoRenderer* sink : sinks_) {
+ channel_manager_->AddVideoRenderer(video_capturer_.get(), sink);
+ }
+}
+
+void VideoSource::AddSink(cricket::VideoRenderer* output) {
+ sinks_.push_back(output);
+ channel_manager_->AddVideoRenderer(video_capturer_.get(), output);
+}
+
+void VideoSource::RemoveSink(cricket::VideoRenderer* output) {
+ sinks_.remove(output);
+ channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output);
+}
+
+// OnStateChange listens to the ChannelManager::SignalVideoCaptureStateChange.
+// This signal is triggered for all video capturers. Not only the one we are
+// interested in.
+void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
+ cricket::CaptureState capture_state) {
+ if (capturer == video_capturer_.get()) {
+ SetState(GetReadyState(capture_state));
+ }
+}
+
+void VideoSource::SetState(SourceState new_state) {
+ if (VERIFY(state_ != new_state)) {
+ state_ = new_state;
+ FireOnChanged();
+ }
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/videosource.h b/talk/app/webrtc/videosource.h
new file mode 100644
index 0000000000..8253cbac18
--- /dev/null
+++ b/talk/app/webrtc/videosource.h
@@ -0,0 +1,111 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOSOURCE_H_
+#define TALK_APP_WEBRTC_VIDEOSOURCE_H_
+
+#include <list>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/notifier.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/app/webrtc/videotrackrenderers.h"
+#include "talk/media/base/videocapturer.h"
+#include "talk/media/base/videocommon.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+
+// VideoSource implements VideoSourceInterface. It owns a
+// cricket::VideoCapturer and makes sure the camera is started at a resolution
+// that honors the constraints.
+// The state is set depending on the result of starting the capturer.
+// If the constraint can't be met or the capturer fails to start, the state
+// transitions to kEnded, otherwise it transitions to kLive.
+
+namespace cricket {
+
+class ChannelManager;
+
+} // namespace cricket
+
+namespace webrtc {
+
+class MediaConstraintsInterface;
+
+class VideoSource : public Notifier<VideoSourceInterface>,
+ public sigslot::has_slots<> {
+ public:
+ // Creates an instance of VideoSource.
+  // VideoSource takes ownership of |capturer|.
+ // |constraints| can be NULL and in that case the camera is opened using a
+ // default resolution.
+ static rtc::scoped_refptr<VideoSource> Create(
+ cricket::ChannelManager* channel_manager,
+ cricket::VideoCapturer* capturer,
+ const webrtc::MediaConstraintsInterface* constraints);
+
+ virtual SourceState state() const { return state_; }
+ virtual const cricket::VideoOptions* options() const { return &options_; }
+ virtual cricket::VideoRenderer* FrameInput();
+
+ virtual cricket::VideoCapturer* GetVideoCapturer() {
+ return video_capturer_.get();
+ }
+
+ void Stop() override;
+ void Restart() override;
+
+ // |output| will be served video frames as long as the underlying capturer
+  // is producing video frames.
+ virtual void AddSink(cricket::VideoRenderer* output);
+ virtual void RemoveSink(cricket::VideoRenderer* output);
+
+ protected:
+ VideoSource(cricket::ChannelManager* channel_manager,
+ cricket::VideoCapturer* capturer);
+ virtual ~VideoSource();
+ void Initialize(const webrtc::MediaConstraintsInterface* constraints);
+
+ private:
+ void OnStateChange(cricket::VideoCapturer* capturer,
+ cricket::CaptureState capture_state);
+ void SetState(SourceState new_state);
+
+ cricket::ChannelManager* channel_manager_;
+ rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
+ rtc::scoped_ptr<cricket::VideoRenderer> frame_input_;
+
+ std::list<cricket::VideoRenderer*> sinks_;
+
+ cricket::VideoFormat format_;
+ cricket::VideoOptions options_;
+ SourceState state_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_VIDEOSOURCE_H_
diff --git a/talk/app/webrtc/videosource_unittest.cc b/talk/app/webrtc/videosource_unittest.cc
new file mode 100644
index 0000000000..2efcc1d84e
--- /dev/null
+++ b/talk/app/webrtc/videosource_unittest.cc
@@ -0,0 +1,562 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/remotevideocapturer.h"
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/media/base/fakemediaengine.h"
+#include "talk/media/base/fakevideocapturer.h"
+#include "talk/media/base/fakevideorenderer.h"
+#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/gunit.h"
+
+using webrtc::FakeConstraints;
+using webrtc::VideoSource;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::ObserverInterface;
+using webrtc::VideoSourceInterface;
+
+namespace {
+
+// Max wait time for a test.
+const int kMaxWaitMs = 100;
+
+} // anonymous namespace
+
+
+// TestVideoCapturer extends cricket::FakeVideoCapturer so it can be used for
+// testing without known camera formats.
+// It keeps its own lists of cricket::VideoFormats for the unit tests in this
+// file.
+class TestVideoCapturer : public cricket::FakeVideoCapturer {
+ public:
+ TestVideoCapturer() : test_without_formats_(false) {
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 480,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 400,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(320, 240,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(352, 288,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ ResetSupportedFormats(formats);
+ }
+
+ // This function is used for resetting the supported capture formats and
+  // simulating a cricket::VideoCapturer implementation that doesn't support
+ // capture format enumeration. This is used to simulate the current
+ // Chrome implementation.
+ void TestWithoutCameraFormats() {
+ test_without_formats_ = true;
+ std::vector<cricket::VideoFormat> formats;
+ ResetSupportedFormats(formats);
+ }
+
+ virtual cricket::CaptureState Start(
+ const cricket::VideoFormat& capture_format) {
+ if (test_without_formats_) {
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(capture_format);
+ ResetSupportedFormats(formats);
+ }
+ return FakeVideoCapturer::Start(capture_format);
+ }
+
+ virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) {
+ if (test_without_formats_) {
+ *best_format = desired;
+ return true;
+ }
+ return FakeVideoCapturer::GetBestCaptureFormat(desired,
+ best_format);
+ }
+
+ private:
+ bool test_without_formats_;
+};
+
+class StateObserver : public ObserverInterface {
+ public:
+ explicit StateObserver(VideoSourceInterface* source)
+ : state_(source->state()),
+ source_(source) {
+ }
+ virtual void OnChanged() {
+ state_ = source_->state();
+ }
+ MediaSourceInterface::SourceState state() const { return state_; }
+
+ private:
+ MediaSourceInterface::SourceState state_;
+ rtc::scoped_refptr<VideoSourceInterface> source_;
+};
+
+class VideoSourceTest : public testing::Test {
+ protected:
+ VideoSourceTest()
+ : capturer_cleanup_(new TestVideoCapturer()),
+ capturer_(capturer_cleanup_.get()),
+ channel_manager_(new cricket::ChannelManager(
+ new cricket::FakeMediaEngine(), rtc::Thread::Current())) {
+ }
+
+ void SetUp() {
+ ASSERT_TRUE(channel_manager_->Init());
+ }
+
+ void CreateVideoSource() {
+ CreateVideoSource(NULL);
+ }
+
+ void CreateVideoSource(
+ const webrtc::MediaConstraintsInterface* constraints) {
+    // VideoSource takes ownership of |capturer_|
+ source_ = VideoSource::Create(channel_manager_.get(),
+ capturer_cleanup_.release(),
+ constraints);
+
+ ASSERT_TRUE(source_.get() != NULL);
+ EXPECT_EQ(capturer_, source_->GetVideoCapturer());
+
+ state_observer_.reset(new StateObserver(source_));
+ source_->RegisterObserver(state_observer_.get());
+ source_->AddSink(&renderer_);
+ }
+
+ rtc::scoped_ptr<TestVideoCapturer> capturer_cleanup_;
+ TestVideoCapturer* capturer_;
+ cricket::FakeVideoRenderer renderer_;
+ rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+ rtc::scoped_ptr<StateObserver> state_observer_;
+ rtc::scoped_refptr<VideoSource> source_;
+};
+
+
+// Test that a VideoSource transitions to the kLive state when the capture
+// device has started and to kEnded if it is stopped.
+// It also tests that an output can receive video frames.
+TEST_F(VideoSourceTest, CapturerStartStop) {
+ // Initialize without constraints.
+ CreateVideoSource();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+
+ ASSERT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+ capturer_->Stop();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+// Test that a VideoSource can be stopped and restarted.
+TEST_F(VideoSourceTest, StopRestart) {
+ // Initialize without constraints.
+ CreateVideoSource();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+
+ ASSERT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+ source_->Stop();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+
+ source_->Restart();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+
+ ASSERT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(2, renderer_.num_rendered_frames());
+
+ source_->Stop();
+}
+
+// Test start stop with a remote VideoSource - the video source that has a
+// RemoteVideoCapturer and takes video frames from FrameInput.
+TEST_F(VideoSourceTest, StartStopRemote) {
+ source_ = VideoSource::Create(channel_manager_.get(),
+ new webrtc::RemoteVideoCapturer(),
+ NULL);
+
+ ASSERT_TRUE(source_.get() != NULL);
+ EXPECT_TRUE(NULL != source_->GetVideoCapturer());
+
+ state_observer_.reset(new StateObserver(source_));
+ source_->RegisterObserver(state_observer_.get());
+ source_->AddSink(&renderer_);
+
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+
+ cricket::VideoRenderer* frameinput = source_->FrameInput();
+ cricket::WebRtcVideoFrame test_frame;
+ frameinput->SetSize(1280, 720, 0);
+ frameinput->RenderFrame(&test_frame);
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+ source_->GetVideoCapturer()->Stop();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+// Test that a VideoSource transition to kEnded if the capture device
+// fails.
+TEST_F(VideoSourceTest, CameraFailed) {
+ CreateVideoSource();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+
+ capturer_->SignalStateChange(capturer_, cricket::CS_FAILED);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+// Test that the capture output is CIF if we set max constraints to CIF.
+// and the capture device supports CIF.
+TEST_F(VideoSourceTest, MandatoryConstraintCif5Fps) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 5);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(352, format->width);
+ EXPECT_EQ(288, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the capture output is 720P if the camera support it and the
+// optional constraint is set to 720P.
+TEST_F(VideoSourceTest, MandatoryMinVgaOptional720P) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+ constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
+ constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
+ constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio,
+ 1280.0 / 720);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(1280, format->width);
+ EXPECT_EQ(720, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the capture output has aspect ratio 4:3 if a mandatory constraint
+// requires it, even if an optional constraint requests a higher resolution
+// that doesn't have this aspect ratio.
+TEST_F(VideoSourceTest, MandatoryAspectRatio4To3) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+ constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxAspectRatio,
+ 640.0 / 480);
+ constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(640, format->width);
+ EXPECT_EQ(480, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+
+// Test that the source state transition to kEnded if the mandatory aspect ratio
+// is set higher than supported.
+TEST_F(VideoSourceTest, MandatoryAspectRatioTooHigh) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, 2);
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+// Test that the source ignores an optional aspect ratio that is higher than
+// supported.
+TEST_F(VideoSourceTest, OptionalAspectRatioTooHigh) {
+ FakeConstraints constraints;
+ constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 2);
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ double aspect_ratio = static_cast<double>(format->width) / format->height;
+ EXPECT_LT(aspect_ratio, 2);
+}
+
+// Test that the source starts video with the default resolution if the
+// camera doesn't support capability enumeration and there are no constraints.
+TEST_F(VideoSourceTest, NoCameraCapability) {
+ capturer_->TestWithoutCameraFormats();
+
+ CreateVideoSource();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(640, format->width);
+ EXPECT_EQ(480, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the source can start the video and get the requested aspect ratio
+// if the camera doesn't support capability enumeration and the aspect ratio is
+// set.
+TEST_F(VideoSourceTest, NoCameraCapability16To9Ratio) {
+ capturer_->TestWithoutCameraFormats();
+
+ FakeConstraints constraints;
+ double requested_aspect_ratio = 640.0 / 360;
+ constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+ constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio,
+ requested_aspect_ratio);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ double aspect_ratio = static_cast<double>(format->width) / format->height;
+ EXPECT_LE(requested_aspect_ratio, aspect_ratio);
+}
+
+// Test that the source state transitions to kEnded if an unknown mandatory
+// constraint is found.
+TEST_F(VideoSourceTest, InvalidMandatoryConstraint) {
+ FakeConstraints constraints;
+ constraints.AddMandatory("weird key", 640);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+// Test that the source ignores an unknown optional constraint.
+TEST_F(VideoSourceTest, InvalidOptionalConstraint) {
+ FakeConstraints constraints;
+ constraints.AddOptional("weird key", 640);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+}
+
+TEST_F(VideoSourceTest, SetValidOptionValues) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kNoiseReduction, "false");
+
+ CreateVideoSource(&constraints);
+
+ bool value = true;
+ EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_FALSE(value);
+}
+
+TEST_F(VideoSourceTest, OptionNotSet) {
+ FakeConstraints constraints;
+ CreateVideoSource(&constraints);
+ bool value;
+ EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+}
+
+TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kNoiseReduction, true);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kNoiseReduction, false);
+
+ CreateVideoSource(&constraints);
+
+ bool value = false;
+ EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_TRUE(value);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionKeyOptional) {
+ FakeConstraints constraints;
+ constraints.AddOptional(
+ MediaConstraintsInterface::kNoiseReduction, false);
+ constraints.AddOptional("invalidKey", false);
+
+ CreateVideoSource(&constraints);
+
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ bool value = true;
+ EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_FALSE(value);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kNoiseReduction, false);
+ constraints.AddMandatory("invalidKey", false);
+
+ CreateVideoSource(&constraints);
+
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+ bool value;
+ EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+}
+
+TEST_F(VideoSourceTest, InvalidOptionValueOptional) {
+ FakeConstraints constraints;
+ constraints.AddOptional(
+ MediaConstraintsInterface::kNoiseReduction, "not a boolean");
+
+ CreateVideoSource(&constraints);
+
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ bool value = false;
+ EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+}
+
+TEST_F(VideoSourceTest, InvalidOptionValueMandatory) {
+ FakeConstraints constraints;
+ // Optional constraints should be ignored if the mandatory constraints fail.
+ constraints.AddOptional(
+ MediaConstraintsInterface::kNoiseReduction, "false");
+ // Values are case-sensitive and must be all lower-case.
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kNoiseReduction, "True");
+
+ CreateVideoSource(&constraints);
+
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+ bool value;
+ EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+}
+
+TEST_F(VideoSourceTest, MixedOptionsAndConstraints) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
+ constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 5);
+
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kNoiseReduction, false);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kNoiseReduction, true);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(352, format->width);
+ EXPECT_EQ(288, format->height);
+ EXPECT_EQ(30, format->framerate());
+
+ bool value = true;
+ EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_FALSE(value);
+}
+
+// Tests that the source starts video with the default resolution for
+// screencast if no constraint is set.
+TEST_F(VideoSourceTest, ScreencastResolutionNoConstraint) {
+ capturer_->TestWithoutCameraFormats();
+ capturer_->SetScreencast(true);
+
+ CreateVideoSource();
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(640, format->width);
+ EXPECT_EQ(480, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+// Tests that the source starts video with the max width and height set by
+// constraints for screencast.
+TEST_F(VideoSourceTest, ScreencastResolutionWithConstraint) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 480);
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 270);
+
+ capturer_->TestWithoutCameraFormats();
+ capturer_->SetScreencast(true);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(480, format->width);
+ EXPECT_EQ(270, format->height);
+ EXPECT_EQ(30, format->framerate());
+}
+
+TEST_F(VideoSourceTest, MandatorySubOneFpsConstraints) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 0.5);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+ kMaxWaitMs);
+ ASSERT_TRUE(capturer_->GetCaptureFormat() == NULL);
+}
+
+TEST_F(VideoSourceTest, OptionalSubOneFpsConstraints) {
+ FakeConstraints constraints;
+ constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 0.5);
+
+ CreateVideoSource(&constraints);
+ EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+ kMaxWaitMs);
+ const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+ ASSERT_TRUE(format != NULL);
+ EXPECT_EQ(30, format->framerate());
+}
+
diff --git a/talk/app/webrtc/videosourceinterface.h b/talk/app/webrtc/videosourceinterface.h
new file mode 100644
index 0000000000..a90e3d5a48
--- /dev/null
+++ b/talk/app/webrtc/videosourceinterface.h
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOSOURCEINTERFACE_H_
+#define TALK_APP_WEBRTC_VIDEOSOURCEINTERFACE_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/media/base/mediachannel.h"
+
+namespace webrtc {
+
+// VideoSourceInterface is a reference counted source used for VideoTracks.
+// The same source can be used in multiple VideoTracks.
+// The methods are only supposed to be called by the PeerConnection
+// implementation.
+class VideoSourceInterface : public MediaSourceInterface {
+ public:
+ // Get access to the source implementation of cricket::VideoCapturer.
+ // This can be used for receiving frames and state notifications.
+ // But it should not be used for starting or stopping capturing.
+ virtual cricket::VideoCapturer* GetVideoCapturer() = 0;
+
+ // Stop the video capturer.
+ virtual void Stop() = 0;
+ virtual void Restart() = 0;
+
+ // Adds |output| to the source to receive frames.
+ virtual void AddSink(cricket::VideoRenderer* output) = 0;
+ virtual void RemoveSink(cricket::VideoRenderer* output) = 0;
+ virtual const cricket::VideoOptions* options() const = 0;
+ virtual cricket::VideoRenderer* FrameInput() = 0;
+
+ protected:
+ virtual ~VideoSourceInterface() {}
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_VIDEOSOURCEINTERFACE_H_
diff --git a/talk/app/webrtc/videosourceproxy.h b/talk/app/webrtc/videosourceproxy.h
new file mode 100644
index 0000000000..677fa9cf0f
--- /dev/null
+++ b/talk/app/webrtc/videosourceproxy.h
@@ -0,0 +1,54 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOSOURCEPROXY_H_
+#define TALK_APP_WEBRTC_VIDEOSOURCEPROXY_H_
+
+#include "talk/app/webrtc/proxy.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+
+namespace webrtc {
+
+// VideoSourceProxy makes sure the real VideoSourceInterface implementation is
+// destroyed on the signaling thread and marshals all method calls to the
+// signaling thread.
+BEGIN_PROXY_MAP(VideoSource)
+ PROXY_CONSTMETHOD0(SourceState, state)
+ PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
+ PROXY_METHOD0(void, Stop)
+ PROXY_METHOD0(void, Restart)
+ PROXY_METHOD1(void, AddSink, cricket::VideoRenderer*)
+ PROXY_METHOD1(void, RemoveSink, cricket::VideoRenderer*)
+ PROXY_CONSTMETHOD0(const cricket::VideoOptions*, options)
+ PROXY_METHOD0(cricket::VideoRenderer*, FrameInput)
+ PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_VIDEOSOURCEPROXY_H_
diff --git a/talk/app/webrtc/videotrack.cc b/talk/app/webrtc/videotrack.cc
new file mode 100644
index 0000000000..7c78aea91f
--- /dev/null
+++ b/talk/app/webrtc/videotrack.cc
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/videotrack.h"
+
+#include <string>
+
+namespace webrtc {
+
+static const char kVideoTrackKind[] = "video";
+
+VideoTrack::VideoTrack(const std::string& label,
+ VideoSourceInterface* video_source)
+ : MediaStreamTrack<VideoTrackInterface>(label),
+ video_source_(video_source) {
+ if (video_source_)
+ video_source_->AddSink(&renderers_);
+}
+
+VideoTrack::~VideoTrack() {
+ if (video_source_)
+ video_source_->RemoveSink(&renderers_);
+}
+
+std::string VideoTrack::kind() const {
+ return kVideoTrackKind;
+}
+
+void VideoTrack::AddRenderer(VideoRendererInterface* renderer) {
+ renderers_.AddRenderer(renderer);
+}
+
+void VideoTrack::RemoveRenderer(VideoRendererInterface* renderer) {
+ renderers_.RemoveRenderer(renderer);
+}
+
+bool VideoTrack::set_enabled(bool enable) {
+ renderers_.SetEnabled(enable);
+ return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
+}
+
+rtc::scoped_refptr<VideoTrack> VideoTrack::Create(
+ const std::string& id, VideoSourceInterface* source) {
+ rtc::RefCountedObject<VideoTrack>* track =
+ new rtc::RefCountedObject<VideoTrack>(id, source);
+ return track;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/videotrack.h b/talk/app/webrtc/videotrack.h
new file mode 100644
index 0000000000..67a2163615
--- /dev/null
+++ b/talk/app/webrtc/videotrack.h
@@ -0,0 +1,64 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOTRACK_H_
+#define TALK_APP_WEBRTC_VIDEOTRACK_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamtrack.h"
+#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/app/webrtc/videotrackrenderers.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class VideoTrack : public MediaStreamTrack<VideoTrackInterface> {
+ public:
+ static rtc::scoped_refptr<VideoTrack> Create(
+ const std::string& label, VideoSourceInterface* source);
+
+ virtual void AddRenderer(VideoRendererInterface* renderer);
+ virtual void RemoveRenderer(VideoRendererInterface* renderer);
+ virtual VideoSourceInterface* GetSource() const {
+ return video_source_.get();
+ }
+ virtual bool set_enabled(bool enable);
+ virtual std::string kind() const;
+
+ protected:
+ VideoTrack(const std::string& id, VideoSourceInterface* video_source);
+ ~VideoTrack();
+
+ private:
+ VideoTrackRenderers renderers_;
+ rtc::scoped_refptr<VideoSourceInterface> video_source_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_VIDEOTRACK_H_
diff --git a/talk/app/webrtc/videotrack_unittest.cc b/talk/app/webrtc/videotrack_unittest.cc
new file mode 100644
index 0000000000..609ee80ffc
--- /dev/null
+++ b/talk/app/webrtc/videotrack_unittest.cc
@@ -0,0 +1,111 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/remotevideocapturer.h"
+#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/base/fakemediaengine.h"
+#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::VideoSource;
+using webrtc::VideoTrack;
+using webrtc::VideoTrackInterface;
+
+namespace {
+
+class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
+ public:
+ using cricket::WebRtcVideoFrame::SetRotation;
+};
+
+} // namespace
+
+class VideoTrackTest : public testing::Test {
+ public:
+ VideoTrackTest() {
+ static const char kVideoTrackId[] = "track_id";
+
+ channel_manager_.reset(new cricket::ChannelManager(
+ new cricket::FakeMediaEngine(), rtc::Thread::Current()));
+ EXPECT_TRUE(channel_manager_->Init());
+ video_track_ = VideoTrack::Create(
+ kVideoTrackId,
+ VideoSource::Create(channel_manager_.get(),
+ new webrtc::RemoteVideoCapturer(), NULL));
+ }
+
+ protected:
+ rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+ rtc::scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+// Test adding renderers to a video track and render to them by providing
+// frames to the source.
+TEST_F(VideoTrackTest, RenderVideo) {
+ // FakeVideoTrackRenderer register itself to |video_track_|
+ rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_1(
+ new FakeVideoTrackRenderer(video_track_.get()));
+
+ cricket::VideoRenderer* renderer_input =
+ video_track_->GetSource()->FrameInput();
+ ASSERT_FALSE(renderer_input == NULL);
+
+ cricket::WebRtcVideoFrame frame;
+ frame.InitToBlack(123, 123, 1, 1, 0);
+ renderer_input->RenderFrame(&frame);
+ EXPECT_EQ(1, renderer_1->num_rendered_frames());
+
+ EXPECT_EQ(123, renderer_1->width());
+ EXPECT_EQ(123, renderer_1->height());
+
+ // FakeVideoTrackRenderer register itself to |video_track_|
+ rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_2(
+ new FakeVideoTrackRenderer(video_track_.get()));
+
+ renderer_input->RenderFrame(&frame);
+
+ EXPECT_EQ(123, renderer_1->width());
+ EXPECT_EQ(123, renderer_1->height());
+ EXPECT_EQ(123, renderer_2->width());
+ EXPECT_EQ(123, renderer_2->height());
+
+ EXPECT_EQ(2, renderer_1->num_rendered_frames());
+ EXPECT_EQ(1, renderer_2->num_rendered_frames());
+
+ video_track_->RemoveRenderer(renderer_1.get());
+ renderer_input->RenderFrame(&frame);
+
+ EXPECT_EQ(2, renderer_1->num_rendered_frames());
+ EXPECT_EQ(2, renderer_2->num_rendered_frames());
+}
diff --git a/talk/app/webrtc/videotrackrenderers.cc b/talk/app/webrtc/videotrackrenderers.cc
new file mode 100644
index 0000000000..3c47c6edab
--- /dev/null
+++ b/talk/app/webrtc/videotrackrenderers.cc
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/videotrackrenderers.h"
+#include "talk/media/base/videoframe.h"
+
+namespace webrtc {
+
+VideoTrackRenderers::VideoTrackRenderers() : enabled_(true) {
+}
+
+VideoTrackRenderers::~VideoTrackRenderers() {
+}
+
+void VideoTrackRenderers::AddRenderer(VideoRendererInterface* renderer) {
+ if (!renderer) {
+ return;
+ }
+ rtc::CritScope cs(&critical_section_);
+ renderers_.insert(renderer);
+}
+
+void VideoTrackRenderers::RemoveRenderer(VideoRendererInterface* renderer) {
+ rtc::CritScope cs(&critical_section_);
+ renderers_.erase(renderer);
+}
+
+void VideoTrackRenderers::SetEnabled(bool enable) {
+ rtc::CritScope cs(&critical_section_);
+ enabled_ = enable;
+}
+
+bool VideoTrackRenderers::SetSize(int width, int height, int reserved) {
+ return true;
+}
+
+bool VideoTrackRenderers::RenderFrame(const cricket::VideoFrame* frame) {
+ rtc::CritScope cs(&critical_section_);
+ if (!enabled_) {
+ return true;
+ }
+ for (VideoRendererInterface* renderer : renderers_) {
+ renderer->RenderFrame(frame);
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/videotrackrenderers.h b/talk/app/webrtc/videotrackrenderers.h
new file mode 100644
index 0000000000..15274a1530
--- /dev/null
+++ b/talk/app/webrtc/videotrackrenderers.h
@@ -0,0 +1,67 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOTRACKRENDERERS_H_
+#define TALK_APP_WEBRTC_VIDEOTRACKRENDERERS_H_
+
+#include <set>
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/media/base/videorenderer.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+// Class used for rendering cricket::VideoFrames to multiple renderers of type
+// VideoRendererInterface.
+// Each VideoTrack owns a VideoTrackRenderers instance.
+// The class is thread safe. Rendering to the added VideoRendererInterfaces is
+// done on the same thread as the cricket::VideoRenderer.
+class VideoTrackRenderers : public cricket::VideoRenderer {
+ public:
+ VideoTrackRenderers();
+ ~VideoTrackRenderers();
+
+ // Implements cricket::VideoRenderer
+ virtual bool SetSize(int width, int height, int reserved);
+ virtual bool RenderFrame(const cricket::VideoFrame* frame);
+
+ void AddRenderer(VideoRendererInterface* renderer);
+ void RemoveRenderer(VideoRendererInterface* renderer);
+ void SetEnabled(bool enable);
+
+ private:
+ bool enabled_;
+ std::set<VideoRendererInterface*> renderers_;
+
+ rtc::CriticalSection critical_section_; // Protects the above variables
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_VIDEOTRACKRENDERERS_H_
diff --git a/talk/app/webrtc/webrtcsdp.cc b/talk/app/webrtc/webrtcsdp.cc
new file mode 100644
index 0000000000..3fa9a7d469
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp.cc
@@ -0,0 +1,3065 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/webrtcsdp.h"
+
+#include <limits.h>
+#include <stdio.h>
+#include <algorithm>
+#include <string>
+#include <vector>
+#include <ctype.h>
+
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/media/base/codec.h"
+#include "talk/media/base/constants.h"
+#include "talk/media/base/cryptoparams.h"
+#include "talk/media/base/rtputils.h"
+#include "talk/media/sctp/sctpdataengine.h"
+#include "webrtc/p2p/base/candidate.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/port.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/stringutils.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::Candidates;
+using cricket::ContentDescription;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::DataContentDescription;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::kCodecParamMaxBitrate;
+using cricket::kCodecParamMaxPTime;
+using cricket::kCodecParamMaxQuantization;
+using cricket::kCodecParamMinBitrate;
+using cricket::kCodecParamMinPTime;
+using cricket::kCodecParamPTime;
+using cricket::kCodecParamSPropStereo;
+using cricket::kCodecParamStartBitrate;
+using cricket::kCodecParamStereo;
+using cricket::kCodecParamUseInbandFec;
+using cricket::kCodecParamUseDtx;
+using cricket::kCodecParamSctpProtocol;
+using cricket::kCodecParamSctpStreams;
+using cricket::kCodecParamMaxAverageBitrate;
+using cricket::kCodecParamMaxPlaybackRate;
+using cricket::kCodecParamAssociatedPayloadType;
+using cricket::MediaContentDescription;
+using cricket::MediaType;
+using cricket::RtpHeaderExtension;
+using cricket::SsrcGroup;
+using cricket::StreamParams;
+using cricket::StreamParamsVec;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::VideoContentDescription;
+using rtc::SocketAddress;
+
+typedef std::vector<RtpHeaderExtension> RtpHeaderExtensions;
+
+namespace cricket {
+class SessionDescription;
+}
+
+namespace webrtc {
+
+// Line type
+// RFC 4566
+// An SDP session description consists of a number of lines of text of
+// the form:
+// <type>=<value>
+// where <type> MUST be exactly one case-significant character.
+static const int kLinePrefixLength = 2; // Lenght of <type>=
+static const char kLineTypeVersion = 'v';
+static const char kLineTypeOrigin = 'o';
+static const char kLineTypeSessionName = 's';
+static const char kLineTypeSessionInfo = 'i';
+static const char kLineTypeSessionUri = 'u';
+static const char kLineTypeSessionEmail = 'e';
+static const char kLineTypeSessionPhone = 'p';
+static const char kLineTypeSessionBandwidth = 'b';
+static const char kLineTypeTiming = 't';
+static const char kLineTypeRepeatTimes = 'r';
+static const char kLineTypeTimeZone = 'z';
+static const char kLineTypeEncryptionKey = 'k';
+static const char kLineTypeMedia = 'm';
+static const char kLineTypeConnection = 'c';
+static const char kLineTypeAttributes = 'a';
+
+// Attributes
+static const char kAttributeGroup[] = "group";
+static const char kAttributeMid[] = "mid";
+static const char kAttributeRtcpMux[] = "rtcp-mux";
+static const char kAttributeSsrc[] = "ssrc";
+static const char kSsrcAttributeCname[] = "cname";
+static const char kAttributeExtmap[] = "extmap";
+// draft-alvestrand-mmusic-msid-01
+// a=msid-semantic: WMS
+static const char kAttributeMsidSemantics[] = "msid-semantic";
+static const char kMediaStreamSemantic[] = "WMS";
+static const char kSsrcAttributeMsid[] = "msid";
+static const char kDefaultMsid[] = "default";
+static const char kSsrcAttributeMslabel[] = "mslabel";
+static const char kSSrcAttributeLabel[] = "label";
+static const char kAttributeSsrcGroup[] = "ssrc-group";
+static const char kAttributeCrypto[] = "crypto";
+static const char kAttributeCandidate[] = "candidate";
+static const char kAttributeCandidateTyp[] = "typ";
+static const char kAttributeCandidateRaddr[] = "raddr";
+static const char kAttributeCandidateRport[] = "rport";
+static const char kAttributeCandidateUsername[] = "username";
+static const char kAttributeCandidatePassword[] = "password";
+static const char kAttributeCandidateGeneration[] = "generation";
+static const char kAttributeFingerprint[] = "fingerprint";
+static const char kAttributeSetup[] = "setup";
+static const char kAttributeFmtp[] = "fmtp";
+static const char kAttributeRtpmap[] = "rtpmap";
+static const char kAttributeSctpmap[] = "sctpmap";
+static const char kAttributeRtcp[] = "rtcp";
+static const char kAttributeIceUfrag[] = "ice-ufrag";
+static const char kAttributeIcePwd[] = "ice-pwd";
+static const char kAttributeIceLite[] = "ice-lite";
+static const char kAttributeIceOption[] = "ice-options";
+static const char kAttributeSendOnly[] = "sendonly";
+static const char kAttributeRecvOnly[] = "recvonly";
+static const char kAttributeRtcpFb[] = "rtcp-fb";
+static const char kAttributeSendRecv[] = "sendrecv";
+static const char kAttributeInactive[] = "inactive";
+// draft-ietf-mmusic-sctp-sdp-07
+// a=sctp-port
+static const char kAttributeSctpPort[] = "sctp-port";
+
+// Experimental flags
+static const char kAttributeXGoogleFlag[] = "x-google-flag";
+static const char kValueConference[] = "conference";
+
+// Candidate
+static const char kCandidateHost[] = "host";
+static const char kCandidateSrflx[] = "srflx";
+// TODO: How to map the prflx with circket candidate type
+// static const char kCandidatePrflx[] = "prflx";
+static const char kCandidateRelay[] = "relay";
+static const char kTcpCandidateType[] = "tcptype";
+
+static const char kSdpDelimiterEqual = '=';
+static const char kSdpDelimiterSpace = ' ';
+static const char kSdpDelimiterColon = ':';
+static const char kSdpDelimiterSemicolon = ';';
+static const char kSdpDelimiterSlash = '/';
+static const char kNewLine = '\n';
+static const char kReturn = '\r';
+static const char kLineBreak[] = "\r\n";
+
+// TODO: Generate the Session and Time description
+// instead of hardcoding.
+static const char kSessionVersion[] = "v=0";
+// RFC 4566
+static const char kSessionOriginUsername[] = "-";
+static const char kSessionOriginSessionId[] = "0";
+static const char kSessionOriginSessionVersion[] = "0";
+static const char kSessionOriginNettype[] = "IN";
+static const char kSessionOriginAddrtype[] = "IP4";
+static const char kSessionOriginAddress[] = "127.0.0.1";
+static const char kSessionName[] = "s=-";
+static const char kTimeDescription[] = "t=0 0";
+static const char kAttrGroup[] = "a=group:BUNDLE";
+static const char kConnectionNettype[] = "IN";
+static const char kConnectionIpv4Addrtype[] = "IP4";
+static const char kConnectionIpv6Addrtype[] = "IP6";
+static const char kMediaTypeVideo[] = "video";
+static const char kMediaTypeAudio[] = "audio";
+static const char kMediaTypeData[] = "application";
+static const char kMediaPortRejected[] = "0";
+// draft-ietf-mmusic-trickle-ice-01
+// When no candidates have been gathered, set the connection
+// address to IP6 ::.
+// TODO(perkj): FF can not parse IP6 ::. See http://crbug/430333
+// Use IPV4 per default.
+static const char kDummyAddress[] = "0.0.0.0";
+static const char kDummyPort[] = "9";
+// RFC 3556
+static const char kApplicationSpecificMaximum[] = "AS";
+
+static const int kDefaultVideoClockrate = 90000;
+
+// ISAC special-case.
+static const char kIsacCodecName[] = "ISAC"; // From webrtcvoiceengine.cc
+static const int kIsacWbDefaultRate = 32000; // From acm_common_defs.h
+static const int kIsacSwbDefaultRate = 56000; // From acm_common_defs.h
+
+static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel";
+
+// RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload
+// types.
+const int kWildcardPayloadType = -1;
+
+struct SsrcInfo {
+ SsrcInfo()
+ : msid_identifier(kDefaultMsid),
+ // TODO(ronghuawu): What should we do if the appdata doesn't appear?
+ // Create random string (which will be used as track label later)?
+ msid_appdata(rtc::CreateRandomString(8)) {
+ }
+ uint32_t ssrc_id;
+ std::string cname;
+ std::string msid_identifier;
+ std::string msid_appdata;
+
+ // For backward compatibility.
+ // TODO(ronghuawu): Remove below 2 fields once all the clients support msid.
+ std::string label;
+ std::string mslabel;
+};
+typedef std::vector<SsrcInfo> SsrcInfoVec;
+typedef std::vector<SsrcGroup> SsrcGroupVec;
+
+template <class T>
+static void AddFmtpLine(const T& codec, std::string* message);
+static void BuildMediaDescription(const ContentInfo* content_info,
+ const TransportInfo* transport_info,
+ const MediaType media_type,
+ const std::vector<Candidate>& candidates,
+ std::string* message);
+static void BuildSctpContentAttributes(std::string* message, int sctp_port);
+static void BuildRtpContentAttributes(
+ const MediaContentDescription* media_desc,
+ const MediaType media_type,
+ std::string* message);
+static void BuildRtpMap(const MediaContentDescription* media_desc,
+ const MediaType media_type,
+ std::string* message);
+static void BuildCandidate(const std::vector<Candidate>& candidates,
+ std::string* message);
+static void BuildIceOptions(const std::vector<std::string>& transport_options,
+ std::string* message);
+static bool IsRtp(const std::string& protocol);
+static bool IsDtlsSctp(const std::string& protocol);
+static bool ParseSessionDescription(const std::string& message, size_t* pos,
+ std::string* session_id,
+ std::string* session_version,
+ TransportDescription* session_td,
+ RtpHeaderExtensions* session_extmaps,
+ cricket::SessionDescription* desc,
+ SdpParseError* error);
+static bool ParseGroupAttribute(const std::string& line,
+ cricket::SessionDescription* desc,
+ SdpParseError* error);
+static bool ParseMediaDescription(
+ const std::string& message,
+ const TransportDescription& session_td,
+ const RtpHeaderExtensions& session_extmaps,
+ size_t* pos, cricket::SessionDescription* desc,
+ std::vector<JsepIceCandidate*>* candidates,
+ SdpParseError* error);
+static bool ParseContent(const std::string& message,
+ const MediaType media_type,
+ int mline_index,
+ const std::string& protocol,
+ const std::vector<int>& codec_preference,
+ size_t* pos,
+ std::string* content_name,
+ MediaContentDescription* media_desc,
+ TransportDescription* transport,
+ std::vector<JsepIceCandidate*>* candidates,
+ SdpParseError* error);
+static bool ParseSsrcAttribute(const std::string& line,
+ SsrcInfoVec* ssrc_infos,
+ SdpParseError* error);
+static bool ParseSsrcGroupAttribute(const std::string& line,
+ SsrcGroupVec* ssrc_groups,
+ SdpParseError* error);
+static bool ParseCryptoAttribute(const std::string& line,
+ MediaContentDescription* media_desc,
+ SdpParseError* error);
+static bool ParseRtpmapAttribute(const std::string& line,
+ const MediaType media_type,
+ const std::vector<int>& codec_preference,
+ MediaContentDescription* media_desc,
+ SdpParseError* error);
+static bool ParseFmtpAttributes(const std::string& line,
+ const MediaType media_type,
+ MediaContentDescription* media_desc,
+ SdpParseError* error);
+static bool ParseFmtpParam(const std::string& line, std::string* parameter,
+ std::string* value, SdpParseError* error);
+static bool ParseCandidate(const std::string& message, Candidate* candidate,
+ SdpParseError* error, bool is_raw);
+static bool ParseRtcpFbAttribute(const std::string& line,
+ const MediaType media_type,
+ MediaContentDescription* media_desc,
+ SdpParseError* error);
+static bool ParseIceOptions(const std::string& line,
+ std::vector<std::string>* transport_options,
+ SdpParseError* error);
+static bool ParseExtmap(const std::string& line,
+ RtpHeaderExtension* extmap,
+ SdpParseError* error);
+static bool ParseFingerprintAttribute(const std::string& line,
+ rtc::SSLFingerprint** fingerprint,
+ SdpParseError* error);
+static bool ParseDtlsSetup(const std::string& line,
+ cricket::ConnectionRole* role,
+ SdpParseError* error);
+
+// Helper functions
+
+// Below ParseFailed*** functions output the line that caused the parsing
+// failure and the detailed reason (|description|) of the failure to |error|.
+// The functions always return false so that they can be used directly in the
+// following way when error happens:
+// "return ParseFailed***(...);"
+
+// The line starting at |line_start| of |message| is the failing line.
+// The reason for the failure should be provided in the |description|.
+// An example of a description could be "unknown character".
+static bool ParseFailed(const std::string& message,
+ size_t line_start,
+ const std::string& description,
+ SdpParseError* error) {
+ // Get the first line of |message| from |line_start|.
+ std::string first_line;
+ size_t line_end = message.find(kNewLine, line_start);
+ if (line_end != std::string::npos) {
+ if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
+ --line_end;
+ }
+ first_line = message.substr(line_start, (line_end - line_start));
+ } else {
+ first_line = message.substr(line_start);
+ }
+
+ if (error) {
+ error->line = first_line;
+ error->description = description;
+ }
+ LOG(LS_ERROR) << "Failed to parse: \"" << first_line
+ << "\". Reason: " << description;
+ return false;
+}
+
+// |line| is the failing line. The reason for the failure should be
+// provided in the |description|.
+static bool ParseFailed(const std::string& line,
+ const std::string& description,
+ SdpParseError* error) {
+ return ParseFailed(line, 0, description, error);
+}
+
+// Parses failure where the failing SDP line isn't know or there are multiple
+// failing lines.
+static bool ParseFailed(const std::string& description,
+ SdpParseError* error) {
+ return ParseFailed("", description, error);
+}
+
+// |line| is the failing line. The failure is due to the fact that |line|
+// doesn't have |expected_fields| fields.
+static bool ParseFailedExpectFieldNum(const std::string& line,
+ int expected_fields,
+ SdpParseError* error) {
+ std::ostringstream description;
+ description << "Expects " << expected_fields << " fields.";
+ return ParseFailed(line, description.str(), error);
+}
+
+// |line| is the failing line. The failure is due to the fact that |line| has
+// less than |expected_min_fields| fields.
+static bool ParseFailedExpectMinFieldNum(const std::string& line,
+ int expected_min_fields,
+ SdpParseError* error) {
+ std::ostringstream description;
+ description << "Expects at least " << expected_min_fields << " fields.";
+ return ParseFailed(line, description.str(), error);
+}
+
+// |line| is the failing line. The failure is due to the fact that it failed to
+// get the value of |attribute|.
+static bool ParseFailedGetValue(const std::string& line,
+ const std::string& attribute,
+ SdpParseError* error) {
+ std::ostringstream description;
+ description << "Failed to get the value of attribute: " << attribute;
+ return ParseFailed(line, description.str(), error);
+}
+
+// The line starting at |line_start| of |message| is the failing line. The
+// failure is due to the line type (e.g. the "m" part of the "m-line")
+// not matching what is expected. The expected line type should be
+// provided as |line_type|.
+static bool ParseFailedExpectLine(const std::string& message,
+ size_t line_start,
+ const char line_type,
+ const std::string& line_value,
+ SdpParseError* error) {
+ std::ostringstream description;
+ description << "Expect line: " << line_type << "=" << line_value;
+ return ParseFailed(message, line_start, description.str(), error);
+}
+
+static bool AddLine(const std::string& line, std::string* message) {
+ if (!message)
+ return false;
+
+ message->append(line);
+ message->append(kLineBreak);
+ return true;
+}
+
+static bool GetLine(const std::string& message,
+ size_t* pos,
+ std::string* line) {
+ size_t line_begin = *pos;
+ size_t line_end = message.find(kNewLine, line_begin);
+ if (line_end == std::string::npos) {
+ return false;
+ }
+ // Update the new start position
+ *pos = line_end + 1;
+ if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
+ --line_end;
+ }
+ *line = message.substr(line_begin, (line_end - line_begin));
+ const char* cline = line->c_str();
+ // RFC 4566
+ // An SDP session description consists of a number of lines of text of
+ // the form:
+ // <type>=<value>
+ // where <type> MUST be exactly one case-significant character and
+ // <value> is structured text whose format depends on <type>.
+ // Whitespace MUST NOT be used on either side of the "=" sign.
+ if (line->length() < 3 ||
+ !islower(cline[0]) ||
+ cline[1] != kSdpDelimiterEqual ||
+ cline[2] == kSdpDelimiterSpace) {
+ *pos = line_begin;
+ return false;
+ }
+ return true;
+}
+
+// Init |os| to "|type|=|value|".
+static void InitLine(const char type,
+ const std::string& value,
+ std::ostringstream* os) {
+ os->str("");
+ *os << type << kSdpDelimiterEqual << value;
+}
+
+// Init |os| to "a=|attribute|".
+static void InitAttrLine(const std::string& attribute, std::ostringstream* os) {
+ InitLine(kLineTypeAttributes, attribute, os);
+}
+
+// Writes a SDP attribute line based on |attribute| and |value| to |message|.
+static void AddAttributeLine(const std::string& attribute, int value,
+ std::string* message) {
+ std::ostringstream os;
+ InitAttrLine(attribute, &os);
+ os << kSdpDelimiterColon << value;
+ AddLine(os.str(), message);
+}
+
+static bool IsLineType(const std::string& message,
+ const char type,
+ size_t line_start) {
+ if (message.size() < line_start + kLinePrefixLength) {
+ return false;
+ }
+ const char* cmessage = message.c_str();
+ return (cmessage[line_start] == type &&
+ cmessage[line_start + 1] == kSdpDelimiterEqual);
+}
+
+static bool IsLineType(const std::string& line,
+ const char type) {
+ return IsLineType(line, type, 0);
+}
+
+static bool GetLineWithType(const std::string& message, size_t* pos,
+ std::string* line, const char type) {
+ if (!IsLineType(message, type, *pos)) {
+ return false;
+ }
+
+ if (!GetLine(message, pos, line))
+ return false;
+
+ return true;
+}
+
+static bool HasAttribute(const std::string& line,
+ const std::string& attribute) {
+ return (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0);
+}
+
+static bool AddSsrcLine(uint32_t ssrc_id,
+ const std::string& attribute,
+ const std::string& value,
+ std::string* message) {
+ // RFC 5576
+ // a=ssrc:<ssrc-id> <attribute>:<value>
+ std::ostringstream os;
+ InitAttrLine(kAttributeSsrc, &os);
+ os << kSdpDelimiterColon << ssrc_id << kSdpDelimiterSpace
+ << attribute << kSdpDelimiterColon << value;
+ return AddLine(os.str(), message);
+}
+
+// Get value only from <attribute>:<value>.
+static bool GetValue(const std::string& message, const std::string& attribute,
+ std::string* value, SdpParseError* error) {
+ std::string leftpart;
+ if (!rtc::tokenize_first(message, kSdpDelimiterColon, &leftpart, value)) {
+ return ParseFailedGetValue(message, attribute, error);
+ }
+ // The left part should end with the expected attribute.
+ if (leftpart.length() < attribute.length() ||
+ leftpart.compare(leftpart.length() - attribute.length(),
+ attribute.length(), attribute) != 0) {
+ return ParseFailedGetValue(message, attribute, error);
+ }
+ return true;
+}
+
// Lowers one character, avoiding undefined behavior: ::tolower requires a
// value representable as unsigned char, but a plain (signed) char holding a
// byte >= 0x80 is negative.
static char ToLowerChar(char c) {
  return static_cast<char>(::tolower(static_cast<unsigned char>(c)));
}

// Returns true when |str2| occurs within |str1|, compared case-insensitively.
// Both strings are taken by value because they are lowered in place.
static bool CaseInsensitiveFind(std::string str1, std::string str2) {
  std::transform(str1.begin(), str1.end(), str1.begin(), ToLowerChar);
  std::transform(str2.begin(), str2.end(), str2.begin(), ToLowerChar);
  return str1.find(str2) != std::string::npos;
}
+
+template <class T>
+static bool GetValueFromString(const std::string& line,
+ const std::string& s,
+ T* t,
+ SdpParseError* error) {
+ if (!rtc::FromString(s, t)) {
+ std::ostringstream description;
+ description << "Invalid value: " << s << ".";
+ return ParseFailed(line, description.str(), error);
+ }
+ return true;
+}
+
+static bool GetPayloadTypeFromString(const std::string& line,
+ const std::string& s,
+ int* payload_type,
+ SdpParseError* error) {
+ return GetValueFromString(line, s, payload_type, error) &&
+ cricket::IsValidRtpPayloadType(*payload_type);
+}
+
// Builds StreamParams entries in |tracks| from the parsed "a=ssrc" info in
// |ssrc_infos|. Infos without a cname are skipped; ssrcs sharing a track id
// are merged into a single StreamParams entry.
void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos,
                               StreamParamsVec* tracks) {
  ASSERT(tracks != NULL);
  for (SsrcInfoVec::const_iterator ssrc_info = ssrc_infos.begin();
       ssrc_info != ssrc_infos.end(); ++ssrc_info) {
    // A track without a cname cannot be associated; skip it.
    if (ssrc_info->cname.empty()) {
      continue;
    }

    std::string sync_label;
    std::string track_id;
    if (ssrc_info->msid_identifier == kDefaultMsid &&
        !ssrc_info->mslabel.empty()) {
      // If there's no msid and there's mslabel, we consider this is a sdp from
      // a older version of client that doesn't support msid.
      // In that case, we use the mslabel and label to construct the track.
      sync_label = ssrc_info->mslabel;
      track_id = ssrc_info->label;
    } else {
      sync_label = ssrc_info->msid_identifier;
      // The appdata consists of the "id" attribute of a MediaStreamTrack, which
      // is corresponding to the "id" attribute of StreamParams.
      track_id = ssrc_info->msid_appdata;
    }
    if (sync_label.empty() || track_id.empty()) {
      ASSERT(false);
      continue;
    }

    // Look for an existing track with the same id so that multiple ssrcs
    // (e.g. a RTX/FID pair) land on one StreamParams.
    StreamParamsVec::iterator track = tracks->begin();
    for (; track != tracks->end(); ++track) {
      if (track->id == track_id) {
        break;
      }
    }
    if (track == tracks->end()) {
      // If we don't find an existing track, create a new one.
      tracks->push_back(StreamParams());
      track = tracks->end() - 1;
    }
    track->add_ssrc(ssrc_info->ssrc_id);
    track->cname = ssrc_info->cname;
    track->sync_label = sync_label;
    track->id = track_id;
  }
}
+
+void GetMediaStreamLabels(const ContentInfo* content,
+ std::set<std::string>* labels) {
+ const MediaContentDescription* media_desc =
+ static_cast<const MediaContentDescription*>(
+ content->description);
+ const cricket::StreamParamsVec& streams = media_desc->streams();
+ for (cricket::StreamParamsVec::const_iterator it = streams.begin();
+ it != streams.end(); ++it) {
+ labels->insert(it->sync_label);
+ }
+}
+
// RFC 5245
// It is RECOMMENDED that default candidates be chosen based on the
// likelihood of those candidates to work with the peer that is being
// contacted. It is RECOMMENDED that relayed > reflexive > host.
// Numeric preference used by GetDefaultDestination below when picking the
// default destination candidate; a higher value wins.
static const int kPreferenceUnknown = 0;
static const int kPreferenceHost = 1;
static const int kPreferenceReflexive = 2;
static const int kPreferenceRelayed = 3;
+
+static int GetCandidatePreferenceFromType(const std::string& type) {
+ int preference = kPreferenceUnknown;
+ if (type == cricket::LOCAL_PORT_TYPE) {
+ preference = kPreferenceHost;
+ } else if (type == cricket::STUN_PORT_TYPE) {
+ preference = kPreferenceReflexive;
+ } else if (type == cricket::RELAY_PORT_TYPE) {
+ preference = kPreferenceRelayed;
+ } else {
+ ASSERT(false);
+ }
+ return preference;
+}
+
// Get ip and port of the default destination from the |candidates| with the
// given value of |component_id|. The default candidate should be the one most
// likely to work, typically IPv4 relay.
// RFC 5245
// The value of |component_id| currently supported are 1 (RTP) and 2 (RTCP).
// Outputs fall back to the dummy "0.0.0.0"-style placeholders when no UDP
// candidate of the requested component is found.
// TODO: Decide the default destination in webrtcsession and
// pass it down via SessionDescription.
static void GetDefaultDestination(
    const std::vector<Candidate>& candidates,
    int component_id, std::string* port,
    std::string* ip, std::string* addr_type) {
  *addr_type = kConnectionIpv4Addrtype;
  *port = kDummyPort;
  *ip = kDummyAddress;
  // Track the best candidate seen so far: its type preference and address
  // family. IPv4 is favored over IPv6 once selected (see below).
  int current_preference = kPreferenceUnknown;
  int current_family = AF_UNSPEC;
  for (std::vector<Candidate>::const_iterator it = candidates.begin();
       it != candidates.end(); ++it) {
    if (it->component() != component_id) {
      continue;
    }
    // Default destination should be UDP only.
    if (it->protocol() != cricket::UDP_PROTOCOL_NAME) {
      continue;
    }
    const int preference = GetCandidatePreferenceFromType(it->type());
    const int family = it->address().ipaddr().family();
    // See if this candidate is more preferable then the current one if it's the
    // same family. Or if the current family is IPv4 already so we could safely
    // ignore all IPv6 ones. WebRTC bug 4269.
    // http://code.google.com/p/webrtc/issues/detail?id=4269
    if ((preference <= current_preference && current_family == family) ||
        (current_family == AF_INET && family == AF_INET6)) {
      continue;
    }
    if (family == AF_INET) {
      addr_type->assign(kConnectionIpv4Addrtype);
    } else if (family == AF_INET6) {
      addr_type->assign(kConnectionIpv6Addrtype);
    }
    current_preference = preference;
    current_family = family;
    *port = it->address().PortAsString();
    *ip = it->address().ipaddr().ToString();
  }
}
+
// Update |mline|'s default destination and append a c line after it.
// The m-line's port field is overwritten in place with the default RTP
// candidate's port; a "c=" connection line with the candidate's address is
// appended right after. Both lines are then appended to |message|.
static void UpdateMediaDefaultDestination(
    const std::vector<Candidate>& candidates,
    const std::string& mline,
    std::string* message) {
  std::string new_lines;
  AddLine(mline, &new_lines);
  // RFC 4566
  // m=<media> <port> <proto> <fmt> ...
  std::vector<std::string> fields;
  rtc::split(mline, kSdpDelimiterSpace, &fields);
  if (fields.size() < 3) {
    // Malformed m-line: nothing is appended to |message| in this case.
    return;
  }

  std::ostringstream os;
  std::string rtp_port, rtp_ip, addr_type;
  GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTP,
                        &rtp_port, &rtp_ip, &addr_type);
  // Found default RTP candidate.
  // RFC 5245
  // The default candidates are added to the SDP as the default
  // destination for media. For streams based on RTP, this is done by
  // placing the IP address and port of the RTP candidate into the c and m
  // lines, respectively.
  // Update the port in the m line.
  // If this is a m-line with port equal to 0, we don't change it.
  if (fields[1] != kMediaPortRejected) {
    // The port is the second field; replace it at its exact byte offset
    // (media-type length + 1 for the separating space).
    new_lines.replace(fields[0].size() + 1,
                      fields[1].size(),
                      rtp_port);
  }
  // Add the c line.
  // RFC 4566
  // c=<nettype> <addrtype> <connection-address>
  InitLine(kLineTypeConnection, kConnectionNettype, &os);
  os << " " << addr_type << " " << rtp_ip;
  AddLine(os.str(), &new_lines);
  message->append(new_lines);
}
+
+// Gets "a=rtcp" line if found default RTCP candidate from |candidates|.
+static std::string GetRtcpLine(const std::vector<Candidate>& candidates) {
+ std::string rtcp_line, rtcp_port, rtcp_ip, addr_type;
+ GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTCP,
+ &rtcp_port, &rtcp_ip, &addr_type);
+ // Found default RTCP candidate.
+ // RFC 5245
+ // If the agent is utilizing RTCP, it MUST encode the RTCP candidate
+ // using the a=rtcp attribute as defined in RFC 3605.
+
+ // RFC 3605
+ // rtcp-attribute = "a=rtcp:" port [nettype space addrtype space
+ // connection-address] CRLF
+ std::ostringstream os;
+ InitAttrLine(kAttributeRtcp, &os);
+ os << kSdpDelimiterColon
+ << rtcp_port << " "
+ << kConnectionNettype << " "
+ << addr_type << " "
+ << rtcp_ip;
+ rtcp_line = os.str();
+ return rtcp_line;
+}
+
+// Get candidates according to the mline index from SessionDescriptionInterface.
+static void GetCandidatesByMindex(const SessionDescriptionInterface& desci,
+ int mline_index,
+ std::vector<Candidate>* candidates) {
+ if (!candidates) {
+ return;
+ }
+ const IceCandidateCollection* cc = desci.candidates(mline_index);
+ for (size_t i = 0; i < cc->count(); ++i) {
+ const IceCandidateInterface* candidate = cc->at(i);
+ candidates->push_back(candidate->candidate());
+ }
+}
+
// Serializes |jdesc| into a full SDP message string: session-level lines
// (v=/o=/s=/t=), the optional BUNDLE group, the msid-semantic line, then one
// media section per content, in the contents' original order. Returns an
// empty string when the description is missing.
std::string SdpSerialize(const JsepSessionDescription& jdesc) {
  const cricket::SessionDescription* desc = jdesc.description();
  if (!desc) {
    return "";
  }

  std::string message;

  // Session Description.
  AddLine(kSessionVersion, &message);
  // Session Origin
  // RFC 4566
  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
  // <unicast-address>
  std::ostringstream os;
  InitLine(kLineTypeOrigin, kSessionOriginUsername, &os);
  // Fall back to the fixed defaults when the jdesc carries no id/version.
  const std::string& session_id = jdesc.session_id().empty() ?
      kSessionOriginSessionId : jdesc.session_id();
  const std::string& session_version = jdesc.session_version().empty() ?
      kSessionOriginSessionVersion : jdesc.session_version();
  os << " " << session_id << " " << session_version << " "
     << kSessionOriginNettype << " " << kSessionOriginAddrtype << " "
     << kSessionOriginAddress;
  AddLine(os.str(), &message);
  AddLine(kSessionName, &message);

  // Time Description.
  AddLine(kTimeDescription, &message);

  // Group
  if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
    std::string group_line = kAttrGroup;
    const cricket::ContentGroup* group =
        desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
    ASSERT(group != NULL);
    const cricket::ContentNames& content_names = group->content_names();
    for (cricket::ContentNames::const_iterator it = content_names.begin();
         it != content_names.end(); ++it) {
      group_line.append(" ");
      group_line.append(*it);
    }
    AddLine(group_line, &message);
  }

  // MediaStream semantics
  InitAttrLine(kAttributeMsidSemantics, &os);
  os << kSdpDelimiterColon << " " << kMediaStreamSemantic;

  // Collect the union of stream labels from the first audio and first video
  // contents; std::set both dedupes and orders them.
  std::set<std::string> media_stream_labels;
  const ContentInfo* audio_content = GetFirstAudioContent(desc);
  if (audio_content)
    GetMediaStreamLabels(audio_content, &media_stream_labels);

  const ContentInfo* video_content = GetFirstVideoContent(desc);
  if (video_content)
    GetMediaStreamLabels(video_content, &media_stream_labels);

  for (std::set<std::string>::const_iterator it =
      media_stream_labels.begin(); it != media_stream_labels.end(); ++it) {
    os << " " << *it;
  }
  AddLine(os.str(), &message);

  // Preserve the order of the media contents.
  int mline_index = -1;
  for (cricket::ContentInfos::const_iterator it = desc->contents().begin();
       it != desc->contents().end(); ++it) {
    const MediaContentDescription* mdesc =
      static_cast<const MediaContentDescription*>(it->description);
    std::vector<Candidate> candidates;
    GetCandidatesByMindex(jdesc, ++mline_index, &candidates);
    BuildMediaDescription(&*it,
                          desc->GetTransportInfoByName(it->name),
                          mdesc->type(),
                          candidates,
                          &message);
  }
  return message;
}
+
+// Serializes the passed in IceCandidateInterface to a SDP string.
+// candidate - The candidate to be serialized.
+std::string SdpSerializeCandidate(
+ const IceCandidateInterface& candidate) {
+ std::string message;
+ std::vector<cricket::Candidate> candidates;
+ candidates.push_back(candidate.candidate());
+ BuildCandidate(candidates, &message);
+ // From WebRTC draft section 4.8.1.1 candidate-attribute will be
+ // just candidate:<candidate> not a=candidate:<blah>CRLF
+ ASSERT(message.find("a=") == 0);
+ message.erase(0, 2);
+ ASSERT(message.find(kLineBreak) == message.size() - 2);
+ message.resize(message.size() - 2);
+ return message;
+}
+
+bool SdpDeserialize(const std::string& message,
+ JsepSessionDescription* jdesc,
+ SdpParseError* error) {
+ std::string session_id;
+ std::string session_version;
+ TransportDescription session_td("", "");
+ RtpHeaderExtensions session_extmaps;
+ cricket::SessionDescription* desc = new cricket::SessionDescription();
+ std::vector<JsepIceCandidate*> candidates;
+ size_t current_pos = 0;
+
+ // Session Description
+ if (!ParseSessionDescription(message, &current_pos, &session_id,
+ &session_version, &session_td, &session_extmaps,
+ desc, error)) {
+ delete desc;
+ return false;
+ }
+
+ // Media Description
+ if (!ParseMediaDescription(message, session_td, session_extmaps, &current_pos,
+ desc, &candidates, error)) {
+ delete desc;
+ for (std::vector<JsepIceCandidate*>::const_iterator
+ it = candidates.begin(); it != candidates.end(); ++it) {
+ delete *it;
+ }
+ return false;
+ }
+
+ jdesc->Initialize(desc, session_id, session_version);
+
+ for (std::vector<JsepIceCandidate*>::const_iterator
+ it = candidates.begin(); it != candidates.end(); ++it) {
+ jdesc->AddCandidate(*it);
+ delete *it;
+ }
+ return true;
+}
+
+bool SdpDeserializeCandidate(const std::string& message,
+ JsepIceCandidate* jcandidate,
+ SdpParseError* error) {
+ ASSERT(jcandidate != NULL);
+ Candidate candidate;
+ if (!ParseCandidate(message, &candidate, error, true)) {
+ return false;
+ }
+ jcandidate->SetCandidate(candidate);
+ return true;
+}
+
// Parses one RFC 5245 candidate line into |candidate|. |message| must hold a
// single line, either the raw "candidate:<...>" form (|is_raw| true, e.g.
// trickled candidates) or the full "a=candidate:<...>" SDP attribute line.
// Fills |error| and returns false on any malformed field.
bool ParseCandidate(const std::string& message, Candidate* candidate,
                    SdpParseError* error, bool is_raw) {
  ASSERT(candidate != NULL);

  // Get the first line from |message|.
  std::string first_line = message;
  size_t pos = 0;
  GetLine(message, &pos, &first_line);

  // Makes sure |message| contains only one line.
  if (message.size() > first_line.size()) {
    std::string left, right;
    if (rtc::tokenize_first(message, kNewLine, &left, &right) &&
        !right.empty()) {
      return ParseFailed(message, 0, "Expect one line only", error);
    }
  }

  // From WebRTC draft section 4.8.1.1 candidate-attribute should be
  // candidate:<candidate> when trickled, but we still support
  // a=candidate:<blah>CRLF for backward compatibility and for parsing a line
  // from the SDP.
  if (IsLineType(first_line, kLineTypeAttributes)) {
    first_line = first_line.substr(kLinePrefixLength);
  }

  std::string attribute_candidate;
  std::string candidate_value;

  // |first_line| must be in the form of "candidate:<value>".
  if (!rtc::tokenize_first(first_line, kSdpDelimiterColon, &attribute_candidate,
                           &candidate_value) ||
      attribute_candidate != kAttributeCandidate) {
    if (is_raw) {
      std::ostringstream description;
      description << "Expect line: " << kAttributeCandidate
                  << ":" << "<candidate-str>";
      return ParseFailed(first_line, 0, description.str(), error);
    } else {
      return ParseFailedExpectLine(first_line, 0, kLineTypeAttributes,
                                   kAttributeCandidate, error);
    }
  }

  std::vector<std::string> fields;
  rtc::split(candidate_value, kSdpDelimiterSpace, &fields);

  // RFC 5245
  // a=candidate:<foundation> <component-id> <transport> <priority>
  // <connection-address> <port> typ <candidate-types>
  // [raddr <connection-address>] [rport <port>]
  // *(SP extension-att-name SP extension-att-value)
  const size_t expected_min_fields = 8;
  if (fields.size() < expected_min_fields ||
      (fields[6] != kAttributeCandidateTyp)) {
    return ParseFailedExpectMinFieldNum(first_line, expected_min_fields, error);
  }
  const std::string& foundation = fields[0];

  int component_id = 0;
  if (!GetValueFromString(first_line, fields[1], &component_id, error)) {
    return false;
  }
  const std::string& transport = fields[2];
  uint32_t priority = 0;
  if (!GetValueFromString(first_line, fields[3], &priority, error)) {
    return false;
  }
  const std::string& connection_address = fields[4];
  int port = 0;
  if (!GetValueFromString(first_line, fields[5], &port, error)) {
    return false;
  }
  SocketAddress address(connection_address, port);

  cricket::ProtocolType protocol;
  if (!StringToProto(transport.c_str(), &protocol)) {
    return ParseFailed(first_line, "Unsupported transport type.", error);
  }

  // Map the SDP candidate type token onto the internal port-type string.
  std::string candidate_type;
  const std::string& type = fields[7];
  if (type == kCandidateHost) {
    candidate_type = cricket::LOCAL_PORT_TYPE;
  } else if (type == kCandidateSrflx) {
    candidate_type = cricket::STUN_PORT_TYPE;
  } else if (type == kCandidateRelay) {
    candidate_type = cricket::RELAY_PORT_TYPE;
  } else {
    return ParseFailed(first_line, "Unsupported candidate type.", error);
  }

  size_t current_position = expected_min_fields;
  SocketAddress related_address;
  // The 2 optional fields for related address
  // [raddr <connection-address>] [rport <port>]
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kAttributeCandidateRaddr) {
    related_address.SetIP(fields[++current_position]);
    ++current_position;
  }
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kAttributeCandidateRport) {
    int port = 0;
    if (!GetValueFromString(
        first_line, fields[++current_position], &port, error)) {
      return false;
    }
    related_address.SetPort(port);
    ++current_position;
  }

  // If this is a TCP candidate, it has additional extension as defined in
  // RFC 6544.
  std::string tcptype;
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kTcpCandidateType) {
    tcptype = fields[++current_position];
    ++current_position;

    if (tcptype != cricket::TCPTYPE_ACTIVE_STR &&
        tcptype != cricket::TCPTYPE_PASSIVE_STR &&
        tcptype != cricket::TCPTYPE_SIMOPEN_STR) {
      return ParseFailed(first_line, "Invalid TCP candidate type.", error);
    }

    if (protocol != cricket::PROTO_TCP) {
      return ParseFailed(first_line, "Invalid non-TCP candidate", error);
    }
  }

  // Extension
  // Empty string as the candidate username and password.
  // Will be updated later with the ice-ufrag and ice-pwd.
  // TODO: Remove the username/password extension, which is currently
  // kept for backwards compatibility.
  std::string username;
  std::string password;
  uint32_t generation = 0;
  // Remaining fields come in name/value pairs; unknown names are skipped.
  for (size_t i = current_position; i + 1 < fields.size(); ++i) {
    // RFC 5245
    // *(SP extension-att-name SP extension-att-value)
    if (fields[i] == kAttributeCandidateGeneration) {
      if (!GetValueFromString(first_line, fields[++i], &generation, error)) {
        return false;
      }
    } else if (fields[i] == kAttributeCandidateUsername) {
      username = fields[++i];
    } else if (fields[i] == kAttributeCandidatePassword) {
      password = fields[++i];
    } else {
      // Skip the unknown extension.
      ++i;
    }
  }

  *candidate = Candidate(component_id, cricket::ProtoToString(protocol),
                         address, priority, username, password, candidate_type,
                         generation, foundation);
  candidate->set_related_address(related_address);
  candidate->set_tcptype(tcptype);
  return true;
}
+
+bool ParseIceOptions(const std::string& line,
+ std::vector<std::string>* transport_options,
+ SdpParseError* error) {
+ std::string ice_options;
+ if (!GetValue(line, kAttributeIceOption, &ice_options, error)) {
+ return false;
+ }
+ std::vector<std::string> fields;
+ rtc::split(ice_options, kSdpDelimiterSpace, &fields);
+ for (size_t i = 0; i < fields.size(); ++i) {
+ transport_options->push_back(fields[i]);
+ }
+ return true;
+}
+
+bool ParseSctpPort(const std::string& line,
+ int* sctp_port,
+ SdpParseError* error) {
+ // draft-ietf-mmusic-sctp-sdp-07
+ // a=sctp-port
+ std::vector<std::string> fields;
+ const size_t expected_min_fields = 2;
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColon, &fields);
+ if (fields.size() < expected_min_fields) {
+ fields.resize(0);
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpace, &fields);
+ }
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ if (!rtc::FromString(fields[1], sctp_port)) {
+ return ParseFailed(line, "Invalid sctp port value.", error);
+ }
+ return true;
+}
+
+bool ParseExtmap(const std::string& line, RtpHeaderExtension* extmap,
+ SdpParseError* error) {
+ // RFC 5285
+ // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ const size_t expected_min_fields = 2;
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ std::string uri = fields[1];
+
+ std::string value_direction;
+ if (!GetValue(fields[0], kAttributeExtmap, &value_direction, error)) {
+ return false;
+ }
+ std::vector<std::string> sub_fields;
+ rtc::split(value_direction, kSdpDelimiterSlash, &sub_fields);
+ int value = 0;
+ if (!GetValueFromString(line, sub_fields[0], &value, error)) {
+ return false;
+ }
+
+ *extmap = RtpHeaderExtension(uri, value);
+ return true;
+}
+
// Serializes one media section for |content_info| into |message|: the m= and
// c= lines (with default destinations from |candidates|), optional b=,
// a=rtcp, a=candidate, ICE credentials, DTLS fingerprint/setup, a=mid, and
// finally the SCTP or RTP content attributes.
void BuildMediaDescription(const ContentInfo* content_info,
                           const TransportInfo* transport_info,
                           const MediaType media_type,
                           const std::vector<Candidate>& candidates,
                           std::string* message) {
  ASSERT(message != NULL);
  if (content_info == NULL || message == NULL) {
    return;
  }
  // TODO: Rethink if we should use sprintfn instead of stringstream.
  // According to the style guide, streams should only be used for logging.
  // http://google-styleguide.googlecode.com/svn/
  // trunk/cppguide.xml?showone=Streams#Streams
  std::ostringstream os;
  const MediaContentDescription* media_desc =
      static_cast<const MediaContentDescription*>(
          content_info->description);
  ASSERT(media_desc != NULL);

  // May be overwritten below with the port advertised by the SCTP codec.
  int sctp_port = cricket::kSctpDefaultPort;

  // RFC 4566
  // m=<media> <port> <proto> <fmt>
  // fmt is a list of payload type numbers that MAY be used in the session.
  const char* type = NULL;
  if (media_type == cricket::MEDIA_TYPE_AUDIO)
    type = kMediaTypeAudio;
  else if (media_type == cricket::MEDIA_TYPE_VIDEO)
    type = kMediaTypeVideo;
  else if (media_type == cricket::MEDIA_TYPE_DATA)
    type = kMediaTypeData;
  else
    ASSERT(false);

  // Build the <fmt> list: codec payload ids for audio/video; for DTLS-SCTP
  // data the single SCTP port number is emitted instead.
  std::string fmt;
  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    const VideoContentDescription* video_desc =
        static_cast<const VideoContentDescription*>(media_desc);
    for (std::vector<cricket::VideoCodec>::const_iterator it =
             video_desc->codecs().begin();
         it != video_desc->codecs().end(); ++it) {
      fmt.append(" ");
      fmt.append(rtc::ToString<int>(it->id));
    }
  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    const AudioContentDescription* audio_desc =
        static_cast<const AudioContentDescription*>(media_desc);
    for (std::vector<cricket::AudioCodec>::const_iterator it =
             audio_desc->codecs().begin();
         it != audio_desc->codecs().end(); ++it) {
      fmt.append(" ");
      fmt.append(rtc::ToString<int>(it->id));
    }
  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
    const DataContentDescription* data_desc =
        static_cast<const DataContentDescription*>(media_desc);
    if (IsDtlsSctp(media_desc->protocol())) {
      fmt.append(" ");

      // Use the port from the SCTP codec parameter when present; otherwise
      // |sctp_port| keeps its default.
      for (std::vector<cricket::DataCodec>::const_iterator it =
           data_desc->codecs().begin();
           it != data_desc->codecs().end(); ++it) {
        if (it->id == cricket::kGoogleSctpDataCodecId &&
            it->GetParam(cricket::kCodecParamPort, &sctp_port)) {
          break;
        }
      }

      fmt.append(rtc::ToString<int>(sctp_port));
    } else {
      for (std::vector<cricket::DataCodec>::const_iterator it =
           data_desc->codecs().begin();
           it != data_desc->codecs().end(); ++it) {
        fmt.append(" ");
        fmt.append(rtc::ToString<int>(it->id));
      }
    }
  }
  // The fmt must never be empty. If no codecs are found, set the fmt attribute
  // to 0.
  if (fmt.empty()) {
    fmt = " 0";
  }

  // The port number in the m line will be updated later when associate with
  // the candidates.
  // RFC 3264
  // To reject an offered stream, the port number in the corresponding stream in
  // the answer MUST be set to zero.
  const std::string& port = content_info->rejected ?
      kMediaPortRejected : kDummyPort;

  rtc::SSLFingerprint* fp = (transport_info) ?
      transport_info->description.identity_fingerprint.get() : NULL;

  // Add the m and c lines.
  InitLine(kLineTypeMedia, type, &os);
  os << " " << port << " " << media_desc->protocol() << fmt;
  std::string mline = os.str();
  UpdateMediaDefaultDestination(candidates, mline, message);

  // RFC 4566
  // b=AS:<bandwidth>
  // bandwidth() is in bps; the AS value is expressed in kbps.
  if (media_desc->bandwidth() >= 1000) {
    InitLine(kLineTypeSessionBandwidth, kApplicationSpecificMaximum, &os);
    os << kSdpDelimiterColon << (media_desc->bandwidth() / 1000);
    AddLine(os.str(), message);
  }

  // Add the a=rtcp line.
  if (IsRtp(media_desc->protocol())) {
    std::string rtcp_line = GetRtcpLine(candidates);
    if (!rtcp_line.empty()) {
      AddLine(rtcp_line, message);
    }
  }

  // Build the a=candidate lines.
  BuildCandidate(candidates, message);

  // Use the transport_info to build the media level ice-ufrag and ice-pwd.
  if (transport_info) {
    // RFC 5245
    // ice-pwd-att           = "ice-pwd" ":" password
    // ice-ufrag-att         = "ice-ufrag" ":" ufrag
    // ice-ufrag
    InitAttrLine(kAttributeIceUfrag, &os);
    os << kSdpDelimiterColon << transport_info->description.ice_ufrag;
    AddLine(os.str(), message);
    // ice-pwd
    InitAttrLine(kAttributeIcePwd, &os);
    os << kSdpDelimiterColon << transport_info->description.ice_pwd;
    AddLine(os.str(), message);

    // draft-petithuguenin-mmusic-ice-attributes-level-03
    BuildIceOptions(transport_info->description.transport_options, message);

    // RFC 4572
    // fingerprint-attribute  =
    //   "fingerprint" ":" hash-func SP fingerprint
    if (fp) {
      // Insert the fingerprint attribute.
      InitAttrLine(kAttributeFingerprint, &os);
      os << kSdpDelimiterColon
         << fp->algorithm << kSdpDelimiterSpace
         << fp->GetRfc4572Fingerprint();
      AddLine(os.str(), message);

      // Inserting setup attribute.
      if (transport_info->description.connection_role !=
          cricket::CONNECTIONROLE_NONE) {
        // Making sure we are not using "passive" mode.
        cricket::ConnectionRole role =
            transport_info->description.connection_role;
        std::string dtls_role_str;
        VERIFY(cricket::ConnectionRoleToString(role, &dtls_role_str));
        InitAttrLine(kAttributeSetup, &os);
        os << kSdpDelimiterColon << dtls_role_str;
        AddLine(os.str(), message);
      }
    }
  }

  // RFC 3388
  // mid-attribute      = "a=mid:" identification-tag
  // identification-tag = token
  // Use the content name as the mid identification-tag.
  InitAttrLine(kAttributeMid, &os);
  os << kSdpDelimiterColon << content_info->name;
  AddLine(os.str(), message);

  if (IsDtlsSctp(media_desc->protocol())) {
    BuildSctpContentAttributes(message, sctp_port);
  } else if (IsRtp(media_desc->protocol())) {
    BuildRtpContentAttributes(media_desc, media_type, message);
  }
}
+
+void BuildSctpContentAttributes(std::string* message, int sctp_port) {
+ // draft-ietf-mmusic-sctp-sdp-04
+ // a=sctpmap:sctpmap-number protocol [streams]
+ // TODO(lally): switch this over to mmusic-sctp-sdp-12 (or later), with
+ // 'a=sctp-port:'
+ std::ostringstream os;
+ InitAttrLine(kAttributeSctpmap, &os);
+ os << kSdpDelimiterColon << sctp_port << kSdpDelimiterSpace
+ << kDefaultSctpmapProtocol << kSdpDelimiterSpace
+ << (cricket::kMaxSctpSid + 1);
+ AddLine(os.str(), message);
+}
+
// Appends the RTP-specific attribute lines for |media_desc| to |message|:
// extmap, direction, rtcp-mux, crypto, rtpmap/fmtp/rtcp-fb, and per-track
// ssrc-group/ssrc lines.
void BuildRtpContentAttributes(
    const MediaContentDescription* media_desc,
    const MediaType media_type,
    std::string* message) {
  std::ostringstream os;
  // RFC 5285
  // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
  // The definitions MUST be either all session level or all media level. This
  // implementation uses all media level.
  for (size_t i = 0; i < media_desc->rtp_header_extensions().size(); ++i) {
    InitAttrLine(kAttributeExtmap, &os);
    os << kSdpDelimiterColon << media_desc->rtp_header_extensions()[i].id
       << kSdpDelimiterSpace << media_desc->rtp_header_extensions()[i].uri;
    AddLine(os.str(), message);
  }

  // RFC 3264
  // a=sendrecv || a=sendonly || a=sendrecv || a=inactive
  // Unknown directions default to sendrecv.
  switch (media_desc->direction()) {
    case cricket::MD_INACTIVE:
      InitAttrLine(kAttributeInactive, &os);
      break;
    case cricket::MD_SENDONLY:
      InitAttrLine(kAttributeSendOnly, &os);
      break;
    case cricket::MD_RECVONLY:
      InitAttrLine(kAttributeRecvOnly, &os);
      break;
    case cricket::MD_SENDRECV:
    default:
      InitAttrLine(kAttributeSendRecv, &os);
      break;
  }
  AddLine(os.str(), message);

  // RFC 5761
  // a=rtcp-mux
  if (media_desc->rtcp_mux()) {
    InitAttrLine(kAttributeRtcpMux, &os);
    AddLine(os.str(), message);
  }

  // RFC 4568
  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
  for (std::vector<CryptoParams>::const_iterator it =
           media_desc->cryptos().begin();
       it != media_desc->cryptos().end(); ++it) {
    InitAttrLine(kAttributeCrypto, &os);
    os << kSdpDelimiterColon << it->tag << " " << it->cipher_suite << " "
       << it->key_params;
    if (!it->session_params.empty()) {
      os << " " << it->session_params;
    }
    AddLine(os.str(), message);
  }

  // RFC 4566
  // a=rtpmap:<payload type> <encoding name>/<clock rate>
  // [/<encodingparameters>]
  BuildRtpMap(media_desc, media_type, message);

  for (StreamParamsVec::const_iterator track = media_desc->streams().begin();
       track != media_desc->streams().end(); ++track) {
    // Require that the track belongs to a media stream,
    // ie the sync_label is set. This extra check is necessary since the
    // MediaContentDescription always contains a streamparam with an ssrc even
    // if no track or media stream have been created.
    if (track->sync_label.empty()) continue;

    // Build the ssrc-group lines.
    for (size_t i = 0; i < track->ssrc_groups.size(); ++i) {
      // RFC 5576
      // a=ssrc-group:<semantics> <ssrc-id> ...
      if (track->ssrc_groups[i].ssrcs.empty()) {
        continue;
      }
      std::ostringstream os;
      InitAttrLine(kAttributeSsrcGroup, &os);
      os << kSdpDelimiterColon << track->ssrc_groups[i].semantics;
      std::vector<uint32_t>::const_iterator ssrc =
          track->ssrc_groups[i].ssrcs.begin();
      for (; ssrc != track->ssrc_groups[i].ssrcs.end(); ++ssrc) {
        os << kSdpDelimiterSpace << rtc::ToString<uint32_t>(*ssrc);
      }
      AddLine(os.str(), message);
    }
    // Build the ssrc lines for each ssrc.
    for (size_t i = 0; i < track->ssrcs.size(); ++i) {
      uint32_t ssrc = track->ssrcs[i];
      // RFC 5576
      // a=ssrc:<ssrc-id> cname:<value>
      AddSsrcLine(ssrc, kSsrcAttributeCname,
                  track->cname, message);

      // draft-alvestrand-mmusic-msid-00
      // a=ssrc:<ssrc-id> msid:identifier [appdata]
      // The appdata consists of the "id" attribute of a MediaStreamTrack, which
      // is corresponding to the "name" attribute of StreamParams.
      std::string appdata = track->id;
      std::ostringstream os;
      InitAttrLine(kAttributeSsrc, &os);
      os << kSdpDelimiterColon << ssrc << kSdpDelimiterSpace
         << kSsrcAttributeMsid << kSdpDelimiterColon << track->sync_label
         << kSdpDelimiterSpace << appdata;
      AddLine(os.str(), message);

      // TODO(ronghuawu): Remove below code which is for backward compatibility.
      // draft-alvestrand-rtcweb-mid-01
      // a=ssrc:<ssrc-id> mslabel:<value>
      // The label isn't yet defined.
      // a=ssrc:<ssrc-id> label:<value>
      AddSsrcLine(ssrc, kSsrcAttributeMslabel, track->sync_label, message);
      AddSsrcLine(ssrc, kSSrcAttributeLabel, track->id, message);
    }
  }
}
+
+void WriteFmtpHeader(int payload_type, std::ostringstream* os) {
+ // fmtp header: a=fmtp:|payload_type| <parameters>
+ // Add a=fmtp
+ InitAttrLine(kAttributeFmtp, os);
+ // Add :|payload_type|
+ *os << kSdpDelimiterColon << payload_type;
+}
+
+void WriteRtcpFbHeader(int payload_type, std::ostringstream* os) {
+ // rtcp-fb header: a=rtcp-fb:|payload_type|
+ // <parameters>/<ccm <ccm_parameters>>
+ // Add a=rtcp-fb
+ InitAttrLine(kAttributeRtcpFb, os);
+ // Add :
+ *os << kSdpDelimiterColon;
+ if (payload_type == kWildcardPayloadType) {
+ *os << "*";
+ } else {
+ *os << payload_type;
+ }
+}
+
+void WriteFmtpParameter(const std::string& parameter_name,
+ const std::string& parameter_value,
+ std::ostringstream* os) {
+ // fmtp parameters: |parameter_name|=|parameter_value|
+ *os << parameter_name << kSdpDelimiterEqual << parameter_value;
+}
+
+void WriteFmtpParameters(const cricket::CodecParameterMap& parameters,
+ std::ostringstream* os) {
+ for (cricket::CodecParameterMap::const_iterator fmtp = parameters.begin();
+ fmtp != parameters.end(); ++fmtp) {
+ // Each new parameter, except the first one starts with ";" and " ".
+ if (fmtp != parameters.begin()) {
+ *os << kSdpDelimiterSemicolon;
+ }
+ *os << kSdpDelimiterSpace;
+ WriteFmtpParameter(fmtp->first, fmtp->second, os);
+ }
+}
+
+bool IsFmtpParam(const std::string& name) {
+ const char* kFmtpParams[] = {
+ kCodecParamMinPTime, kCodecParamSPropStereo,
+ kCodecParamStereo, kCodecParamUseInbandFec, kCodecParamUseDtx,
+ kCodecParamStartBitrate, kCodecParamMaxBitrate, kCodecParamMinBitrate,
+ kCodecParamMaxQuantization, kCodecParamSctpProtocol, kCodecParamSctpStreams,
+ kCodecParamMaxAverageBitrate, kCodecParamMaxPlaybackRate,
+ kCodecParamAssociatedPayloadType
+ };
+ for (size_t i = 0; i < ARRAY_SIZE(kFmtpParams); ++i) {
+ if (_stricmp(name.c_str(), kFmtpParams[i]) == 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Retreives fmtp parameters from |params|, which may contain other parameters
+// as well, and puts them in |fmtp_parameters|.
+void GetFmtpParams(const cricket::CodecParameterMap& params,
+ cricket::CodecParameterMap* fmtp_parameters) {
+ for (cricket::CodecParameterMap::const_iterator iter = params.begin();
+ iter != params.end(); ++iter) {
+ if (IsFmtpParam(iter->first)) {
+ (*fmtp_parameters)[iter->first] = iter->second;
+ }
+ }
+}
+
+template <class T>
+void AddFmtpLine(const T& codec, std::string* message) {
+ cricket::CodecParameterMap fmtp_parameters;
+ GetFmtpParams(codec.params, &fmtp_parameters);
+ if (fmtp_parameters.empty()) {
+ // No need to add an fmtp if it will have no (optional) parameters.
+ return;
+ }
+ std::ostringstream os;
+ WriteFmtpHeader(codec.id, &os);
+ WriteFmtpParameters(fmtp_parameters, &os);
+ AddLine(os.str(), message);
+ return;
+}
+
+template <class T>
+void AddRtcpFbLines(const T& codec, std::string* message) {
+ for (std::vector<cricket::FeedbackParam>::const_iterator iter =
+ codec.feedback_params.params().begin();
+ iter != codec.feedback_params.params().end(); ++iter) {
+ std::ostringstream os;
+ WriteRtcpFbHeader(codec.id, &os);
+ os << " " << iter->id();
+ if (!iter->param().empty()) {
+ os << " " << iter->param();
+ }
+ AddLine(os.str(), message);
+ }
+}
+
+bool AddSctpDataCodec(DataContentDescription* media_desc,
+ int sctp_port) {
+ if (media_desc->HasCodec(cricket::kGoogleSctpDataCodecId)) {
+ return ParseFailed("",
+ "Can't have multiple sctp port attributes.",
+ NULL);
+ }
+ // Add the SCTP Port number as a pseudo-codec "port" parameter
+ cricket::DataCodec codec_port(
+ cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName,
+ 0);
+ codec_port.SetParam(cricket::kCodecParamPort, sctp_port);
+ LOG(INFO) << "AddSctpDataCodec: Got SCTP Port Number "
+ << sctp_port;
+ media_desc->AddCodec(codec_port);
+ return true;
+}
+
// Finds the smallest entry of |values| and stores it in |*value|.
// Returns false, leaving |*value| untouched, when |values| is empty.
bool GetMinValue(const std::vector<int>& values, int* value) {
  if (values.empty()) {
    return false;
  }
  *value = *std::min_element(values.begin(), values.end());
  return true;
}
+
+bool GetParameter(const std::string& name,
+ const cricket::CodecParameterMap& params, int* value) {
+ std::map<std::string, std::string>::const_iterator found =
+ params.find(name);
+ if (found == params.end()) {
+ return false;
+ }
+ if (!rtc::FromString(found->second, value)) {
+ return false;
+ }
+ return true;
+}
+
// Appends the "a=rtpmap" lines (plus per-codec rtcp-fb/fmtp lines, and for
// audio the aggregated ptime/maxptime attributes) for every codec in
// |media_desc| to |message|. |media_desc| is downcast according to
// |media_type|, so the caller must pass a matching description object.
// NOTE(review): |os| is reused across loop iterations; this assumes
// InitAttrLine resets the stream contents — confirm in its definition.
void BuildRtpMap(const MediaContentDescription* media_desc,
                 const MediaType media_type,
                 std::string* message) {
  ASSERT(message != NULL);
  ASSERT(media_desc != NULL);
  std::ostringstream os;
  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    const VideoContentDescription* video_desc =
        static_cast<const VideoContentDescription*>(media_desc);
    for (std::vector<cricket::VideoCodec>::const_iterator it =
             video_desc->codecs().begin();
         it != video_desc->codecs().end(); ++it) {
      // RFC 4566
      // a=rtpmap:<payload type> <encoding name>/<clock rate>
      // [/<encodingparameters>]
      // The wildcard payload type gets no rtpmap line of its own; only its
      // rtcp-fb/fmtp attributes below are written.
      if (it->id != kWildcardPayloadType) {
        InitAttrLine(kAttributeRtpmap, &os);
        os << kSdpDelimiterColon << it->id << " " << it->name
           << "/" << kDefaultVideoClockrate;
        AddLine(os.str(), message);
      }
      AddRtcpFbLines(*it, message);
      AddFmtpLine(*it, message);
    }
  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    const AudioContentDescription* audio_desc =
        static_cast<const AudioContentDescription*>(media_desc);
    // Collected across codecs to compute m-line-wide ptime/maxptime below.
    std::vector<int> ptimes;
    std::vector<int> maxptimes;
    int max_minptime = 0;
    for (std::vector<cricket::AudioCodec>::const_iterator it =
             audio_desc->codecs().begin();
         it != audio_desc->codecs().end(); ++it) {
      ASSERT(!it->name.empty());
      // RFC 4566
      // a=rtpmap:<payload type> <encoding name>/<clock rate>
      // [/<encodingparameters>]
      InitAttrLine(kAttributeRtpmap, &os);
      os << kSdpDelimiterColon << it->id << " ";
      os << it->name << "/" << it->clockrate;
      // The channel count is only written when it differs from the default 1.
      if (it->channels != 1) {
        os << "/" << it->channels;
      }
      AddLine(os.str(), message);
      AddRtcpFbLines(*it, message);
      AddFmtpLine(*it, message);
      int minptime = 0;
      if (GetParameter(kCodecParamMinPTime, it->params, &minptime)) {
        max_minptime = std::max(minptime, max_minptime);
      }
      int ptime;
      if (GetParameter(kCodecParamPTime, it->params, &ptime)) {
        ptimes.push_back(ptime);
      }
      int maxptime;
      if (GetParameter(kCodecParamMaxPTime, it->params, &maxptime)) {
        maxptimes.push_back(maxptime);
      }
    }
    // Populate the maxptime attribute with the smallest maxptime of all codecs
    // under the same m-line.
    int min_maxptime = INT_MAX;
    if (GetMinValue(maxptimes, &min_maxptime)) {
      AddAttributeLine(kCodecParamMaxPTime, min_maxptime, message);
    }
    ASSERT(min_maxptime > max_minptime);
    // Populate the ptime attribute with the smallest ptime or the largest
    // minptime, whichever is the largest, for all codecs under the same m-line.
    // (ptime is clamped into [max_minptime, min_maxptime].)
    int ptime = INT_MAX;
    if (GetMinValue(ptimes, &ptime)) {
      ptime = std::min(ptime, min_maxptime);
      ptime = std::max(ptime, max_minptime);
      AddAttributeLine(kCodecParamPTime, ptime, message);
    }
  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
    const DataContentDescription* data_desc =
        static_cast<const DataContentDescription*>(media_desc);
    for (std::vector<cricket::DataCodec>::const_iterator it =
             data_desc->codecs().begin();
         it != data_desc->codecs().end(); ++it) {
      // RFC 4566
      // a=rtpmap:<payload type> <encoding name>/<clock rate>
      // [/<encodingparameters>]
      InitAttrLine(kAttributeRtpmap, &os);
      os << kSdpDelimiterColon << it->id << " "
         << it->name << "/" << it->clockrate;
      AddLine(os.str(), message);
    }
  }
}
+
// Serializes |candidates| as RFC 5245 "a=candidate" lines appended to
// |message|. Candidates whose type cannot be mapped to an SDP candidate type
// are skipped (with an assert in debug builds) rather than written out.
void BuildCandidate(const std::vector<Candidate>& candidates,
                    std::string* message) {
  std::ostringstream os;

  for (std::vector<Candidate>::const_iterator it = candidates.begin();
       it != candidates.end(); ++it) {
    // RFC 5245
    // a=candidate:<foundation> <component-id> <transport> <priority>
    // <connection-address> <port> typ <candidate-types>
    // [raddr <connection-address>] [rport <port>]
    // *(SP extension-att-name SP extension-att-value)
    std::string type;
    // Map the cricket candidate type to "host" / "srflx" / "prflx" / "relay"
    if (it->type() == cricket::LOCAL_PORT_TYPE) {
      type = kCandidateHost;
    } else if (it->type() == cricket::STUN_PORT_TYPE) {
      type = kCandidateSrflx;
    } else if (it->type() == cricket::RELAY_PORT_TYPE) {
      type = kCandidateRelay;
    } else {
      ASSERT(false);
      // Never write out candidates if we don't know the type.
      continue;
    }

    InitAttrLine(kAttributeCandidate, &os);
    os << kSdpDelimiterColon
       << it->foundation() << " "
       << it->component() << " "
       << it->protocol() << " "
       << it->priority() << " "
       << it->address().ipaddr().ToString() << " "
       << it->address().PortAsString() << " "
       << kAttributeCandidateTyp << " "
       << type << " ";

    // Related address (optional raddr/rport pair).
    if (!it->related_address().IsNil()) {
      os << kAttributeCandidateRaddr << " "
         << it->related_address().ipaddr().ToString() << " "
         << kAttributeCandidateRport << " "
         << it->related_address().PortAsString() << " ";
    }

    // TCP candidates additionally carry their tcptype.
    if (it->protocol() == cricket::TCP_PROTOCOL_NAME) {
      os << kTcpCandidateType << " " << it->tcptype() << " ";
    }

    // Extensions
    os << kAttributeCandidateGeneration << " " << it->generation();

    AddLine(os.str(), message);
  }
}
+
+void BuildIceOptions(const std::vector<std::string>& transport_options,
+ std::string* message) {
+ if (!transport_options.empty()) {
+ std::ostringstream os;
+ InitAttrLine(kAttributeIceOption, &os);
+ os << kSdpDelimiterColon << transport_options[0];
+ for (size_t i = 1; i < transport_options.size(); ++i) {
+ os << kSdpDelimiterSpace << transport_options[i];
+ }
+ AddLine(os.str(), message);
+ }
+}
+
+bool IsRtp(const std::string& protocol) {
+ return protocol.empty() ||
+ (protocol.find(cricket::kMediaProtocolRtpPrefix) != std::string::npos);
+}
+
+bool IsDtlsSctp(const std::string& protocol) {
+ // This intentionally excludes "SCTP" and "SCTP/DTLS".
+ return protocol.find(cricket::kMediaProtocolDtlsSctp) != std::string::npos;
+}
+
// Parses the session-level section of |message| (everything before the first
// m= line), starting at |*pos| and advancing it. Extracts the origin-line
// session id/version, the session-level transport description (ICE
// credentials, fingerprint, DTLS role, ice-options), session-level extmaps,
// and group/msid-semantics info on |desc|. Returns false and fills |error|
// on malformed input; unrecognized attributes are silently ignored.
bool ParseSessionDescription(const std::string& message, size_t* pos,
                             std::string* session_id,
                             std::string* session_version,
                             TransportDescription* session_td,
                             RtpHeaderExtensions* session_extmaps,
                             cricket::SessionDescription* desc,
                             SdpParseError* error) {
  std::string line;

  // msid support is off until an a=msid-semantic line proves otherwise.
  desc->set_msid_supported(false);

  // RFC 4566
  // v= (protocol version)
  if (!GetLineWithType(message, pos, &line, kLineTypeVersion)) {
    return ParseFailedExpectLine(message, *pos, kLineTypeVersion,
                                 std::string(), error);
  }
  // RFC 4566
  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
  // <unicast-address>
  if (!GetLineWithType(message, pos, &line, kLineTypeOrigin)) {
    return ParseFailedExpectLine(message, *pos, kLineTypeOrigin,
                                 std::string(), error);
  }
  std::vector<std::string> fields;
  rtc::split(line.substr(kLinePrefixLength),
             kSdpDelimiterSpace, &fields);
  const size_t expected_fields = 6;
  if (fields.size() != expected_fields) {
    return ParseFailedExpectFieldNum(line, expected_fields, error);
  }
  *session_id = fields[1];
  *session_version = fields[2];

  // RFC 4566
  // s= (session name)
  if (!GetLineWithType(message, pos, &line, kLineTypeSessionName)) {
    return ParseFailedExpectLine(message, *pos, kLineTypeSessionName,
                                 std::string(), error);
  }

  // Optional lines
  // Those are the optional lines, so shouldn't return false if not present.
  // RFC 4566
  // i=* (session information)
  GetLineWithType(message, pos, &line, kLineTypeSessionInfo);

  // RFC 4566
  // u=* (URI of description)
  GetLineWithType(message, pos, &line, kLineTypeSessionUri);

  // RFC 4566
  // e=* (email address)
  GetLineWithType(message, pos, &line, kLineTypeSessionEmail);

  // RFC 4566
  // p=* (phone number)
  GetLineWithType(message, pos, &line, kLineTypeSessionPhone);

  // RFC 4566
  // c=* (connection information -- not required if included in
  //      all media)
  GetLineWithType(message, pos, &line, kLineTypeConnection);

  // RFC 4566
  // b=* (zero or more bandwidth information lines)
  while (GetLineWithType(message, pos, &line, kLineTypeSessionBandwidth)) {
    // By pass zero or more b lines.
  }

  // RFC 4566
  // One or more time descriptions ("t=" and "r=" lines; see below)
  // t= (time the session is active)
  // r=* (zero or more repeat times)
  // Ensure there's at least one time description
  if (!GetLineWithType(message, pos, &line, kLineTypeTiming)) {
    return ParseFailedExpectLine(message, *pos, kLineTypeTiming, std::string(),
                                 error);
  }

  while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) {
    // By pass zero or more r lines.
  }

  // Go through the rest of the time descriptions
  while (GetLineWithType(message, pos, &line, kLineTypeTiming)) {
    while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) {
      // By pass zero or more r lines.
    }
  }

  // RFC 4566
  // z=* (time zone adjustments)
  GetLineWithType(message, pos, &line, kLineTypeTimeZone);

  // RFC 4566
  // k=* (encryption key)
  GetLineWithType(message, pos, &line, kLineTypeEncryptionKey);

  // RFC 4566
  // a=* (zero or more session attribute lines)
  while (GetLineWithType(message, pos, &line, kLineTypeAttributes)) {
    if (HasAttribute(line, kAttributeGroup)) {
      if (!ParseGroupAttribute(line, desc, error)) {
        return false;
      }
    } else if (HasAttribute(line, kAttributeIceUfrag)) {
      if (!GetValue(line, kAttributeIceUfrag,
                    &(session_td->ice_ufrag), error)) {
        return false;
      }
    } else if (HasAttribute(line, kAttributeIcePwd)) {
      if (!GetValue(line, kAttributeIcePwd, &(session_td->ice_pwd), error)) {
        return false;
      }
    } else if (HasAttribute(line, kAttributeIceLite)) {
      session_td->ice_mode = cricket::ICEMODE_LITE;
    } else if (HasAttribute(line, kAttributeIceOption)) {
      if (!ParseIceOptions(line, &(session_td->transport_options), error)) {
        return false;
      }
    } else if (HasAttribute(line, kAttributeFingerprint)) {
      // Only one session-level fingerprint is allowed.
      if (session_td->identity_fingerprint.get()) {
        return ParseFailed(
            line,
            "Can't have multiple fingerprint attributes at the same level.",
            error);
      }
      rtc::SSLFingerprint* fingerprint = NULL;
      if (!ParseFingerprintAttribute(line, &fingerprint, error)) {
        return false;
      }
      session_td->identity_fingerprint.reset(fingerprint);
    } else if (HasAttribute(line, kAttributeSetup)) {
      if (!ParseDtlsSetup(line, &(session_td->connection_role), error)) {
        return false;
      }
    } else if (HasAttribute(line, kAttributeMsidSemantics)) {
      std::string semantics;
      if (!GetValue(line, kAttributeMsidSemantics, &semantics, error)) {
        return false;
      }
      desc->set_msid_supported(
          CaseInsensitiveFind(semantics, kMediaStreamSemantic));
    } else if (HasAttribute(line, kAttributeExtmap)) {
      RtpHeaderExtension extmap;
      if (!ParseExtmap(line, &extmap, error)) {
        return false;
      }
      session_extmaps->push_back(extmap);
    }
    // Unrecognized session-level attributes fall through and are ignored.
  }

  return true;
}
+
+bool ParseGroupAttribute(const std::string& line,
+ cricket::SessionDescription* desc,
+ SdpParseError* error) {
+ ASSERT(desc != NULL);
+
+ // RFC 5888 and draft-holmberg-mmusic-sdp-bundle-negotiation-00
+ // a=group:BUNDLE video voice
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ std::string semantics;
+ if (!GetValue(fields[0], kAttributeGroup, &semantics, error)) {
+ return false;
+ }
+ cricket::ContentGroup group(semantics);
+ for (size_t i = 1; i < fields.size(); ++i) {
+ group.AddContentName(fields[i]);
+ }
+ desc->AddGroup(group);
+ return true;
+}
+
+static bool ParseFingerprintAttribute(const std::string& line,
+ rtc::SSLFingerprint** fingerprint,
+ SdpParseError* error) {
+ if (!IsLineType(line, kLineTypeAttributes) ||
+ !HasAttribute(line, kAttributeFingerprint)) {
+ return ParseFailedExpectLine(line, 0, kLineTypeAttributes,
+ kAttributeFingerprint, error);
+ }
+
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ const size_t expected_fields = 2;
+ if (fields.size() != expected_fields) {
+ return ParseFailedExpectFieldNum(line, expected_fields, error);
+ }
+
+ // The first field here is "fingerprint:<hash>.
+ std::string algorithm;
+ if (!GetValue(fields[0], kAttributeFingerprint, &algorithm, error)) {
+ return false;
+ }
+
+ // Downcase the algorithm. Note that we don't need to downcase the
+ // fingerprint because hex_decode can handle upper-case.
+ std::transform(algorithm.begin(), algorithm.end(), algorithm.begin(),
+ ::tolower);
+
+ // The second field is the digest value. De-hexify it.
+ *fingerprint = rtc::SSLFingerprint::CreateFromRfc4572(
+ algorithm, fields[1]);
+ if (!*fingerprint) {
+ return ParseFailed(line,
+ "Failed to create fingerprint from the digest.",
+ error);
+ }
+
+ return true;
+}
+
+static bool ParseDtlsSetup(const std::string& line,
+ cricket::ConnectionRole* role,
+ SdpParseError* error) {
+ // setup-attr = "a=setup:" role
+ // role = "active" / "passive" / "actpass" / "holdconn"
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColon, &fields);
+ const size_t expected_fields = 2;
+ if (fields.size() != expected_fields) {
+ return ParseFailedExpectFieldNum(line, expected_fields, error);
+ }
+ std::string role_str = fields[1];
+ if (!cricket::StringToConnectionRole(role_str, role)) {
+ return ParseFailed(line, "Invalid attribute value.", error);
+ }
+ return true;
+}
+
+// RFC 3551
+// PT encoding media type clock rate channels
+// name (Hz)
+// 0 PCMU A 8,000 1
+// 1 reserved A
+// 2 reserved A
+// 3 GSM A 8,000 1
+// 4 G723 A 8,000 1
+// 5 DVI4 A 8,000 1
+// 6 DVI4 A 16,000 1
+// 7 LPC A 8,000 1
+// 8 PCMA A 8,000 1
+// 9 G722 A 8,000 1
+// 10 L16 A 44,100 2
+// 11 L16 A 44,100 1
+// 12 QCELP A 8,000 1
+// 13 CN A 8,000 1
+// 14 MPA A 90,000 (see text)
+// 15 G728 A 8,000 1
+// 16 DVI4 A 11,025 1
+// 17 DVI4 A 22,050 1
+// 18 G729 A 8,000 1
// One row of the RFC 3551 static payload type table above.
struct StaticPayloadAudioCodec {
  const char* name;  // Encoding name, e.g. "PCMU".
  int clockrate;     // Clock rate in Hz.
  int channels;      // Channel count.
};
// RFC 3551 static audio payload types, indexed by payload type number (0-18).
// "reserved" rows keep the indexing dense.
static const StaticPayloadAudioCodec kStaticPayloadAudioCodecs[] = {
  { "PCMU", 8000, 1 },
  { "reserved", 0, 0 },
  { "reserved", 0, 0 },
  { "GSM", 8000, 1 },
  { "G723", 8000, 1 },
  { "DVI4", 8000, 1 },
  { "DVI4", 16000, 1 },
  { "LPC", 8000, 1 },
  { "PCMA", 8000, 1 },
  { "G722", 8000, 1 },
  { "L16", 44100, 2 },
  { "L16", 44100, 1 },
  { "QCELP", 8000, 1 },
  { "CN", 8000, 1 },
  { "MPA", 90000, 1 },  // RFC 3551 gives channels as "(see text)".
  { "G728", 8000, 1 },
  { "DVI4", 11025, 1 },
  { "DVI4", 22050, 1 },
  { "G729", 8000, 1 },
};
+
+void MaybeCreateStaticPayloadAudioCodecs(
+ const std::vector<int>& fmts, AudioContentDescription* media_desc) {
+ if (!media_desc) {
+ return;
+ }
+ int preference = static_cast<int>(fmts.size());
+ std::vector<int>::const_iterator it = fmts.begin();
+ bool add_new_codec = false;
+ for (; it != fmts.end(); ++it) {
+ int payload_type = *it;
+ if (!media_desc->HasCodec(payload_type) &&
+ payload_type >= 0 &&
+ payload_type < ARRAY_SIZE(kStaticPayloadAudioCodecs)) {
+ std::string encoding_name = kStaticPayloadAudioCodecs[payload_type].name;
+ int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate;
+ int channels = kStaticPayloadAudioCodecs[payload_type].channels;
+ media_desc->AddCodec(cricket::AudioCodec(payload_type, encoding_name,
+ clock_rate, 0, channels,
+ preference));
+ add_new_codec = true;
+ }
+ --preference;
+ }
+ if (add_new_codec) {
+ media_desc->SortCodecs();
+ }
+}
+
// Parses one media section into a newly allocated content description of type
// C (Audio/Video/DataContentDescription). Seeds |*content_name| with the
// conventional name for |media_type| (ParseContent may overwrite it from an
// a=mid line), then delegates attribute parsing to ParseContent. Returns NULL
// on failure; on success the caller takes ownership of the returned object,
// whose codecs have been sorted to match the m-line fmt list.
template <class C>
static C* ParseContentDescription(const std::string& message,
                                  const MediaType media_type,
                                  int mline_index,
                                  const std::string& protocol,
                                  const std::vector<int>& codec_preference,
                                  size_t* pos,
                                  std::string* content_name,
                                  TransportDescription* transport,
                                  std::vector<JsepIceCandidate*>* candidates,
                                  webrtc::SdpParseError* error) {
  C* media_desc = new C();
  // Default content name per media type.
  switch (media_type) {
    case cricket::MEDIA_TYPE_AUDIO:
      *content_name = cricket::CN_AUDIO;
      break;
    case cricket::MEDIA_TYPE_VIDEO:
      *content_name = cricket::CN_VIDEO;
      break;
    case cricket::MEDIA_TYPE_DATA:
      *content_name = cricket::CN_DATA;
      break;
    default:
      ASSERT(false);
      break;
  }
  if (!ParseContent(message, media_type, mline_index, protocol,
                    codec_preference, pos, content_name,
                    media_desc, transport, candidates, error)) {
    delete media_desc;
    return NULL;
  }
  // Sort the codecs according to the m-line fmt list.
  media_desc->SortCodecs();
  return media_desc;
}
+
// Parses every m-section of |message| starting at |*pos| and adds a content
// (plus its TransportInfo, seeded from |session_td|) to |desc| for each.
// Session-level extmaps in |session_extmaps| are pushed down into each RTP
// content; media-level candidates end up in |candidates| (via ParseContent).
// Unsupported media types are skipped with a warning. Returns false and
// fills |error| on malformed input.
bool ParseMediaDescription(const std::string& message,
                           const TransportDescription& session_td,
                           const RtpHeaderExtensions& session_extmaps,
                           size_t* pos,
                           cricket::SessionDescription* desc,
                           std::vector<JsepIceCandidate*>* candidates,
                           SdpParseError* error) {
  ASSERT(desc != NULL);
  std::string line;
  int mline_index = -1;

  // Zero or more media descriptions
  // RFC 4566
  // m=<media> <port> <proto> <fmt>
  while (GetLineWithType(message, pos, &line, kLineTypeMedia)) {
    ++mline_index;

    std::vector<std::string> fields;
    rtc::split(line.substr(kLinePrefixLength),
               kSdpDelimiterSpace, &fields);
    const size_t expected_min_fields = 4;
    if (fields.size() < expected_min_fields) {
      return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
    }
    bool rejected = false;
    // RFC 3264
    // To reject an offered stream, the port number in the corresponding stream
    // in the answer MUST be set to zero.
    if (fields[1] == kMediaPortRejected) {
      rejected = true;
    }

    std::string protocol = fields[2];

    // <fmt>
    std::vector<int> codec_preference;
    if (IsRtp(protocol)) {
      for (size_t j = 3 ; j < fields.size(); ++j) {
        // TODO(wu): Remove when below bug is fixed.
        // https://bugzilla.mozilla.org/show_bug.cgi?id=996329
        // Skips a trailing empty token produced by a trailing space.
        if (fields[j].empty() && j == fields.size() - 1) {
          continue;
        }

        int pl = 0;
        if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) {
          return false;
        }
        codec_preference.push_back(pl);
      }
    }

    // Make a temporary TransportDescription based on |session_td|.
    // Some of this gets overwritten by ParseContent.
    TransportDescription transport(session_td.transport_options,
                                   session_td.ice_ufrag,
                                   session_td.ice_pwd,
                                   session_td.ice_mode,
                                   session_td.connection_role,
                                   session_td.identity_fingerprint.get(),
                                   Candidates());

    rtc::scoped_ptr<MediaContentDescription> content;
    std::string content_name;
    if (HasAttribute(line, kMediaTypeVideo)) {
      content.reset(ParseContentDescription<VideoContentDescription>(
                    message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
                    codec_preference, pos, &content_name,
                    &transport, candidates, error));
    } else if (HasAttribute(line, kMediaTypeAudio)) {
      content.reset(ParseContentDescription<AudioContentDescription>(
                    message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
                    codec_preference, pos, &content_name,
                    &transport, candidates, error));
    } else if (HasAttribute(line, kMediaTypeData)) {
      DataContentDescription* data_desc =
          ParseContentDescription<DataContentDescription>(
                    message, cricket::MEDIA_TYPE_DATA, mline_index, protocol,
                    codec_preference, pos, &content_name,
                    &transport, candidates, error);
      content.reset(data_desc);

      // For DTLS/SCTP m-lines the <fmt> field carries the SCTP port number.
      int p;
      if (data_desc && IsDtlsSctp(protocol) && rtc::FromString(fields[3], &p)) {
        if (!AddSctpDataCodec(data_desc, p))
          return false;
      }
    } else {
      LOG(LS_WARNING) << "Unsupported media type: " << line;
      continue;
    }
    if (!content.get()) {
      // ParseContentDescription returns NULL if failed.
      return false;
    }

    if (IsRtp(protocol)) {
      // Set the extmap.
      if (!session_extmaps.empty() &&
          !content->rtp_header_extensions().empty()) {
        return ParseFailed("",
                           "The a=extmap MUST be either all session level or "
                           "all media level.",
                           error);
      }
      for (size_t i = 0; i < session_extmaps.size(); ++i) {
        content->AddRtpHeaderExtension(session_extmaps[i]);
      }
    }
    content->set_protocol(protocol);
    desc->AddContent(content_name,
                     IsDtlsSctp(protocol) ? cricket::NS_JINGLE_DRAFT_SCTP :
                                            cricket::NS_JINGLE_RTP,
                     rejected,
                     content.release());
    // Create TransportInfo with the media level "ice-pwd" and "ice-ufrag".
    TransportInfo transport_info(content_name, transport);

    if (!desc->AddTransportInfo(transport_info)) {
      std::ostringstream description;
      description << "Failed to AddTransportInfo with content name: "
                  << content_name;
      return ParseFailed("", description.str(), error);
    }
  }

  // Leftover text after the last m-section, with no m-line seen at all,
  // means the remainder couldn't possibly be parsed.
  size_t end_of_message = message.size();
  if (mline_index == -1 && *pos != end_of_message) {
    ParseFailed(message, *pos, "Expects m line.", error);
    return false;
  }
  return true;
}
+
+bool VerifyCodec(const cricket::Codec& codec) {
+ // Codec has not been populated correctly unless the name has been set. This
+ // can happen if an SDP has an fmtp or rtcp-fb with a payload type but doesn't
+ // have a corresponding "rtpmap" line.
+ cricket::Codec default_codec;
+ return default_codec.name != codec.name;
+}
+
+bool VerifyAudioCodecs(const AudioContentDescription* audio_desc) {
+ const std::vector<cricket::AudioCodec>& codecs = audio_desc->codecs();
+ for (std::vector<cricket::AudioCodec>::const_iterator iter = codecs.begin();
+ iter != codecs.end(); ++iter) {
+ if (!VerifyCodec(*iter)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool VerifyVideoCodecs(const VideoContentDescription* video_desc) {
+ const std::vector<cricket::VideoCodec>& codecs = video_desc->codecs();
+ for (std::vector<cricket::VideoCodec>::const_iterator iter = codecs.begin();
+ iter != codecs.end(); ++iter) {
+ if (!VerifyCodec(*iter)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void AddParameters(const cricket::CodecParameterMap& parameters,
+ cricket::Codec* codec) {
+ for (cricket::CodecParameterMap::const_iterator iter =
+ parameters.begin(); iter != parameters.end(); ++iter) {
+ codec->SetParam(iter->first, iter->second);
+ }
+}
+
+void AddFeedbackParameter(const cricket::FeedbackParam& feedback_param,
+ cricket::Codec* codec) {
+ codec->AddFeedbackParam(feedback_param);
+}
+
+void AddFeedbackParameters(const cricket::FeedbackParams& feedback_params,
+ cricket::Codec* codec) {
+ for (std::vector<cricket::FeedbackParam>::const_iterator iter =
+ feedback_params.params().begin();
+ iter != feedback_params.params().end(); ++iter) {
+ codec->AddFeedbackParam(*iter);
+ }
+}
+
// Returns the codec in |codecs| whose id matches |payload_type|, or, when no
// entry matches, a default-constructed codec carrying that payload type.
template <class T>
T GetCodecWithPayloadType(const std::vector<T>& codecs, int payload_type) {
  T codec;
  if (!FindCodecById(codecs, payload_type, &codec)) {
    // Not seen yet: hand back an empty codec with only the id filled in.
    codec.id = payload_type;
  }
  return codec;
}
+
+// Updates or creates a new codec entry in the audio description.
+template <class T, class U>
+void AddOrReplaceCodec(MediaContentDescription* content_desc, const U& codec) {
+ T* desc = static_cast<T*>(content_desc);
+ std::vector<U> codecs = desc->codecs();
+ bool found = false;
+
+ typename std::vector<U>::iterator iter;
+ for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+ if (iter->id == codec.id) {
+ *iter = codec;
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ desc->AddCodec(codec);
+ return;
+ }
+ desc->set_codecs(codecs);
+}
+
+// Adds or updates existing codec corresponding to |payload_type| according
+// to |parameters|.
+template <class T, class U>
+void UpdateCodec(MediaContentDescription* content_desc, int payload_type,
+ const cricket::CodecParameterMap& parameters) {
+ // Codec might already have been populated (from rtpmap).
+ U new_codec = GetCodecWithPayloadType(static_cast<T*>(content_desc)->codecs(),
+ payload_type);
+ AddParameters(parameters, &new_codec);
+ AddOrReplaceCodec<T, U>(content_desc, new_codec);
+}
+
+// Adds or updates existing codec corresponding to |payload_type| according
+// to |feedback_param|.
+template <class T, class U>
+void UpdateCodec(MediaContentDescription* content_desc, int payload_type,
+ const cricket::FeedbackParam& feedback_param) {
+ // Codec might already have been populated (from rtpmap).
+ U new_codec = GetCodecWithPayloadType(static_cast<T*>(content_desc)->codecs(),
+ payload_type);
+ AddFeedbackParameter(feedback_param, &new_codec);
+ AddOrReplaceCodec<T, U>(content_desc, new_codec);
+}
+
+template <class T>
+bool PopWildcardCodec(std::vector<T>* codecs, T* wildcard_codec) {
+ for (auto iter = codecs->begin(); iter != codecs->end(); ++iter) {
+ if (iter->id == kWildcardPayloadType) {
+ *wildcard_codec = *iter;
+ codecs->erase(iter);
+ return true;
+ }
+ }
+ return false;
+}
+
+template<class T>
+void UpdateFromWildcardCodecs(cricket::MediaContentDescriptionImpl<T>* desc) {
+ auto codecs = desc->codecs();
+ T wildcard_codec;
+ if (!PopWildcardCodec(&codecs, &wildcard_codec)) {
+ return;
+ }
+ for (auto& codec : codecs) {
+ AddFeedbackParameters(wildcard_codec.feedback_params, &codec);
+ }
+ desc->set_codecs(codecs);
+}
+
+void AddAudioAttribute(const std::string& name, const std::string& value,
+ AudioContentDescription* audio_desc) {
+ if (value.empty()) {
+ return;
+ }
+ std::vector<cricket::AudioCodec> codecs = audio_desc->codecs();
+ for (std::vector<cricket::AudioCodec>::iterator iter = codecs.begin();
+ iter != codecs.end(); ++iter) {
+ iter->params[name] = value;
+ }
+ audio_desc->set_codecs(codecs);
+}
+
+bool ParseContent(const std::string& message,
+ const MediaType media_type,
+ int mline_index,
+ const std::string& protocol,
+ const std::vector<int>& codec_preference,
+ size_t* pos,
+ std::string* content_name,
+ MediaContentDescription* media_desc,
+ TransportDescription* transport,
+ std::vector<JsepIceCandidate*>* candidates,
+ SdpParseError* error) {
+ ASSERT(media_desc != NULL);
+ ASSERT(content_name != NULL);
+ ASSERT(transport != NULL);
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ MaybeCreateStaticPayloadAudioCodecs(
+ codec_preference, static_cast<AudioContentDescription*>(media_desc));
+ }
+
+ // The media level "ice-ufrag" and "ice-pwd".
+ // The candidates before update the media level "ice-pwd" and "ice-ufrag".
+ Candidates candidates_orig;
+ std::string line;
+ std::string mline_id;
+ // Tracks created out of the ssrc attributes.
+ StreamParamsVec tracks;
+ SsrcInfoVec ssrc_infos;
+ SsrcGroupVec ssrc_groups;
+ std::string maxptime_as_string;
+ std::string ptime_as_string;
+
+ // Loop until the next m line
+ while (!IsLineType(message, kLineTypeMedia, *pos)) {
+ if (!GetLine(message, pos, &line)) {
+ if (*pos >= message.size()) {
+ break; // Done parsing
+ } else {
+ return ParseFailed(message, *pos, "Invalid SDP line.", error);
+ }
+ }
+
+ // RFC 4566
+ // b=* (zero or more bandwidth information lines)
+ if (IsLineType(line, kLineTypeSessionBandwidth)) {
+ std::string bandwidth;
+ if (HasAttribute(line, kApplicationSpecificMaximum)) {
+ if (!GetValue(line, kApplicationSpecificMaximum, &bandwidth, error)) {
+ return false;
+ } else {
+ int b = 0;
+ if (!GetValueFromString(line, bandwidth, &b, error)) {
+ return false;
+ }
+ // We should never use more than the default bandwidth for RTP-based
+ // data channels. Don't allow SDP to set the bandwidth, because
+ // that would give JS the opportunity to "break the Internet".
+ // See: https://code.google.com/p/chromium/issues/detail?id=280726
+ if (media_type == cricket::MEDIA_TYPE_DATA && IsRtp(protocol) &&
+ b > cricket::kDataMaxBandwidth / 1000) {
+ std::ostringstream description;
+ description << "RTP-based data channels may not send more than "
+ << cricket::kDataMaxBandwidth / 1000 << "kbps.";
+ return ParseFailed(line, description.str(), error);
+ }
+ media_desc->set_bandwidth(b * 1000);
+ }
+ }
+ continue;
+ }
+
+ if (!IsLineType(line, kLineTypeAttributes)) {
+ // TODO: Handle other lines if needed.
+ LOG(LS_INFO) << "Ignored line: " << line;
+ continue;
+ }
+
+ // Handle attributes common to SCTP and RTP.
+ if (HasAttribute(line, kAttributeMid)) {
+ // RFC 3388
+ // mid-attribute = "a=mid:" identification-tag
+ // identification-tag = token
+ // Use the mid identification-tag as the content name.
+ if (!GetValue(line, kAttributeMid, &mline_id, error)) {
+ return false;
+ }
+ *content_name = mline_id;
+ } else if (HasAttribute(line, kAttributeCandidate)) {
+ Candidate candidate;
+ if (!ParseCandidate(line, &candidate, error, false)) {
+ return false;
+ }
+ candidates_orig.push_back(candidate);
+ } else if (HasAttribute(line, kAttributeIceUfrag)) {
+ if (!GetValue(line, kAttributeIceUfrag, &transport->ice_ufrag, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeIcePwd)) {
+ if (!GetValue(line, kAttributeIcePwd, &transport->ice_pwd, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeIceOption)) {
+ if (!ParseIceOptions(line, &transport->transport_options, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeFmtp)) {
+ if (!ParseFmtpAttributes(line, media_type, media_desc, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeFingerprint)) {
+ rtc::SSLFingerprint* fingerprint = NULL;
+
+ if (!ParseFingerprintAttribute(line, &fingerprint, error)) {
+ return false;
+ }
+ transport->identity_fingerprint.reset(fingerprint);
+ } else if (HasAttribute(line, kAttributeSetup)) {
+ if (!ParseDtlsSetup(line, &(transport->connection_role), error)) {
+ return false;
+ }
+ } else if (IsDtlsSctp(protocol) && HasAttribute(line, kAttributeSctpPort)) {
+ int sctp_port;
+ if (!ParseSctpPort(line, &sctp_port, error)) {
+ return false;
+ }
+ if (!AddSctpDataCodec(static_cast<DataContentDescription*>(media_desc),
+ sctp_port)) {
+ return false;
+ }
+ } else if (IsRtp(protocol)) {
+ //
+ // RTP specific attrubtes
+ //
+ if (HasAttribute(line, kAttributeRtcpMux)) {
+ media_desc->set_rtcp_mux(true);
+ } else if (HasAttribute(line, kAttributeSsrcGroup)) {
+ if (!ParseSsrcGroupAttribute(line, &ssrc_groups, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeSsrc)) {
+ if (!ParseSsrcAttribute(line, &ssrc_infos, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeCrypto)) {
+ if (!ParseCryptoAttribute(line, media_desc, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeRtpmap)) {
+ if (!ParseRtpmapAttribute(line, media_type, codec_preference,
+ media_desc, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kCodecParamMaxPTime)) {
+ if (!GetValue(line, kCodecParamMaxPTime, &maxptime_as_string, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeRtcpFb)) {
+ if (!ParseRtcpFbAttribute(line, media_type, media_desc, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kCodecParamPTime)) {
+ if (!GetValue(line, kCodecParamPTime, &ptime_as_string, error)) {
+ return false;
+ }
+ } else if (HasAttribute(line, kAttributeSendOnly)) {
+ media_desc->set_direction(cricket::MD_SENDONLY);
+ } else if (HasAttribute(line, kAttributeRecvOnly)) {
+ media_desc->set_direction(cricket::MD_RECVONLY);
+ } else if (HasAttribute(line, kAttributeInactive)) {
+ media_desc->set_direction(cricket::MD_INACTIVE);
+ } else if (HasAttribute(line, kAttributeSendRecv)) {
+ media_desc->set_direction(cricket::MD_SENDRECV);
+ } else if (HasAttribute(line, kAttributeExtmap)) {
+ RtpHeaderExtension extmap;
+ if (!ParseExtmap(line, &extmap, error)) {
+ return false;
+ }
+ media_desc->AddRtpHeaderExtension(extmap);
+ } else if (HasAttribute(line, kAttributeXGoogleFlag)) {
+ // Experimental attribute. Conference mode activates more aggressive
+ // AEC and NS settings.
+ // TODO: expose API to set these directly.
+ std::string flag_value;
+ if (!GetValue(line, kAttributeXGoogleFlag, &flag_value, error)) {
+ return false;
+ }
+ if (flag_value.compare(kValueConference) == 0)
+ media_desc->set_conference_mode(true);
+ }
+ } else {
+ // Only parse lines that we are interested of.
+ LOG(LS_INFO) << "Ignored line: " << line;
+ continue;
+ }
+ }
+
+ // Create tracks from the |ssrc_infos|.
+ CreateTracksFromSsrcInfos(ssrc_infos, &tracks);
+
+ // Add the ssrc group to the track.
+ for (SsrcGroupVec::iterator ssrc_group = ssrc_groups.begin();
+ ssrc_group != ssrc_groups.end(); ++ssrc_group) {
+ if (ssrc_group->ssrcs.empty()) {
+ continue;
+ }
+ uint32_t ssrc = ssrc_group->ssrcs.front();
+ for (StreamParamsVec::iterator track = tracks.begin();
+ track != tracks.end(); ++track) {
+ if (track->has_ssrc(ssrc)) {
+ track->ssrc_groups.push_back(*ssrc_group);
+ }
+ }
+ }
+
+ // Add the new tracks to the |media_desc|.
+ for (StreamParamsVec::iterator track = tracks.begin();
+ track != tracks.end(); ++track) {
+ media_desc->AddStream(*track);
+ }
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ AudioContentDescription* audio_desc =
+ static_cast<AudioContentDescription*>(media_desc);
+ UpdateFromWildcardCodecs(audio_desc);
+
+ // Verify audio codec ensures that no audio codec has been populated with
+ // only fmtp.
+ if (!VerifyAudioCodecs(audio_desc)) {
+ return ParseFailed("Failed to parse audio codecs correctly.", error);
+ }
+ AddAudioAttribute(kCodecParamMaxPTime, maxptime_as_string, audio_desc);
+ AddAudioAttribute(kCodecParamPTime, ptime_as_string, audio_desc);
+ }
+
+ if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ VideoContentDescription* video_desc =
+ static_cast<VideoContentDescription*>(media_desc);
+ UpdateFromWildcardCodecs(video_desc);
+ // Verify video codec ensures that no video codec has been populated with
+ // only rtcp-fb.
+ if (!VerifyVideoCodecs(video_desc)) {
+ return ParseFailed("Failed to parse video codecs correctly.", error);
+ }
+ }
+
+ // RFC 5245
+ // Update the candidates with the media level "ice-pwd" and "ice-ufrag".
+ for (Candidates::iterator it = candidates_orig.begin();
+ it != candidates_orig.end(); ++it) {
+ ASSERT((*it).username().empty());
+ (*it).set_username(transport->ice_ufrag);
+ ASSERT((*it).password().empty());
+ (*it).set_password(transport->ice_pwd);
+ candidates->push_back(
+ new JsepIceCandidate(mline_id, mline_index, *it));
+ }
+ return true;
+}
+
+bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos,
+ SdpParseError* error) {
+ ASSERT(ssrc_infos != NULL);
+ // RFC 5576
+ // a=ssrc:<ssrc-id> <attribute>
+ // a=ssrc:<ssrc-id> <attribute>:<value>
+ std::string field1, field2;
+ if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+ &field1, &field2)) {
+ const size_t expected_fields = 2;
+ return ParseFailedExpectFieldNum(line, expected_fields, error);
+ }
+
+ // ssrc:<ssrc-id>
+ std::string ssrc_id_s;
+ if (!GetValue(field1, kAttributeSsrc, &ssrc_id_s, error)) {
+ return false;
+ }
+ uint32_t ssrc_id = 0;
+ if (!GetValueFromString(line, ssrc_id_s, &ssrc_id, error)) {
+ return false;
+ }
+
+ std::string attribute;
+ std::string value;
+ if (!rtc::tokenize_first(field2, kSdpDelimiterColon, &attribute, &value)) {
+ std::ostringstream description;
+ description << "Failed to get the ssrc attribute value from " << field2
+ << ". Expected format <attribute>:<value>.";
+ return ParseFailed(line, description.str(), error);
+ }
+
+ // Check if there's already an item for this |ssrc_id|. Create a new one if
+ // there isn't.
+ SsrcInfoVec::iterator ssrc_info = ssrc_infos->begin();
+ for (; ssrc_info != ssrc_infos->end(); ++ssrc_info) {
+ if (ssrc_info->ssrc_id == ssrc_id) {
+ break;
+ }
+ }
+ if (ssrc_info == ssrc_infos->end()) {
+ SsrcInfo info;
+ info.ssrc_id = ssrc_id;
+ ssrc_infos->push_back(info);
+ ssrc_info = ssrc_infos->end() - 1;
+ }
+
+ // Store the info to the |ssrc_info|.
+ if (attribute == kSsrcAttributeCname) {
+ // RFC 5576
+ // cname:<value>
+ ssrc_info->cname = value;
+ } else if (attribute == kSsrcAttributeMsid) {
+ // draft-alvestrand-mmusic-msid-00
+ // "msid:" identifier [ " " appdata ]
+ std::vector<std::string> fields;
+ rtc::split(value, kSdpDelimiterSpace, &fields);
+ if (fields.size() < 1 || fields.size() > 2) {
+ return ParseFailed(line,
+ "Expected format \"msid:<identifier>[ <appdata>]\".",
+ error);
+ }
+ ssrc_info->msid_identifier = fields[0];
+ if (fields.size() == 2) {
+ ssrc_info->msid_appdata = fields[1];
+ }
+ } else if (attribute == kSsrcAttributeMslabel) {
+ // draft-alvestrand-rtcweb-mid-01
+ // mslabel:<value>
+ ssrc_info->mslabel = value;
+ } else if (attribute == kSSrcAttributeLabel) {
+ // The label isn't defined.
+ // label:<value>
+ ssrc_info->label = value;
+ }
+ return true;
+}
+
+bool ParseSsrcGroupAttribute(const std::string& line,
+ SsrcGroupVec* ssrc_groups,
+ SdpParseError* error) {
+ ASSERT(ssrc_groups != NULL);
+ // RFC 5576
+ // a=ssrc-group:<semantics> <ssrc-id> ...
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ const size_t expected_min_fields = 2;
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ std::string semantics;
+ if (!GetValue(fields[0], kAttributeSsrcGroup, &semantics, error)) {
+ return false;
+ }
+ std::vector<uint32_t> ssrcs;
+ for (size_t i = 1; i < fields.size(); ++i) {
+ uint32_t ssrc = 0;
+ if (!GetValueFromString(line, fields[i], &ssrc, error)) {
+ return false;
+ }
+ ssrcs.push_back(ssrc);
+ }
+ ssrc_groups->push_back(SsrcGroup(semantics, ssrcs));
+ return true;
+}
+
+bool ParseCryptoAttribute(const std::string& line,
+ MediaContentDescription* media_desc,
+ SdpParseError* error) {
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ // RFC 4568
+ // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
+ const size_t expected_min_fields = 3;
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ std::string tag_value;
+ if (!GetValue(fields[0], kAttributeCrypto, &tag_value, error)) {
+ return false;
+ }
+ int tag = 0;
+ if (!GetValueFromString(line, tag_value, &tag, error)) {
+ return false;
+ }
+ const std::string& crypto_suite = fields[1];
+ const std::string& key_params = fields[2];
+ std::string session_params;
+ if (fields.size() > 3) {
+ session_params = fields[3];
+ }
+ media_desc->AddCrypto(CryptoParams(tag, crypto_suite, key_params,
+ session_params));
+ return true;
+}
+
+// Updates or creates a new codec entry in the audio description with according
+// to |name|, |clockrate|, |bitrate|, |channels| and |preference|.
+void UpdateCodec(int payload_type, const std::string& name, int clockrate,
+ int bitrate, int channels, int preference,
+ AudioContentDescription* audio_desc) {
+ // Codec may already be populated with (only) optional parameters
+ // (from an fmtp).
+ cricket::AudioCodec codec =
+ GetCodecWithPayloadType(audio_desc->codecs(), payload_type);
+ codec.name = name;
+ codec.clockrate = clockrate;
+ codec.bitrate = bitrate;
+ codec.channels = channels;
+ codec.preference = preference;
+ AddOrReplaceCodec<AudioContentDescription, cricket::AudioCodec>(audio_desc,
+ codec);
+}
+
+// Updates or creates a new codec entry in the video description according to
+// |name|, |width|, |height|, |framerate| and |preference|.
+void UpdateCodec(int payload_type, const std::string& name, int width,
+ int height, int framerate, int preference,
+ VideoContentDescription* video_desc) {
+ // Codec may already be populated with (only) optional parameters
+ // (from an fmtp).
+ cricket::VideoCodec codec =
+ GetCodecWithPayloadType(video_desc->codecs(), payload_type);
+ codec.name = name;
+ codec.width = width;
+ codec.height = height;
+ codec.framerate = framerate;
+ codec.preference = preference;
+ AddOrReplaceCodec<VideoContentDescription, cricket::VideoCodec>(video_desc,
+ codec);
+}
+
+bool ParseRtpmapAttribute(const std::string& line,
+ const MediaType media_type,
+ const std::vector<int>& codec_preference,
+ MediaContentDescription* media_desc,
+ SdpParseError* error) {
+ std::vector<std::string> fields;
+ rtc::split(line.substr(kLinePrefixLength),
+ kSdpDelimiterSpace, &fields);
+ // RFC 4566
+ // a=rtpmap:<payload type> <encoding name>/<clock rate>[/<encodingparameters>]
+ const size_t expected_min_fields = 2;
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ std::string payload_type_value;
+ if (!GetValue(fields[0], kAttributeRtpmap, &payload_type_value, error)) {
+ return false;
+ }
+ int payload_type = 0;
+ if (!GetPayloadTypeFromString(line, payload_type_value, &payload_type,
+ error)) {
+ return false;
+ }
+
+ // Set the preference order depending on the order of the pl type in the
+ // <fmt> of the m-line.
+ const int preference = codec_preference.end() -
+ std::find(codec_preference.begin(), codec_preference.end(),
+ payload_type);
+ if (preference == 0) {
+ LOG(LS_WARNING) << "Ignore rtpmap line that did not appear in the "
+ << "<fmt> of the m-line: " << line;
+ return true;
+ }
+ const std::string& encoder = fields[1];
+ std::vector<std::string> codec_params;
+ rtc::split(encoder, '/', &codec_params);
+ // <encoding name>/<clock rate>[/<encodingparameters>]
+ // 2 mandatory fields
+ if (codec_params.size() < 2 || codec_params.size() > 3) {
+ return ParseFailed(line,
+ "Expected format \"<encoding name>/<clock rate>"
+ "[/<encodingparameters>]\".",
+ error);
+ }
+ const std::string& encoding_name = codec_params[0];
+ int clock_rate = 0;
+ if (!GetValueFromString(line, codec_params[1], &clock_rate, error)) {
+ return false;
+ }
+ if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ VideoContentDescription* video_desc =
+ static_cast<VideoContentDescription*>(media_desc);
+ // TODO: We will send resolution in SDP. For now use
+ // JsepSessionDescription::kMaxVideoCodecWidth and kMaxVideoCodecHeight.
+ UpdateCodec(payload_type, encoding_name,
+ JsepSessionDescription::kMaxVideoCodecWidth,
+ JsepSessionDescription::kMaxVideoCodecHeight,
+ JsepSessionDescription::kDefaultVideoCodecFramerate,
+ preference, video_desc);
+ } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ // RFC 4566
+ // For audio streams, <encoding parameters> indicates the number
+ // of audio channels. This parameter is OPTIONAL and may be
+ // omitted if the number of channels is one, provided that no
+ // additional parameters are needed.
+ int channels = 1;
+ if (codec_params.size() == 3) {
+ if (!GetValueFromString(line, codec_params[2], &channels, error)) {
+ return false;
+ }
+ }
+ int bitrate = 0;
+ // The default behavior for ISAC (bitrate == 0) in webrtcvoiceengine.cc
+ // (specifically FindWebRtcCodec) is bandwidth-adaptive variable bitrate.
+ // The bandwidth adaptation doesn't always work well, so this code
+ // sets a fixed target bitrate instead.
+ if (_stricmp(encoding_name.c_str(), kIsacCodecName) == 0) {
+ if (clock_rate <= 16000) {
+ bitrate = kIsacWbDefaultRate;
+ } else {
+ bitrate = kIsacSwbDefaultRate;
+ }
+ }
+ AudioContentDescription* audio_desc =
+ static_cast<AudioContentDescription*>(media_desc);
+ UpdateCodec(payload_type, encoding_name, clock_rate, bitrate, channels,
+ preference, audio_desc);
+ } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+ DataContentDescription* data_desc =
+ static_cast<DataContentDescription*>(media_desc);
+ data_desc->AddCodec(cricket::DataCodec(payload_type, encoding_name,
+ preference));
+ }
+ return true;
+}
+
+bool ParseFmtpParam(const std::string& line, std::string* parameter,
+ std::string* value, SdpParseError* error) {
+ if (!rtc::tokenize_first(line, kSdpDelimiterEqual, parameter, value)) {
+ ParseFailed(line, "Unable to parse fmtp parameter. \'=\' missing.", error);
+ return false;
+ }
+ // a=fmtp:<payload_type> <param1>=<value1>; <param2>=<value2>; ...
+ return true;
+}
+
+bool ParseFmtpAttributes(const std::string& line, const MediaType media_type,
+ MediaContentDescription* media_desc,
+ SdpParseError* error) {
+ if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+ media_type != cricket::MEDIA_TYPE_VIDEO) {
+ return true;
+ }
+
+ std::string line_payload;
+ std::string line_params;
+
+ // RFC 5576
+ // a=fmtp:<format> <format specific parameters>
+ // At least two fields, whereas the second one is any of the optional
+ // parameters.
+ if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+ &line_payload, &line_params)) {
+ ParseFailedExpectMinFieldNum(line, 2, error);
+ return false;
+ }
+
+ // Parse out the payload information.
+ std::string payload_type_str;
+ if (!GetValue(line_payload, kAttributeFmtp, &payload_type_str, error)) {
+ return false;
+ }
+
+ int payload_type = 0;
+ if (!GetPayloadTypeFromString(line_payload, payload_type_str, &payload_type,
+ error)) {
+ return false;
+ }
+
+ // Parse out format specific parameters.
+ std::vector<std::string> fields;
+ rtc::split(line_params, kSdpDelimiterSemicolon, &fields);
+
+ cricket::CodecParameterMap codec_params;
+ for (auto& iter : fields) {
+ if (iter.find(kSdpDelimiterEqual) == std::string::npos) {
+ // Only fmtps with equals are currently supported. Other fmtp types
+ // should be ignored. Unknown fmtps do not constitute an error.
+ continue;
+ }
+
+ std::string name;
+ std::string value;
+ if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) {
+ return false;
+ }
+ codec_params[name] = value;
+ }
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ UpdateCodec<AudioContentDescription, cricket::AudioCodec>(
+ media_desc, payload_type, codec_params);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ UpdateCodec<VideoContentDescription, cricket::VideoCodec>(
+ media_desc, payload_type, codec_params);
+ }
+ return true;
+}
+
+bool ParseRtcpFbAttribute(const std::string& line, const MediaType media_type,
+ MediaContentDescription* media_desc,
+ SdpParseError* error) {
+ if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+ media_type != cricket::MEDIA_TYPE_VIDEO) {
+ return true;
+ }
+ std::vector<std::string> rtcp_fb_fields;
+ rtc::split(line.c_str(), kSdpDelimiterSpace, &rtcp_fb_fields);
+ if (rtcp_fb_fields.size() < 2) {
+ return ParseFailedGetValue(line, kAttributeRtcpFb, error);
+ }
+ std::string payload_type_string;
+ if (!GetValue(rtcp_fb_fields[0], kAttributeRtcpFb, &payload_type_string,
+ error)) {
+ return false;
+ }
+ int payload_type = kWildcardPayloadType;
+ if (payload_type_string != "*") {
+ if (!GetPayloadTypeFromString(line, payload_type_string, &payload_type,
+ error)) {
+ return false;
+ }
+ }
+ std::string id = rtcp_fb_fields[1];
+ std::string param = "";
+ for (std::vector<std::string>::iterator iter = rtcp_fb_fields.begin() + 2;
+ iter != rtcp_fb_fields.end(); ++iter) {
+ param.append(*iter);
+ }
+ const cricket::FeedbackParam feedback_param(id, param);
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ UpdateCodec<AudioContentDescription, cricket::AudioCodec>(
+ media_desc, payload_type, feedback_param);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ UpdateCodec<VideoContentDescription, cricket::VideoCodec>(
+ media_desc, payload_type, feedback_param);
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsdp.h b/talk/app/webrtc/webrtcsdp.h
new file mode 100644
index 0000000000..fcbbdad3d3
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp.h
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contain functions for parsing and serializing SDP messages.
+// Related RFC/draft including:
+// * RFC 4566 - SDP
+// * RFC 5245 - ICE
+// * RFC 3388 - Grouping of Media Lines in SDP
+// * RFC 4568 - SDP Security Descriptions for Media Streams
+// * draft-lennox-mmusic-sdp-source-selection-02 -
+// Mechanisms for Media Source Selection in SDP
+
+#ifndef TALK_APP_WEBRTC_WEBRTCSDP_H_
+#define TALK_APP_WEBRTC_WEBRTCSDP_H_
+
+#include <string>
+
+namespace webrtc {
+
+class IceCandidateInterface;
+class JsepIceCandidate;
+class JsepSessionDescription;
+struct SdpParseError;
+
+// Serializes the passed in JsepSessionDescription.
+// Serialize SessionDescription including candidates if
+// JsepSessionDescription has candidates.
+// jdesc - The JsepSessionDescription object to be serialized.
+// return - SDP string serialized from the arguments.
+std::string SdpSerialize(const JsepSessionDescription& jdesc);
+
+// Serializes the passed in IceCandidateInterface to a SDP string.
+// candidate - The candidate to be serialized.
+std::string SdpSerializeCandidate(const IceCandidateInterface& candidate);
+
+// Deserializes the passed in SDP string to a JsepSessionDescription.
+// message - SDP string to be Deserialized.
+// jdesc - The JsepSessionDescription deserialized from the SDP string.
+// error - The detail error information when parsing fails.
+// return - true on success, false on failure.
+bool SdpDeserialize(const std::string& message,
+ JsepSessionDescription* jdesc,
+ SdpParseError* error);
+
+// Deserializes the passed in SDP string to one JsepIceCandidate.
+// The first line must be a=candidate line and only the first line will be
+// parsed.
+// message - The SDP string to be Deserialized.
+// candidates - The JsepIceCandidate from the SDP string.
+// error - The detail error information when parsing fails.
+// return - true on success, false on failure.
+bool SdpDeserializeCandidate(const std::string& message,
+ JsepIceCandidate* candidate,
+ SdpParseError* error);
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_WEBRTCSDP_H_
diff --git a/talk/app/webrtc/webrtcsdp_unittest.cc b/talk/app/webrtc/webrtcsdp_unittest.cc
new file mode 100644
index 0000000000..cb6a392ab4
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -0,0 +1,2710 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/webrtcsdp.h"
+#include "talk/media/base/constants.h"
+#include "webrtc/p2p/base/constants.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sslfingerprint.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+using cricket::AudioCodec;
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::ContentGroup;
+using cricket::DataCodec;
+using cricket::DataContentDescription;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::kFecSsrcGroupSemantics;
+using cricket::LOCAL_PORT_TYPE;
+using cricket::NS_JINGLE_DRAFT_SCTP;
+using cricket::NS_JINGLE_RTP;
+using cricket::RtpHeaderExtension;
+using cricket::RELAY_PORT_TYPE;
+using cricket::SessionDescription;
+using cricket::StreamParams;
+using cricket::STUN_PORT_TYPE;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::VideoCodec;
+using cricket::VideoContentDescription;
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::SdpParseError;
+using webrtc::SessionDescriptionInterface;
+
+typedef std::vector<AudioCodec> AudioCodecs;
+typedef std::vector<Candidate> Candidates;
+
+static const uint32_t kDefaultSctpPort = 5000;
+static const char kSessionTime[] = "t=0 0\r\n";
+static const uint32_t kCandidatePriority = 2130706432U; // pref = 1.0
+static const char kCandidateUfragVoice[] = "ufrag_voice";
+static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kAttributeIcePwdVoice[] = "a=ice-pwd:pwd_voice\r\n";
+static const char kCandidateUfragVideo[] = "ufrag_video";
+static const char kCandidatePwdVideo[] = "pwd_video";
+static const char kCandidateUfragData[] = "ufrag_data";
+static const char kCandidatePwdData[] = "pwd_data";
+static const char kAttributeIcePwdVideo[] = "a=ice-pwd:pwd_video\r\n";
+static const uint32_t kCandidateGeneration = 2;
+static const char kCandidateFoundation1[] = "a0+B/1";
+static const char kCandidateFoundation2[] = "a0+B/2";
+static const char kCandidateFoundation3[] = "a0+B/3";
+static const char kCandidateFoundation4[] = "a0+B/4";
+static const char kAttributeCryptoVoice[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n";
+static const char kAttributeCryptoVideo[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n";
+static const char kFingerprint[] = "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n";
+static const int kExtmapId = 1;
+static const char kExtmapUri[] = "http://example.com/082005/ext.htm#ttime";
+static const char kExtmap[] =
+ "a=extmap:1 http://example.com/082005/ext.htm#ttime\r\n";
+static const char kExtmapWithDirectionAndAttribute[] =
+ "a=extmap:1/sendrecv http://example.com/082005/ext.htm#ttime a1 a2\r\n";
+
+static const uint8_t kIdentityDigest[] = {
+ 0x4A, 0xAD, 0xB9, 0xB1, 0x3F, 0x82, 0x18, 0x3B, 0x54, 0x02,
+ 0x12, 0xDF, 0x3E, 0x5D, 0x49, 0x6B, 0x19, 0xE5, 0x7C, 0xAB};
+
+static const char kDtlsSctp[] = "DTLS/SCTP";
+static const char kUdpDtlsSctp[] = "UDP/DTLS/SCTP";
+static const char kTcpDtlsSctp[] = "TCP/DTLS/SCTP";
+
+struct CodecParams {
+ int max_ptime;
+ int ptime;
+ int min_ptime;
+ int sprop_stereo;
+ int stereo;
+ int useinband;
+ int maxaveragebitrate;
+};
+
+// Reference sdp string
+static const char kSdpFullString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ "a=ssrc:1 mslabel:local_stream_1\r\n"
+ "a=ssrc:1 label:audio_track_id_1\r\n"
+ "a=ssrc:4 cname:stream_2_cname\r\n"
+ "a=ssrc:4 msid:local_stream_2 audio_track_id_2\r\n"
+ "a=ssrc:4 mslabel:local_stream_2\r\n"
+ "a=ssrc:4 label:audio_track_id_2\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.125.224.39\r\n"
+ "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:2 mslabel:local_stream_1\r\n"
+ "a=ssrc:2 label:video_track_id_1\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ "a=ssrc:3 msid:local_stream_1 video_track_id_2\r\n"
+ "a=ssrc:3 mslabel:local_stream_1\r\n"
+ "a=ssrc:3 label:video_track_id_2\r\n"
+ "a=ssrc-group:FEC 5 6\r\n"
+ "a=ssrc:5 cname:stream_2_cname\r\n"
+ "a=ssrc:5 msid:local_stream_2 video_track_id_3\r\n"
+ "a=ssrc:5 mslabel:local_stream_2\r\n"
+ "a=ssrc:5 label:video_track_id_3\r\n"
+ "a=ssrc:6 cname:stream_2_cname\r\n"
+ "a=ssrc:6 msid:local_stream_2 video_track_id_3\r\n"
+ "a=ssrc:6 mslabel:local_stream_2\r\n"
+ "a=ssrc:6 label:video_track_id_3\r\n";
+
+// SDP reference string without the candidates.
+static const char kSdpString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
+    "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+    "a=mid:audio_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtcp-mux\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+    "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+    "dummy_session_params\r\n"
+    "a=rtpmap:111 opus/48000/2\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=rtpmap:104 ISAC/32000\r\n"
+    "a=ssrc:1 cname:stream_1_cname\r\n"
+    "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+    "a=ssrc:1 mslabel:local_stream_1\r\n"
+    "a=ssrc:1 label:audio_track_id_1\r\n"
+    "a=ssrc:4 cname:stream_2_cname\r\n"
+    "a=ssrc:4 msid:local_stream_2 audio_track_id_2\r\n"
+    "a=ssrc:4 mslabel:local_stream_2\r\n"
+    "a=ssrc:4 label:audio_track_id_2\r\n"
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+    "a=mid:video_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream_1_cname\r\n"
+    "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+    "a=ssrc:2 mslabel:local_stream_1\r\n"
+    "a=ssrc:2 label:video_track_id_1\r\n"
+    "a=ssrc:3 cname:stream_1_cname\r\n"
+    "a=ssrc:3 msid:local_stream_1 video_track_id_2\r\n"
+    "a=ssrc:3 mslabel:local_stream_1\r\n"
+    "a=ssrc:3 label:video_track_id_2\r\n"
+    "a=ssrc-group:FEC 5 6\r\n"
+    "a=ssrc:5 cname:stream_2_cname\r\n"
+    "a=ssrc:5 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:5 mslabel:local_stream_2\r\n"
+    "a=ssrc:5 label:video_track_id_3\r\n"
+    "a=ssrc:6 cname:stream_2_cname\r\n"
+    "a=ssrc:6 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:6 mslabel:local_stream_2\r\n"
+    "a=ssrc:6 label:video_track_id_3\r\n"
+
+// Reference string for an RTP data channel m= section (google-data codec,
+// SDES crypto, SSRC-signaled stream).
+static const char kSdpRtpDataChannelString[] =
+    "m=application 9 RTP/SAVPF 101\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5\r\n"
+    "a=rtpmap:101 google-data/90000\r\n"
+    "a=ssrc:10 cname:data_channel_cname\r\n"
+    "a=ssrc:10 msid:data_channel data_channeld0\r\n"
+    "a=ssrc:10 mslabel:data_channel\r\n"
+    "a=ssrc:10 label:data_channeld0\r\n";
+
+// Reference string for an SCTP data channel m= section using the older
+// a=sctpmap style of signaling the SCTP port.
+static const char kSdpSctpDataChannelString[] =
+    "m=application 9 DTLS/SCTP 5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+// draft-ietf-mmusic-sctp-sdp-12
+// NOTE(review): this variant writes "a=sctp-port 5000" with a space, while
+// the next constant uses "a=sctp-port:5000" — presumably the two exist to
+// cover both separator forms the parser accepts; confirm against the parser.
+static const char kSdpSctpDataChannelStringWithSctpPort[] =
+    "m=application 9 DTLS/SCTP webrtc-datachannel\r\n"
+    "a=max-message-size=100000\r\n"
+    "a=sctp-port 5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n";
+
+// Same as above but with the colon-separated "a=sctp-port:" syntax.
+static const char kSdpSctpDataChannelStringWithSctpColonPort[] =
+    "m=application 9 DTLS/SCTP webrtc-datachannel\r\n"
+    "a=max-message-size=100000\r\n"
+    "a=sctp-port:5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n";
+
+// SCTP data channel m= section that also carries host and srflx candidates.
+static const char kSdpSctpDataChannelWithCandidatesString[] =
+    "m=application 2345 DTLS/SCTP 5000\r\n"
+    "c=IN IP4 74.125.127.126\r\n"
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+    "raddr 192.168.1.5 rport 2346 "
+    "generation 2\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+// Session with the x-google-flag:conference attribute on both m= sections.
+static const char kSdpConferenceString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS\r\n"
+    "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=x-google-flag:conference\r\n"
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=x-google-flag:conference\r\n";
+
+// Session-level lines only; kSdpAudioString/kSdpVideoString below supply the
+// matching media sections.
+static const char kSdpSessionString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream\r\n";
+
+static const char kSdpAudioString[] =
+    "m=audio 9 RTP/SAVPF 111\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+    "a=mid:audio_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:111 opus/48000/2\r\n"
+    "a=ssrc:1 cname:stream_1_cname\r\n"
+    "a=ssrc:1 msid:local_stream audio_track_id_1\r\n"
+    "a=ssrc:1 mslabel:local_stream\r\n"
+    "a=ssrc:1 label:audio_track_id_1\r\n";
+
+static const char kSdpVideoString[] =
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+    "a=mid:video_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream_1_cname\r\n"
+    "a=ssrc:2 msid:local_stream video_track_id_1\r\n"
+    "a=ssrc:2 mslabel:local_stream\r\n"
+    "a=ssrc:2 label:video_track_id_1\r\n";
+
+
+// One candidate reference string as per W3c spec.
+// candidate:<blah> not a=candidate:<blah>CRLF
+static const char kRawCandidate[] =
+    "candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2";
+// One candidate reference string.
+static const char kSdpOneCandidate[] =
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+    "generation 2\r\n";
+
+// TCP candidate variants: the three valid tcptype values (active, passive,
+// so) plus one with an unrecognized tcptype.
+static const char kSdpTcpActiveCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype active generation 2";
+static const char kSdpTcpPassiveCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype passive generation 2";
+static const char kSdpTcpSOCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype so generation 2";
+static const char kSdpTcpInvalidCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype invalid generation 2";
+
+// One candidate reference string with IPV6 address.
+// NOTE(review): the address contains repeated "::" runs, which is not a
+// valid IPv6 literal — presumably this is deliberate malformed input for a
+// parse-failure test; confirm against its usage.
+static const char kRawIPV6Candidate[] =
+    "candidate:a0+B/1 1 udp 2130706432 "
+    "abcd::abcd::abcd::abcd::abcd::abcd::abcd::abcd 1234 typ host generation 2";
+
+// One candidate reference string.
+static const char kSdpOneCandidateOldFormat[] =
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host network_name"
+    " eth0 username user_rtp password password_rtp generation 2\r\n";
+
+// Session id and version
+static const char kSessionId[] = "18446744069414584320";
+static const char kSessionVersion[] = "18446462598732840960";
+
+// Ice options
+static const char kIceOption1[] = "iceoption1";
+static const char kIceOption2[] = "iceoption2";
+static const char kIceOption3[] = "iceoption3";
+
+// Content name
+static const char kAudioContentName[] = "audio_content_name";
+static const char kVideoContentName[] = "video_content_name";
+static const char kDataContentName[] = "data_content_name";
+
+// MediaStream 1
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackId1[] = "audio_track_id_1";
+static const uint32_t kAudioTrack1Ssrc = 1;
+static const char kVideoTrackId1[] = "video_track_id_1";
+static const uint32_t kVideoTrack1Ssrc = 2;
+static const char kVideoTrackId2[] = "video_track_id_2";
+static const uint32_t kVideoTrack2Ssrc = 3;
+
+// MediaStream 2
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStream2Cname[] = "stream_2_cname";
+static const char kAudioTrackId2[] = "audio_track_id_2";
+static const uint32_t kAudioTrack2Ssrc = 4;
+static const char kVideoTrackId3[] = "video_track_id_3";
+static const uint32_t kVideoTrack3Ssrc = 5;
+static const uint32_t kVideoTrack4Ssrc = 6;
+
+// DataChannel
+static const char kDataChannelLabel[] = "data_channel";
+static const char kDataChannelMsid[] = "data_channeld0";
+static const char kDataChannelCname[] = "data_channel_cname";
+static const uint32_t kDataChannelSsrc = 10;
+
+// Candidate
+static const char kDummyMid[] = "dummy_mid";
+static const int kDummyIndex = 123;
+
+// Misc
+static const char kDummyString[] = "dummy";
+
+// Helper functions
+
+// Convenience wrapper: deserializes |message| into |jdesc|, discarding any
+// SdpParseError detail (NULL error argument).
+static bool SdpDeserialize(const std::string& message,
+                           JsepSessionDescription* jdesc) {
+  return webrtc::SdpDeserialize(message, jdesc, NULL);
+}
+
+// Convenience wrapper: deserializes a single candidate line in |message|
+// into |candidate|, discarding any SdpParseError detail.
+static bool SdpDeserializeCandidate(const std::string& message,
+                                    JsepIceCandidate* candidate) {
+  return webrtc::SdpDeserializeCandidate(message, candidate, NULL);
+}
+
+// Add some extra |newlines| to the |message| after |line|.
+// Implemented by replacing every occurrence of |line| with |line|+|newlines|.
+static void InjectAfter(const std::string& line,
+                        const std::string& newlines,
+                        std::string* message) {
+  const std::string tmp = line + newlines;
+  rtc::replace_substrs(line.c_str(), line.length(),
+                       tmp.c_str(), tmp.length(), message);
+}
+
+// Replaces every occurrence of |line| in |message| with |newlines|.
+static void Replace(const std::string& line,
+                    const std::string& newlines,
+                    std::string* message) {
+  rtc::replace_substrs(line.c_str(), line.length(),
+                       newlines.c_str(), newlines.length(), message);
+}
+
+// Expect failure to parse |bad_sdp| and expect |bad_part| to be part of the
+// error message's offending line.
+static void ExpectParseFailure(const std::string& bad_sdp,
+                               const std::string& bad_part) {
+  JsepSessionDescription desc(kDummyString);
+  SdpParseError error;
+  bool ret = webrtc::SdpDeserialize(bad_sdp, &desc, &error);
+  EXPECT_FALSE(ret);
+  // The parser should report the line containing |bad_part| in error.line.
+  EXPECT_NE(std::string::npos, error.line.find(bad_part.c_str()));
+}
+
+// Expect failure to parse kSdpFullString if |good_part| is replaced with
+// |bad_part|.
+static void ExpectParseFailure(const char* good_part, const char* bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  Replace(good_part, bad_part, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
+
+// Expect failure to parse kSdpFullString if |newlines| are added after
+// |injectpoint|.
+static void ExpectParseFailureWithNewLines(const std::string& injectpoint,
+                                           const std::string& newlines,
+                                           const std::string& bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  InjectAfter(injectpoint, newlines, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
+
+// Rewrites every "a=sendrecv" line in |message| with the attribute that
+// corresponds to |direction| (inactive/sendonly/recvonly/sendrecv).
+static void ReplaceDirection(cricket::MediaContentDirection direction,
+                             std::string* message) {
+  std::string new_direction;
+  switch (direction) {
+    case cricket::MD_INACTIVE:
+      new_direction = "a=inactive";
+      break;
+    case cricket::MD_SENDONLY:
+      new_direction = "a=sendonly";
+      break;
+    case cricket::MD_RECVONLY:
+      new_direction = "a=recvonly";
+      break;
+    case cricket::MD_SENDRECV:
+    default:
+      new_direction = "a=sendrecv";
+      break;
+  }
+  Replace("a=sendrecv", new_direction, message);
+}
+
+// Marks the audio and/or video m= lines in |message| as rejected by setting
+// their port to 0 (the SDP convention for a rejected media section). The
+// ports 2345/3457 match the m= lines in kSdpFullString.
+static void ReplaceRejected(bool audio_rejected, bool video_rejected,
+                            std::string* message) {
+  if (audio_rejected) {
+    Replace("m=audio 2345", "m=audio 0", message);
+  }
+  if (video_rejected) {
+    Replace("m=video 3457", "m=video 0", message);
+  }
+}
+
+// WebRtcSdpTest
+
+class WebRtcSdpTest : public testing::Test {
+ public:
+  // Builds the reference SessionDescription (desc_) and JsepSessionDescription
+  // (jdesc_) that the serialization tests compare against the kSdp* string
+  // constants: audio + video contents, their transports, and twelve
+  // candidates (host v4/v6, stun, relay) split between the two m= sections.
+  WebRtcSdpTest()
+      : jdesc_(kDummyString) {
+    // AudioContentDescription
+    audio_desc_ = CreateAudioContentDescription();
+    AudioCodec opus(111, "opus", 48000, 0, 2, 3);
+    audio_desc_->AddCodec(opus);
+    audio_desc_->AddCodec(AudioCodec(103, "ISAC", 16000, 32000, 1, 2));
+    audio_desc_->AddCodec(AudioCodec(104, "ISAC", 32000, 56000, 1, 1));
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
+
+    // VideoContentDescription
+    rtc::scoped_ptr<VideoContentDescription> video(
+        new VideoContentDescription());
+    // video_desc_ keeps a raw observer; ownership transfers to desc_ below.
+    video_desc_ = video.get();
+    StreamParams video_stream1;
+    video_stream1.id = kVideoTrackId1;
+    video_stream1.cname = kStream1Cname;
+    video_stream1.sync_label = kStreamLabel1;
+    video_stream1.ssrcs.push_back(kVideoTrack1Ssrc);
+    video->AddStream(video_stream1);
+    StreamParams video_stream2;
+    video_stream2.id = kVideoTrackId2;
+    video_stream2.cname = kStream1Cname;
+    video_stream2.sync_label = kStreamLabel1;
+    video_stream2.ssrcs.push_back(kVideoTrack2Ssrc);
+    video->AddStream(video_stream2);
+    // Third video stream carries two ssrcs grouped as FEC (matches the
+    // "a=ssrc-group:FEC 5 6" line in the reference SDP).
+    StreamParams video_stream3;
+    video_stream3.id = kVideoTrackId3;
+    video_stream3.cname = kStream2Cname;
+    video_stream3.sync_label = kStreamLabel2;
+    video_stream3.ssrcs.push_back(kVideoTrack3Ssrc);
+    video_stream3.ssrcs.push_back(kVideoTrack4Ssrc);
+    cricket::SsrcGroup ssrc_group(kFecSsrcGroupSemantics, video_stream3.ssrcs);
+    video_stream3.ssrc_groups.push_back(ssrc_group);
+    video->AddStream(video_stream3);
+    video->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32", ""));
+    video->set_protocol(cricket::kMediaProtocolSavpf);
+    video->AddCodec(VideoCodec(
+        120,
+        JsepSessionDescription::kDefaultVideoCodecName,
+        JsepSessionDescription::kMaxVideoCodecWidth,
+        JsepSessionDescription::kMaxVideoCodecHeight,
+        JsepSessionDescription::kDefaultVideoCodecFramerate,
+        JsepSessionDescription::kDefaultVideoCodecPreference));
+
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP,
+                     video.release());
+
+    // TransportInfo
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kAudioContentName,
+                      TransportDescription(kCandidateUfragVoice,
+                                           kCandidatePwdVoice))));
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kVideoContentName,
+                      TransportDescription(kCandidateUfragVideo,
+                                           kCandidatePwdVideo))));
+
+    // v4 host
+    // Ports are allocated sequentially so each candidate below matches the
+    // port numbers in the kSdpFullString candidate lines.
+    int port = 1234;
+    rtc::SocketAddress address("192.168.1.5", port++);
+    Candidate candidate1(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate2(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate3(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate4(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+
+    // v6 host
+    rtc::SocketAddress v6_address("::1", port++);
+    cricket::Candidate candidate5(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate6(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate7(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate8(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+
+    // stun
+    int port_stun = 2345;
+    rtc::SocketAddress address_stun("74.125.127.126", port_stun++);
+    rtc::SocketAddress rel_address_stun("192.168.1.5", port_stun++);
+    cricket::Candidate candidate9(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  address_stun, kCandidatePriority, "", "",
+                                  STUN_PORT_TYPE, kCandidateGeneration,
+                                  kCandidateFoundation3);
+    // srflx candidates carry a related (base) address.
+    candidate9.set_related_address(rel_address_stun);
+
+    address_stun.SetPort(port_stun++);
+    rel_address_stun.SetPort(port_stun++);
+    cricket::Candidate candidate10(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_stun, kCandidatePriority, "", "",
+                                   STUN_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation3);
+    candidate10.set_related_address(rel_address_stun);
+
+    // relay
+    int port_relay = 3456;
+    rtc::SocketAddress address_relay("74.125.224.39", port_relay++);
+    cricket::Candidate candidate11(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   cricket::RELAY_PORT_TYPE,
+                                   kCandidateGeneration, kCandidateFoundation4);
+    address_relay.SetPort(port_relay++);
+    cricket::Candidate candidate12(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   RELAY_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation4);
+
+    // voice
+    candidates_.push_back(candidate1);
+    candidates_.push_back(candidate2);
+    candidates_.push_back(candidate5);
+    candidates_.push_back(candidate6);
+    candidates_.push_back(candidate9);
+    candidates_.push_back(candidate10);
+
+    // video
+    candidates_.push_back(candidate3);
+    candidates_.push_back(candidate4);
+    candidates_.push_back(candidate7);
+    candidates_.push_back(candidate8);
+    candidates_.push_back(candidate11);
+    candidates_.push_back(candidate12);
+
+    jcandidate_.reset(new JsepIceCandidate(std::string("audio_content_name"),
+                                           0, candidate1));
+
+    // Set up JsepSessionDescription.
+    jdesc_.Initialize(desc_.Copy(), kSessionId, kSessionVersion);
+    std::string mline_id;
+    int mline_index = 0;
+    for (size_t i = 0; i< candidates_.size(); ++i) {
+      // In this test, the audio m line index will be 0, and the video m line
+      // will be 1.
+      // The first six entries of candidates_ are voice, the rest video
+      // (matches the push_back order above).
+      bool is_video = (i > 5);
+      mline_id = is_video ? "video_content_name" : "audio_content_name";
+      mline_index = is_video ? 1 : 0;
+      JsepIceCandidate jice(mline_id,
+                            mline_index,
+                            candidates_.at(i));
+      jdesc_.AddCandidate(&jice);
+    }
+  }
+
+  // Builds the reference audio content: rtcp-mux enabled, two SSRC-signaled
+  // streams (one per media stream label), SDES crypto, RTP/SAVPF protocol.
+  // Caller takes ownership of the returned pointer.
+  AudioContentDescription* CreateAudioContentDescription() {
+    AudioContentDescription* audio = new AudioContentDescription();
+    audio->set_rtcp_mux(true);
+    StreamParams audio_stream1;
+    audio_stream1.id = kAudioTrackId1;
+    audio_stream1.cname = kStream1Cname;
+    audio_stream1.sync_label = kStreamLabel1;
+    audio_stream1.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio->AddStream(audio_stream1);
+    StreamParams audio_stream2;
+    audio_stream2.id = kAudioTrackId2;
+    audio_stream2.cname = kStream2Cname;
+    audio_stream2.sync_label = kStreamLabel2;
+    audio_stream2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio->AddStream(audio_stream2);
+    audio->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_32",
+        "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32",
+        "dummy_session_params"));
+    audio->set_protocol(cricket::kMediaProtocolSavpf);
+    return audio;
+  }
+
+  // Compares two media content descriptions field by field with gtest
+  // expectations: type, direction, rtcp_mux, cryptos, protocol (treating all
+  // DTLS/SCTP protocol spellings as one equivalence class), codecs,
+  // bandwidth, streams, and rtp header extensions.
+  template <class MCD>
+  void CompareMediaContentDescription(const MCD* cd1,
+                                      const MCD* cd2) {
+    // type
+    // BUG FIX: previously compared cd1->type() against itself, which could
+    // never fail; compare against cd2 as intended.
+    EXPECT_EQ(cd1->type(), cd2->type());
+
+    // content direction
+    EXPECT_EQ(cd1->direction(), cd2->direction());
+
+    // rtcp_mux
+    EXPECT_EQ(cd1->rtcp_mux(), cd2->rtcp_mux());
+
+    // cryptos
+    EXPECT_EQ(cd1->cryptos().size(), cd2->cryptos().size());
+    if (cd1->cryptos().size() != cd2->cryptos().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < cd1->cryptos().size(); ++i) {
+      // Use references to avoid copying CryptoParams per iteration.
+      const CryptoParams& c1 = cd1->cryptos().at(i);
+      const CryptoParams& c2 = cd2->cryptos().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+      EXPECT_EQ(c1.key_params, c2.key_params);
+      EXPECT_EQ(c1.session_params, c2.session_params);
+    }
+
+    // protocol
+    // Use an equivalence class here, for old and new versions of the
+    // protocol description.
+    if (cd1->protocol() == cricket::kMediaProtocolDtlsSctp
+        || cd1->protocol() == cricket::kMediaProtocolUdpDtlsSctp
+        || cd1->protocol() == cricket::kMediaProtocolTcpDtlsSctp) {
+      const bool cd2_is_also_dtls_sctp =
+        cd2->protocol() == cricket::kMediaProtocolDtlsSctp
+        || cd2->protocol() == cricket::kMediaProtocolUdpDtlsSctp
+        || cd2->protocol() == cricket::kMediaProtocolTcpDtlsSctp;
+      EXPECT_TRUE(cd2_is_also_dtls_sctp);
+    } else {
+      EXPECT_EQ(cd1->protocol(), cd2->protocol());
+    }
+
+    // codecs
+    EXPECT_EQ(cd1->codecs(), cd2->codecs());
+
+    // bandwidth
+    EXPECT_EQ(cd1->bandwidth(), cd2->bandwidth());
+
+    // streams
+    EXPECT_EQ(cd1->streams(), cd2->streams());
+
+    // extmap
+    ASSERT_EQ(cd1->rtp_header_extensions().size(),
+              cd2->rtp_header_extensions().size());
+    for (size_t i = 0; i < cd1->rtp_header_extensions().size(); ++i) {
+      // References avoid copying RtpHeaderExtension per iteration.
+      const RtpHeaderExtension& ext1 = cd1->rtp_header_extensions().at(i);
+      const RtpHeaderExtension& ext2 = cd2->rtp_header_extensions().at(i);
+      EXPECT_EQ(ext1.uri, ext2.uri);
+      EXPECT_EQ(ext1.id, ext2.id);
+    }
+  }
+
+
+  // Compares two SessionDescriptions: per-content media descriptions (by
+  // media type), content groups, transport infos, and global attributes.
+  // Uses ADD_FAILURE() + early return when collection sizes differ so the
+  // element-wise loops below never index out of range.
+  void CompareSessionDescription(const SessionDescription& desc1,
+                                 const SessionDescription& desc2) {
+    // Compare content descriptions.
+    if (desc1.contents().size() != desc2.contents().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < desc1.contents().size(); ++i) {
+      const cricket::ContentInfo& c1 = desc1.contents().at(i);
+      const cricket::ContentInfo& c2 = desc2.contents().at(i);
+      // content name
+      EXPECT_EQ(c1.name, c2.name);
+      // content type
+      // Note, ASSERT will return from the function, but will not stop the test.
+      ASSERT_EQ(c1.type, c2.type);
+
+      ASSERT_EQ(IsAudioContent(&c1), IsAudioContent(&c2));
+      if (IsAudioContent(&c1)) {
+        const AudioContentDescription* acd1 =
+            static_cast<const AudioContentDescription*>(c1.description);
+        const AudioContentDescription* acd2 =
+            static_cast<const AudioContentDescription*>(c2.description);
+        CompareMediaContentDescription<AudioContentDescription>(acd1, acd2);
+      }
+
+      ASSERT_EQ(IsVideoContent(&c1), IsVideoContent(&c2));
+      if (IsVideoContent(&c1)) {
+        const VideoContentDescription* vcd1 =
+            static_cast<const VideoContentDescription*>(c1.description);
+        const VideoContentDescription* vcd2 =
+            static_cast<const VideoContentDescription*>(c2.description);
+        CompareMediaContentDescription<VideoContentDescription>(vcd1, vcd2);
+      }
+
+      ASSERT_EQ(IsDataContent(&c1), IsDataContent(&c2));
+      if (IsDataContent(&c1)) {
+        const DataContentDescription* dcd1 =
+            static_cast<const DataContentDescription*>(c1.description);
+        const DataContentDescription* dcd2 =
+            static_cast<const DataContentDescription*>(c2.description);
+        CompareMediaContentDescription<DataContentDescription>(dcd1, dcd2);
+      }
+    }
+
+    // group
+    // References avoid copying the whole group collections.
+    const cricket::ContentGroups& groups1 = desc1.groups();
+    const cricket::ContentGroups& groups2 = desc2.groups();
+    // BUG FIX: previously compared groups1.size() against itself, so the
+    // expectation could never fail; compare against groups2 as intended.
+    EXPECT_EQ(groups1.size(), groups2.size());
+    if (groups1.size() != groups2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < groups1.size(); ++i) {
+      const cricket::ContentGroup& group1 = groups1.at(i);
+      const cricket::ContentGroup& group2 = groups2.at(i);
+      EXPECT_EQ(group1.semantics(), group2.semantics());
+      const cricket::ContentNames& names1 = group1.content_names();
+      const cricket::ContentNames& names2 = group2.content_names();
+      EXPECT_EQ(names1.size(), names2.size());
+      if (names1.size() != names2.size()) {
+        ADD_FAILURE();
+        return;
+      }
+      cricket::ContentNames::const_iterator iter1 = names1.begin();
+      cricket::ContentNames::const_iterator iter2 = names2.begin();
+      while (iter1 != names1.end()) {
+        EXPECT_EQ(*iter1++, *iter2++);
+      }
+    }
+
+    // transport info
+    const cricket::TransportInfos& transports1 = desc1.transport_infos();
+    const cricket::TransportInfos& transports2 = desc2.transport_infos();
+    EXPECT_EQ(transports1.size(), transports2.size());
+    if (transports1.size() != transports2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < transports1.size(); ++i) {
+      const cricket::TransportInfo& transport1 = transports1.at(i);
+      const cricket::TransportInfo& transport2 = transports2.at(i);
+      EXPECT_EQ(transport1.content_name, transport2.content_name);
+      EXPECT_EQ(transport1.description.ice_ufrag,
+                transport2.description.ice_ufrag);
+      EXPECT_EQ(transport1.description.ice_pwd,
+                transport2.description.ice_pwd);
+      if (transport1.description.identity_fingerprint) {
+        EXPECT_EQ(*transport1.description.identity_fingerprint,
+                  *transport2.description.identity_fingerprint);
+      } else {
+        // Both fingerprints absent: compare the raw pointers (both NULL).
+        EXPECT_EQ(transport1.description.identity_fingerprint.get(),
+                  transport2.description.identity_fingerprint.get());
+      }
+      EXPECT_EQ(transport1.description.transport_options,
+                transport2.description.transport_options);
+      EXPECT_TRUE(CompareCandidates(transport1.description.candidates,
+                                    transport2.description.candidates));
+    }
+
+    // global attributes
+    EXPECT_EQ(desc1.msid_supported(), desc2.msid_supported());
+  }
+
+  // Element-wise comparison of two candidate lists using
+  // Candidate::IsEquivalent. Returns false (after an EXPECT failure) on a
+  // size mismatch; per-element mismatches are reported via EXPECT_TRUE but
+  // the function still returns true.
+  bool CompareCandidates(const Candidates& cs1, const Candidates& cs2) {
+    EXPECT_EQ(cs1.size(), cs2.size());
+    if (cs1.size() != cs2.size())
+      return false;
+    for (size_t i = 0; i< cs1.size(); ++i) {
+      const Candidate c1 = cs1.at(i);
+      const Candidate c2 = cs2.at(i);
+      EXPECT_TRUE(c1.IsEquivalent(c2));
+    }
+    return true;
+  }
+
+  // Jsep-level comparison: session id/version, the wrapped
+  // SessionDescription, and the per-m-section candidate collections
+  // (mid, mline index, and candidate equivalence).
+  bool CompareSessionDescription(
+      const JsepSessionDescription& desc1,
+      const JsepSessionDescription& desc2) {
+    EXPECT_EQ(desc1.session_id(), desc2.session_id());
+    EXPECT_EQ(desc1.session_version(), desc2.session_version());
+    CompareSessionDescription(*desc1.description(), *desc2.description());
+    if (desc1.number_of_mediasections() != desc2.number_of_mediasections())
+      return false;
+    for (size_t i = 0; i < desc1.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc1 = desc1.candidates(i);
+      const IceCandidateCollection* cc2 = desc2.candidates(i);
+      if (cc1->count() != cc2->count())
+        return false;
+      for (size_t j = 0; j < cc1->count(); ++j) {
+        const IceCandidateInterface* c1 = cc1->at(j);
+        const IceCandidateInterface* c2 = cc2->at(j);
+        EXPECT_EQ(c1->sdp_mid(), c2->sdp_mid());
+        EXPECT_EQ(c1->sdp_mline_index(), c2->sdp_mline_index());
+        EXPECT_TRUE(c1->candidate().IsEquivalent(c2->candidate()));
+      }
+    }
+    return true;
+  }
+
+  // Disable the ice-ufrag and ice-pwd in given |sdp| message by replacing
+  // them with invalid keywords so that the parser will just ignore them.
+  bool RemoveCandidateUfragPwd(std::string* sdp) {
+    const char ice_ufrag[] = "a=ice-ufrag";
+    const char ice_ufragx[] = "a=xice-ufrag";
+    const char ice_pwd[] = "a=ice-pwd";
+    const char ice_pwdx[] = "a=xice-pwd";
+    rtc::replace_substrs(ice_ufrag, strlen(ice_ufrag),
+                         ice_ufragx, strlen(ice_ufragx), sdp);
+    rtc::replace_substrs(ice_pwd, strlen(ice_pwd),
+                         ice_pwdx, strlen(ice_pwdx), sdp);
+    // Always succeeds; the bool return exists for EXPECT_TRUE call sites.
+    return true;
+  }
+
+  // Update the candidates in |jdesc| to use the given |ufrag| and |pwd|.
+  // Only mline indices 0 (audio) and 1 (video) are supported; anything else
+  // hits ASSERT(false).
+  bool UpdateCandidateUfragPwd(JsepSessionDescription* jdesc, int mline_index,
+      const std::string& ufrag, const std::string& pwd) {
+    std::string content_name;
+    if (mline_index == 0) {
+      content_name = kAudioContentName;
+    } else if (mline_index == 1) {
+      content_name = kVideoContentName;
+    } else {
+      ASSERT(false);
+    }
+    // Replace the transport info on |jdesc| with one carrying the new
+    // credentials.
+    TransportInfo transport_info(
+        content_name, TransportDescription(ufrag, pwd));
+    SessionDescription* desc =
+        const_cast<SessionDescription*>(jdesc->description());
+    desc->RemoveTransportInfoByName(content_name);
+    EXPECT_TRUE(desc->AddTransportInfo(transport_info));
+    // NOTE(review): the loop below walks and mutates the member jdesc_, not
+    // the |jdesc| argument updated above — confirm this asymmetry is
+    // intended (it only matters if a caller ever passes something other
+    // than &jdesc_).
+    for (size_t i = 0; i < jdesc_.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc = jdesc_.candidates(i);
+      for (size_t j = 0; j < cc->count(); ++j) {
+        if (cc->at(j)->sdp_mline_index() == mline_index) {
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_username(
+              ufrag);
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_password(
+              pwd);
+        }
+      }
+    }
+    return true;
+  }
+
+  // Replaces the transport info named |content_name| in desc_ with a copy
+  // whose transport_options are set to |transport_options|.
+  void AddIceOptions(const std::string& content_name,
+                     const std::vector<std::string>& transport_options) {
+    ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
+    cricket::TransportInfo transport_info =
+        *(desc_.GetTransportInfoByName(content_name));
+    desc_.RemoveTransportInfoByName(content_name);
+    transport_info.description.transport_options = transport_options;
+    desc_.AddTransportInfo(transport_info);
+  }
+
+  // Re-adds the audio and video transport infos with a SHA-1 identity
+  // fingerprint attached (for DTLS-related serialization tests).
+  void AddFingerprint() {
+    desc_.RemoveTransportInfoByName(kAudioContentName);
+    desc_.RemoveTransportInfoByName(kVideoContentName);
+    rtc::SSLFingerprint fingerprint(rtc::DIGEST_SHA_1,
+                                    kIdentityDigest,
+                                    sizeof(kIdentityDigest));
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kAudioContentName,
+                      TransportDescription(std::vector<std::string>(),
+                                           kCandidateUfragVoice,
+                                           kCandidatePwdVoice,
+                                           cricket::ICEMODE_FULL,
+                                           cricket::CONNECTIONROLE_NONE,
+                                           &fingerprint, Candidates()))));
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kVideoContentName,
+                      TransportDescription(std::vector<std::string>(),
+                                           kCandidateUfragVideo,
+                                           kCandidatePwdVideo,
+                                           cricket::ICEMODE_FULL,
+                                           cricket::CONNECTIONROLE_NONE,
+                                           &fingerprint, Candidates()))));
+  }
+
+  // Copies the audio/video descriptions, adds an RTP header extension to
+  // each copy, and swaps the copies into desc_ in place of the originals
+  // (desc_ owns the old ones, so they are replaced rather than mutated).
+  void AddExtmap() {
+    audio_desc_ = static_cast<AudioContentDescription*>(
+        audio_desc_->Copy());
+    video_desc_ = static_cast<VideoContentDescription*>(
+        video_desc_->Copy());
+    audio_desc_->AddRtpHeaderExtension(
+        RtpHeaderExtension(kExtmapUri, kExtmapId));
+    video_desc_->AddRtpHeaderExtension(
+        RtpHeaderExtension(kExtmapUri, kExtmapId));
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_desc_);
+  }
+
+  // Clears the SDES crypto params from both media descriptions.
+  void RemoveCryptos() {
+    audio_desc_->set_cryptos(std::vector<CryptoParams>());
+    video_desc_->set_cryptos(std::vector<CryptoParams>());
+  }
+
+  // Sets |direction| on both media descriptions, serializes jdesc_, and
+  // expects the output to equal kSdpFullString with its direction lines
+  // rewritten accordingly. Returns false only if re-initialization fails.
+  bool TestSerializeDirection(cricket::MediaContentDirection direction) {
+    audio_desc_->set_direction(direction);
+    video_desc_->set_direction(direction);
+    std::string new_sdp = kSdpFullString;
+    ReplaceDirection(direction, &new_sdp);
+
+    if (!jdesc_.Initialize(desc_.Copy(),
+                           jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    std::string message = webrtc::SdpSerialize(jdesc_);
+    EXPECT_EQ(new_sdp, message);
+    return true;
+  }
+
+  // Rebuilds desc_ with the audio/video contents marked rejected as
+  // requested, serializes jdesc_, and expects the output to equal
+  // kSdpFullString with the corresponding m= ports zeroed.
+  bool TestSerializeRejected(bool audio_rejected, bool video_rejected) {
+    // Work on copies: desc_ owns the originals and RemoveContentByName
+    // deletes them.
+    audio_desc_ = static_cast<AudioContentDescription*>(
+        audio_desc_->Copy());
+    video_desc_ = static_cast<VideoContentDescription*>(
+        video_desc_->Copy());
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+                     audio_desc_);
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+                     video_desc_);
+    std::string new_sdp = kSdpFullString;
+    ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
+
+    if (!jdesc_.Initialize(desc_.Copy(),
+                           jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    std::string message = webrtc::SdpSerialize(jdesc_);
+    EXPECT_EQ(new_sdp, message);
+    return true;
+  }
+
+  // Adds a DTLS/SCTP data content (google sctp codec with the default SCTP
+  // port param) plus its transport info to desc_.
+  void AddSctpDataChannel() {
+    rtc::scoped_ptr<DataContentDescription> data(
+        new DataContentDescription());
+    data_desc_ = data.get();
+    data_desc_->set_protocol(cricket::kMediaProtocolDtlsSctp);
+    DataCodec codec(cricket::kGoogleSctpDataCodecId,
+                    cricket::kGoogleSctpDataCodecName, 0);
+    codec.SetParam(cricket::kCodecParamPort, kDefaultSctpPort);
+    data_desc_->AddCodec(codec);
+    desc_.AddContent(kDataContentName, NS_JINGLE_DRAFT_SCTP, data.release());
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kDataContentName,
+                      TransportDescription(kCandidateUfragData,
+                                           kCandidatePwdData))));
+  }
+
+  // Adds an RTP data content (google-data codec, SSRC-signaled stream, SDES
+  // crypto, RTP/SAVPF) plus its transport info to desc_. Mirrors
+  // kSdpRtpDataChannelString.
+  void AddRtpDataChannel() {
+    rtc::scoped_ptr<DataContentDescription> data(
+        new DataContentDescription());
+    data_desc_ = data.get();
+
+    data_desc_->AddCodec(DataCodec(101, "google-data", 1));
+    StreamParams data_stream;
+    data_stream.id = kDataChannelMsid;
+    data_stream.cname = kDataChannelCname;
+    data_stream.sync_label = kDataChannelLabel;
+    data_stream.ssrcs.push_back(kDataChannelSsrc);
+    data_desc_->AddStream(data_stream);
+    data_desc_->AddCrypto(CryptoParams(
+        1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5", ""));
+    data_desc_->set_protocol(cricket::kMediaProtocolSavpf);
+    desc_.AddContent(kDataContentName, NS_JINGLE_RTP, data.release());
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kDataContentName,
+                      TransportDescription(kCandidateUfragData,
+                                           kCandidatePwdData))));
+  }
+
+  // Deserializes kSdpFullString with its direction lines rewritten to
+  // |direction| and expects the result to match jdesc_ rebuilt with the same
+  // direction. Returns false only if re-initialization fails.
+  bool TestDeserializeDirection(cricket::MediaContentDirection direction) {
+    std::string new_sdp = kSdpFullString;
+    ReplaceDirection(direction, &new_sdp);
+    JsepSessionDescription new_jdesc(kDummyString);
+
+    EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+
+    audio_desc_->set_direction(direction);
+    video_desc_->set_direction(direction);
+    if (!jdesc_.Initialize(desc_.Copy(),
+                           jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+    return true;
+  }
+
+ bool TestDeserializeRejected(bool audio_rejected, bool video_rejected) {
+ std::string new_sdp = kSdpFullString;
+ ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
+ JsepSessionDescription new_jdesc(JsepSessionDescription::kOffer);
+
+ EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+ audio_desc_ = static_cast<AudioContentDescription*>(
+ audio_desc_->Copy());
+ video_desc_ = static_cast<VideoContentDescription*>(
+ video_desc_->Copy());
+ desc_.RemoveContentByName(kAudioContentName);
+ desc_.RemoveContentByName(kVideoContentName);
+ desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+ audio_desc_);
+ desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+ video_desc_);
+ if (!jdesc_.Initialize(desc_.Copy(),
+ jdesc_.session_id(),
+ jdesc_.session_version())) {
+ return false;
+ }
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+ return true;
+ }
+
+ void TestDeserializeExtmap(bool session_level, bool media_level) {
+ AddExtmap();
+ JsepSessionDescription new_jdesc("dummy");
+ ASSERT_TRUE(new_jdesc.Initialize(desc_.Copy(),
+ jdesc_.session_id(),
+ jdesc_.session_version()));
+ JsepSessionDescription jdesc_with_extmap("dummy");
+ std::string sdp_with_extmap = kSdpString;
+ if (session_level) {
+ InjectAfter(kSessionTime, kExtmapWithDirectionAndAttribute,
+ &sdp_with_extmap);
+ }
+ if (media_level) {
+ InjectAfter(kAttributeIcePwdVoice, kExtmapWithDirectionAndAttribute,
+ &sdp_with_extmap);
+ InjectAfter(kAttributeIcePwdVideo, kExtmapWithDirectionAndAttribute,
+ &sdp_with_extmap);
+ }
+ // The extmap can't be present at the same time in both session level and
+ // media level.
+ if (session_level && media_level) {
+ SdpParseError error;
+ EXPECT_FALSE(webrtc::SdpDeserialize(sdp_with_extmap,
+ &jdesc_with_extmap, &error));
+ EXPECT_NE(std::string::npos, error.description.find("a=extmap"));
+ } else {
+ EXPECT_TRUE(SdpDeserialize(sdp_with_extmap, &jdesc_with_extmap));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_with_extmap, new_jdesc));
+ }
+ }
+
+ void VerifyCodecParameter(const cricket::CodecParameterMap& params,
+ const std::string& name, int expected_value) {
+ cricket::CodecParameterMap::const_iterator found = params.find(name);
+ ASSERT_TRUE(found != params.end());
+ EXPECT_EQ(found->second, rtc::ToString<int>(expected_value));
+ }
+
  // Builds an SDP blob whose audio fmtp/ptime/maxptime values come from
  // |params|, deserializes it into |jdesc_output|, and verifies that the
  // parsed audio (opus/ISAC) and video (VP8/RTX) codecs carry the expected
  // parameters, payload types, and ISAC bitrates.
  void TestDeserializeCodecParams(const CodecParams& params,
                                  JsepSessionDescription* jdesc_output) {
    std::string sdp =
        "v=0\r\n"
        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
        "s=-\r\n"
        "t=0 0\r\n"
        // Include semantics for WebRTC Media Streams since it is supported by
        // this parser, and will be added to the SDP when serializing a session
        // description.
        "a=msid-semantic: WMS\r\n"
        // Pl type 111 preferred.
        "m=audio 9 RTP/SAVPF 111 104 103\r\n"
        // Pltype 111 listed before 103 and 104 in the map.
        "a=rtpmap:111 opus/48000/2\r\n"
        // Pltype 103 listed before 104.
        "a=rtpmap:103 ISAC/16000\r\n"
        "a=rtpmap:104 ISAC/32000\r\n"
        "a=fmtp:111 0-15,66,70\r\n"
        "a=fmtp:111 ";
    std::ostringstream os;
    os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
       << "; sprop-stereo=" << params.sprop_stereo
       << "; useinbandfec=" << params.useinband
       << "; maxaveragebitrate=" << params.maxaveragebitrate << "\r\n"
       << "a=ptime:" << params.ptime << "\r\n"
       << "a=maxptime:" << params.max_ptime << "\r\n";
    sdp += os.str();

    // Reset the stream (clear() resets the error flags, str("") the buffer)
    // so it can be reused for the video section.
    os.clear();
    os.str("");
    // Pl type 100 preferred.
    os << "m=video 9 RTP/SAVPF 99 95\r\n"
       << "a=rtpmap:99 VP8/90000\r\n"
       << "a=rtpmap:95 RTX/90000\r\n"
       // Trailing ';' is deliberate: the parser must tolerate it.
       << "a=fmtp:95 apt=99;\r\n";
    sdp += os.str();

    // Deserialize
    SdpParseError error;
    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));

    const ContentInfo* ac = GetFirstAudioContent(jdesc_output->description());
    ASSERT_TRUE(ac != NULL);
    const AudioContentDescription* acd =
        static_cast<const AudioContentDescription*>(ac->description);
    ASSERT_FALSE(acd->codecs().empty());
    cricket::AudioCodec opus = acd->codecs()[0];
    EXPECT_EQ("opus", opus.name);
    EXPECT_EQ(111, opus.id);
    VerifyCodecParameter(opus.params, "minptime", params.min_ptime);
    VerifyCodecParameter(opus.params, "stereo", params.stereo);
    VerifyCodecParameter(opus.params, "sprop-stereo", params.sprop_stereo);
    VerifyCodecParameter(opus.params, "useinbandfec", params.useinband);
    VerifyCodecParameter(opus.params, "maxaveragebitrate",
                         params.maxaveragebitrate);
    // ptime/maxptime are media-level, so every audio codec must carry them.
    for (size_t i = 0; i < acd->codecs().size(); ++i) {
      cricket::AudioCodec codec = acd->codecs()[i];
      VerifyCodecParameter(codec.params, "ptime", params.ptime);
      VerifyCodecParameter(codec.params, "maxptime", params.max_ptime);
      if (codec.name == "ISAC") {
        if (codec.clockrate == 16000) {
          EXPECT_EQ(32000, codec.bitrate);
        } else {
          EXPECT_EQ(56000, codec.bitrate);
        }
      }
    }

    const ContentInfo* vc = GetFirstVideoContent(jdesc_output->description());
    ASSERT_TRUE(vc != NULL);
    const VideoContentDescription* vcd =
        static_cast<const VideoContentDescription*>(vc->description);
    ASSERT_FALSE(vcd->codecs().empty());
    cricket::VideoCodec vp8 = vcd->codecs()[0];
    EXPECT_EQ("VP8", vp8.name);
    EXPECT_EQ(99, vp8.id);
    cricket::VideoCodec rtx = vcd->codecs()[1];
    EXPECT_EQ("RTX", rtx.name);
    EXPECT_EQ(95, rtx.id);
    // RTX's apt parameter must reference the VP8 payload type.
    VerifyCodecParameter(rtx.params, "apt", vp8.id);
  }
+
  // Deserializes SDP carrying a=rtcp-fb lines for audio and video — either
  // payload-type-specific ("111"/"101") or wildcard ("*") depending on
  // |use_wildcard| — and verifies the parsed codecs carry the expected
  // feedback params (nack, nack pli, goog-remb, ccm fir).
  void TestDeserializeRtcpFb(JsepSessionDescription* jdesc_output,
                             bool use_wildcard) {
    std::string sdp_session_and_audio =
        "v=0\r\n"
        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
        "s=-\r\n"
        "t=0 0\r\n"
        // Include semantics for WebRTC Media Streams since it is supported by
        // this parser, and will be added to the SDP when serializing a session
        // description.
        "a=msid-semantic: WMS\r\n"
        "m=audio 9 RTP/SAVPF 111\r\n"
        "a=rtpmap:111 opus/48000/2\r\n";
    std::string sdp_video =
        "m=video 3457 RTP/SAVPF 101\r\n"
        "a=rtpmap:101 VP8/90000\r\n"
        "a=rtcp-fb:101 nack\r\n"
        "a=rtcp-fb:101 nack pli\r\n"
        "a=rtcp-fb:101 goog-remb\r\n";
    std::ostringstream os;
    os << sdp_session_and_audio;
    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "111") << " nack\r\n";
    os << sdp_video;
    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "101") << " ccm fir\r\n";
    std::string sdp = os.str();
    // Deserialize
    SdpParseError error;
    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));
    const ContentInfo* ac = GetFirstAudioContent(jdesc_output->description());
    ASSERT_TRUE(ac != NULL);
    const AudioContentDescription* acd =
        static_cast<const AudioContentDescription*>(ac->description);
    ASSERT_FALSE(acd->codecs().empty());
    cricket::AudioCodec opus = acd->codecs()[0];
    EXPECT_EQ(111, opus.id);
    EXPECT_TRUE(opus.HasFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                               cricket::kParamValueEmpty)));

    const ContentInfo* vc = GetFirstVideoContent(jdesc_output->description());
    ASSERT_TRUE(vc != NULL);
    const VideoContentDescription* vcd =
        static_cast<const VideoContentDescription*>(vc->description);
    ASSERT_FALSE(vcd->codecs().empty());
    cricket::VideoCodec vp8 = vcd->codecs()[0];
    EXPECT_STREQ(webrtc::JsepSessionDescription::kDefaultVideoCodecName,
                 vp8.name.c_str());
    EXPECT_EQ(101, vp8.id);
    EXPECT_TRUE(vp8.HasFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                               cricket::kParamValueEmpty)));
    EXPECT_TRUE(vp8.HasFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                               cricket::kRtcpFbNackParamPli)));
    EXPECT_TRUE(vp8.HasFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamRemb,
                               cricket::kParamValueEmpty)));
    EXPECT_TRUE(vp8.HasFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamCcm,
                               cricket::kRtcpFbCcmParamFir)));
  }
+
  // Two SDP messages can mean the same thing but be different strings, e.g.
  // some of the lines can be serialized in different order.
  // However, a deserialized description can be compared field by field and has
  // no order. If deserializer has already been tested, serializing then
  // deserializing and comparing JsepSessionDescription will test
  // the serializer sufficiently.
  void TestSerialize(const JsepSessionDescription& jdesc) {
    // Round-trip: serialize |jdesc|, parse the result back, and compare the
    // two descriptions field by field.
    std::string message = webrtc::SdpSerialize(jdesc);
    JsepSessionDescription jdesc_output_des(kDummyString);
    SdpParseError error;
    EXPECT_TRUE(webrtc::SdpDeserialize(message, &jdesc_output_des, &error));
    EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output_des));
  }
+
 protected:
  // Reference session description that each test mutates before comparing.
  SessionDescription desc_;
  // Raw pointers into desc_'s contents (set up outside this chunk —
  // presumably owned by desc_; data_desc_ is released into it by the
  // Add*DataChannel helpers above).
  AudioContentDescription* audio_desc_;
  VideoContentDescription* video_desc_;
  DataContentDescription* data_desc_;
  Candidates candidates_;
  rtc::scoped_ptr<IceCandidateInterface> jcandidate_;
  // JSEP wrapper around desc_, re-Initialize()d by tests after edits.
  JsepSessionDescription jdesc_;
};
+
+void TestMismatch(const std::string& string1, const std::string& string2) {
+ int position = 0;
+ for (size_t i = 0; i < string1.length() && i < string2.length(); ++i) {
+ if (string1.c_str()[i] != string2.c_str()[i]) {
+ position = static_cast<int>(i);
+ break;
+ }
+ }
+ EXPECT_EQ(0, position) << "Strings mismatch at the " << position
+ << " character\n"
+ << " 1: " << string1.substr(position, 20) << "\n"
+ << " 2: " << string2.substr(position, 20) << "\n";
+}
+
// Serializing the fully-populated fixture description (with candidates) must
// reproduce kSdpFullString exactly.
TEST_F(WebRtcSdpTest, SerializeSessionDescription) {
  // SessionDescription with desc and candidates.
  std::string message = webrtc::SdpSerialize(jdesc_);
  TestMismatch(std::string(kSdpFullString), message);
}

// An uninitialized JsepSessionDescription serializes to the empty string.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionEmpty) {
  JsepSessionDescription jdesc_empty(kDummyString);
  EXPECT_EQ("", webrtc::SdpSerialize(jdesc_empty));
}
+
// This tests serialization of SDP with only IPv6 candidates and verifies that
// IPv6 is used as default address in c line according to preference.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIPv6Only) {
  // Only test 1 m line.
  desc_.RemoveContentByName("video_content_name");
  // STUN has a higher preference than local host, so ::1 should be chosen.
  cricket::Candidate candidate1(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  cricket::Candidate candidate2(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  JsepSessionDescription jdesc(kDummyString);
  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));

  // Only add the candidates to audio m line.
  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
  std::string message = webrtc::SdpSerialize(jdesc);

  // Audio line should have a c line like this one.
  EXPECT_NE(message.find("c=IN IP6 ::1"), std::string::npos);
  // Shouldn't have a IP4 c line.
  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
}
+
// This tests serialization of SDP with both IPv4 and IPv6 candidates and
// verifies that IPv4 is used as default address in c line even if the
// preference of IPv4 is lower.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBothIPFamilies) {
  // Only test 1 m line.
  desc_.RemoveContentByName("video_content_name");
  cricket::Candidate candidate_v4(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
      rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "",
      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  cricket::Candidate candidate_v6(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  JsepSessionDescription jdesc(kDummyString);
  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));

  // Only add the candidates to audio m line.
  JsepIceCandidate jice_v4("audio_content_name", 0, candidate_v4);
  JsepIceCandidate jice_v6("audio_content_name", 0, candidate_v6);
  ASSERT_TRUE(jdesc.AddCandidate(&jice_v4));
  ASSERT_TRUE(jdesc.AddCandidate(&jice_v6));
  std::string message = webrtc::SdpSerialize(jdesc);

  // Audio line should have a c line like this one.
  EXPECT_NE(message.find("c=IN IP4 192.168.1.5"), std::string::npos);
  // Shouldn't have a IP6 c line.
  EXPECT_EQ(message.find("c=IN IP6"), std::string::npos);
}
+
// This tests serialization of SDP with both UDP and TCP candidates and
// verifies that UDP is used as default address in c line even if the
// preference of UDP is lower.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBothProtocols) {
  // Only test 1 m line.
  desc_.RemoveContentByName("video_content_name");
  // The TCP candidate is STUN-type (higher preference than local host), yet
  // the UDP candidate should still win the default c line.
  cricket::Candidate candidate1(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  cricket::Candidate candidate2(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
      rtc::SocketAddress("fe80::1234:5678:abcd:ef12", 1235), kCandidatePriority,
      "", "", cricket::LOCAL_PORT_TYPE, kCandidateGeneration,
      kCandidateFoundation1);
  JsepSessionDescription jdesc(kDummyString);
  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));

  // Only add the candidates to audio m line.
  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
  std::string message = webrtc::SdpSerialize(jdesc);

  // Audio line should have a c line like this one.
  EXPECT_NE(message.find("c=IN IP6 fe80::1234:5678:abcd:ef12"),
            std::string::npos);
  // Shouldn't have a IP4 c line.
  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
}
+
// This tests serialization of SDP with only TCP candidates and verifies that
// null IPv4 is used as default address in c line.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithTCPOnly) {
  // Only test 1 m line.
  desc_.RemoveContentByName("video_content_name");
  // STUN has a higher preference than local host, but neither candidate is
  // UDP, so no real default address can be chosen.
  cricket::Candidate candidate1(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  cricket::Candidate candidate2(
      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
  JsepSessionDescription jdesc(kDummyString);
  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));

  // Only add the candidates to audio m line.
  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
  std::string message = webrtc::SdpSerialize(jdesc);

  // Audio line should have a null-address c line when no default exists.
  EXPECT_NE(message.find("c=IN IP4 0.0.0.0"), std::string::npos);
}
+
// This tests serialization of SDP with a=crypto and a=fingerprint, as would be
// the case in a DTLS offer.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprint) {
  AddFingerprint();
  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
  ASSERT_TRUE(jdesc_with_fingerprint.Initialize(desc_.Copy(),
                                                kSessionId, kSessionVersion));
  std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);

  // Expected output: kSdpString with a fingerprint injected after each
  // media section's ice-pwd line.
  std::string sdp_with_fingerprint = kSdpString;
  InjectAfter(kAttributeIcePwdVoice,
              kFingerprint, &sdp_with_fingerprint);
  InjectAfter(kAttributeIcePwdVideo,
              kFingerprint, &sdp_with_fingerprint);

  EXPECT_EQ(sdp_with_fingerprint, message);
}
+
// This tests serialization of SDP with a=fingerprint with no a=crypto, as would
// be the case in a DTLS answer.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprintNoCryptos) {
  AddFingerprint();
  RemoveCryptos();
  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
  ASSERT_TRUE(jdesc_with_fingerprint.Initialize(desc_.Copy(),
                                                kSessionId, kSessionVersion));
  std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);

  // Expected output: kSdpString minus its crypto lines, plus a fingerprint
  // after each media section's ice-pwd line.
  std::string sdp_with_fingerprint = kSdpString;
  Replace(kAttributeCryptoVoice, "", &sdp_with_fingerprint);
  Replace(kAttributeCryptoVideo, "", &sdp_with_fingerprint);
  InjectAfter(kAttributeIcePwdVoice,
              kFingerprint, &sdp_with_fingerprint);
  InjectAfter(kAttributeIcePwdVideo,
              kFingerprint, &sdp_with_fingerprint);

  EXPECT_EQ(sdp_with_fingerprint, message);
}
+
// Serializing a description with no candidates must yield kSdpString (the
// candidate-free reference SDP).
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) {
  // JsepSessionDescription with desc but without candidates.
  JsepSessionDescription jdesc_no_candidates(kDummyString);
  ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(),
                                             kSessionId, kSessionVersion));
  std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
  EXPECT_EQ(std::string(kSdpString), message);
}
+
// A BUNDLE group over audio+video must serialize as an a=group:BUNDLE line
// right after the t= line.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundle) {
  ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
  group.AddContentName(kAudioContentName);
  group.AddContentName(kVideoContentName);
  desc_.AddGroup(group);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  std::string message = webrtc::SdpSerialize(jdesc_);
  std::string sdp_with_bundle = kSdpFullString;
  InjectAfter(kSessionTime,
              "a=group:BUNDLE audio_content_name video_content_name\r\n",
              &sdp_with_bundle);
  EXPECT_EQ(sdp_with_bundle, message);
}
+
// Per-content bandwidth (set in bps) must serialize as b=AS: lines (in kbps)
// after each media section's c line.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) {
  VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
      GetFirstVideoContent(&desc_)->description);
  vcd->set_bandwidth(100 * 1000);
  AudioContentDescription* acd = static_cast<AudioContentDescription*>(
      GetFirstAudioContent(&desc_)->description);
  acd->set_bandwidth(50 * 1000);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  std::string message = webrtc::SdpSerialize(jdesc_);
  std::string sdp_with_bandwidth = kSdpFullString;
  InjectAfter("c=IN IP4 74.125.224.39\r\n",
              "b=AS:100\r\n",
              &sdp_with_bandwidth);
  InjectAfter("c=IN IP4 74.125.127.126\r\n",
              "b=AS:50\r\n",
              &sdp_with_bandwidth);
  EXPECT_EQ(sdp_with_bandwidth, message);
}
+
// Transport-level ICE options must serialize as an a=ice-options line after
// each media section's ice-pwd line.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) {
  std::vector<std::string> transport_options;
  transport_options.push_back(kIceOption1);
  transport_options.push_back(kIceOption3);
  AddIceOptions(kAudioContentName, transport_options);
  transport_options.clear();
  transport_options.push_back(kIceOption2);
  transport_options.push_back(kIceOption3);
  AddIceOptions(kVideoContentName, transport_options);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  std::string message = webrtc::SdpSerialize(jdesc_);
  std::string sdp_with_ice_options = kSdpFullString;
  InjectAfter(kAttributeIcePwdVoice,
              "a=ice-options:iceoption1 iceoption3\r\n",
              &sdp_with_ice_options);
  InjectAfter(kAttributeIcePwdVideo,
              "a=ice-options:iceoption2 iceoption3\r\n",
              &sdp_with_ice_options);
  EXPECT_EQ(sdp_with_ice_options, message);
}
+
// Direction attributes (recvonly/sendonly/inactive) round-trip through the
// serializer; see TestSerializeDirection.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) {
  EXPECT_TRUE(TestSerializeDirection(cricket::MD_RECVONLY));
}

TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSendOnlyContent) {
  EXPECT_TRUE(TestSerializeDirection(cricket::MD_SENDONLY));
}

TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithInactiveContent) {
  EXPECT_TRUE(TestSerializeDirection(cricket::MD_INACTIVE));
}

// Rejected m= sections (port 0) serialize correctly in every audio/video
// combination; see TestSerializeRejected.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioRejected) {
  EXPECT_TRUE(TestSerializeRejected(true, false));
}

TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithVideoRejected) {
  EXPECT_TRUE(TestSerializeRejected(false, true));
}

TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioVideoRejected) {
  EXPECT_TRUE(TestSerializeRejected(true, true));
}
+
// An RTP data channel serializes as kSdpString followed by the RTP
// data-channel m= section.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRtpDataChannel) {
  AddRtpDataChannel();
  JsepSessionDescription jsep_desc(kDummyString);

  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
  std::string message = webrtc::SdpSerialize(jsep_desc);

  std::string expected_sdp = kSdpString;
  expected_sdp.append(kSdpRtpDataChannelString);
  EXPECT_EQ(expected_sdp, message);
}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSctpDataChannel) {
+ AddSctpDataChannel();
+ JsepSessionDescription jsep_desc(kDummyString);
+
+ ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+ std::string message = webrtc::SdpSerialize(jsep_desc);
+
+ std::string expected_sdp = kSdpString;
+ expected_sdp.append(kSdpSctpDataChannelString);
+ EXPECT_EQ(message, expected_sdp);
+}
+
// Replacing the SCTP codec's port parameter must show up in the serialized
// SDP; the expected string is built by substituting the default port with the
// new one in the reference data-channel SDP.
TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) {
  AddSctpDataChannel();
  JsepSessionDescription jsep_desc(kDummyString);

  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
  DataContentDescription* dcdesc = static_cast<DataContentDescription*>(
      jsep_desc.description()->GetContentDescriptionByName(kDataContentName));

  const int kNewPort = 1234;
  cricket::DataCodec codec(
      cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName, 0);
  codec.SetParam(cricket::kCodecParamPort, kNewPort);
  dcdesc->AddOrReplaceCodec(codec);

  std::string message = webrtc::SdpSerialize(jsep_desc);

  std::string expected_sdp = kSdpString;
  expected_sdp.append(kSdpSctpDataChannelString);

  // Rewrite every occurrence of the default port in the expected SDP with the
  // new port.
  char default_portstr[16];
  char new_portstr[16];
  rtc::sprintfn(default_portstr, sizeof(default_portstr), "%d",
                kDefaultSctpPort);
  rtc::sprintfn(new_portstr, sizeof(new_portstr), "%d", kNewPort);
  rtc::replace_substrs(default_portstr, strlen(default_portstr),
                       new_portstr, strlen(new_portstr),
                       &expected_sdp);

  EXPECT_EQ(expected_sdp, message);
}
+
// Bandwidth set on an RTP data content must serialize as a b=AS line after
// the data section's c line.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) {
  AddRtpDataChannel();
  data_desc_->set_bandwidth(100*1000);
  JsepSessionDescription jsep_desc(kDummyString);

  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
  std::string message = webrtc::SdpSerialize(jsep_desc);

  std::string expected_sdp = kSdpString;
  expected_sdp.append(kSdpRtpDataChannelString);
  // Serializing data content shouldn't ignore bandwidth settings.
  InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP4 0.0.0.0\r\n",
              "b=AS:100\r\n",
              &expected_sdp);
  EXPECT_EQ(expected_sdp, message);
}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmap) {
+ AddExtmap();
+ JsepSessionDescription desc_with_extmap("dummy");
+ ASSERT_TRUE(desc_with_extmap.Initialize(desc_.Copy(),
+ kSessionId, kSessionVersion));
+ std::string message = webrtc::SdpSerialize(desc_with_extmap);
+
+ std::string sdp_with_extmap = kSdpString;
+ InjectAfter("a=mid:audio_content_name\r\n",
+ kExtmap, &sdp_with_extmap);
+ InjectAfter("a=mid:video_content_name\r\n",
+ kExtmap, &sdp_with_extmap);
+
+ EXPECT_EQ(sdp_with_extmap, message);
+}
+
// A single candidate serializes to the raw candidate line.
TEST_F(WebRtcSdpTest, SerializeCandidates) {
  std::string message = webrtc::SdpSerializeCandidate(*jcandidate_);
  EXPECT_EQ(std::string(kRawCandidate), message);
}

// TODO(mallinath) : Enable this test once WebRTCSdp capable of parsing
// RFC 6544.
TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
  // A TCP candidate with tcptype "active" must serialize with the RFC 6544
  // tcptype extension attribute.
  Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
                      rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
                      "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
                      kCandidateFoundation1);
  candidate.set_tcptype(cricket::TCPTYPE_ACTIVE_STR);
  rtc::scoped_ptr<IceCandidateInterface> jcandidate(
      new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));

  std::string message = webrtc::SdpSerializeCandidate(*jcandidate);
  EXPECT_EQ(std::string(kSdpTcpActiveCandidate), message);
}
+
// Deserializing the full reference SDP must reproduce the fixture's
// reference description.
TEST_F(WebRtcSdpTest, DeserializeSessionDescription) {
  JsepSessionDescription jdesc(kDummyString);
  // Deserialize
  EXPECT_TRUE(SdpDeserialize(kSdpFullString, &jdesc));
  // Verify
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
}

// SDP with only session-level lines (no m= sections) must parse successfully
// into a description with zero contents.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutMline) {
  JsepSessionDescription jdesc(kDummyString);
  const char kSdpWithoutMline[] =
      "v=0\r\n"
      "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n";
  // Deserialize
  EXPECT_TRUE(SdpDeserialize(kSdpWithoutMline, &jdesc));
  EXPECT_EQ(0u, jdesc.description()->contents().size());
}
+
// LF-only line endings (no CR) must deserialize identically to CRLF input.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCarriageReturn) {
  JsepSessionDescription jdesc(kDummyString);
  std::string sdp_without_carriage_return = kSdpFullString;
  Replace("\r\n", "\n", &sdp_without_carriage_return);
  // Deserialize
  EXPECT_TRUE(SdpDeserialize(sdp_without_carriage_return, &jdesc));
  // Verify
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
}

// Candidate-free SDP must deserialize to a description equal to one
// initialized without candidates.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCandidates) {
  // SessionDescription with desc but without candidates.
  JsepSessionDescription jdesc_no_candidates(kDummyString);
  ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(),
                                             kSessionId, kSessionVersion));
  JsepSessionDescription new_jdesc(kDummyString);
  EXPECT_TRUE(SdpDeserialize(kSdpString, &new_jdesc));
  EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
}
+
// Static payload types without a=rtpmap lines get default codec attributes;
// rtpmap lines for payload types absent from the m= line are ignored.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmap) {
  static const char kSdpNoRtpmapString[] =
      "v=0\r\n"
      "o=- 11 22 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "m=audio 49232 RTP/AVP 0 18 103\r\n"
      // Codec that doesn't appear in the m= line will be ignored.
      "a=rtpmap:104 ISAC/32000\r\n"
      // The rtpmap line for static payload codec is optional.
      "a=rtpmap:18 G729/16000\r\n"
      "a=rtpmap:103 ISAC/16000\r\n";

  JsepSessionDescription jdesc(kDummyString);
  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
  cricket::AudioContentDescription* audio =
      static_cast<AudioContentDescription*>(
          jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
  AudioCodecs ref_codecs;
  // The codecs in the AudioContentDescription will be sorted by preference.
  ref_codecs.push_back(AudioCodec(0, "PCMU", 8000, 0, 1, 3));
  ref_codecs.push_back(AudioCodec(18, "G729", 16000, 0, 1, 2));
  ref_codecs.push_back(AudioCodec(103, "ISAC", 16000, 32000, 1, 1));
  EXPECT_EQ(ref_codecs, audio->codecs());
}
+
// An fmtp line for a static payload type without its own rtpmap must still
// attach its parameters to the defaulted codec (G729 at 8000 Hz here).
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmapButWithFmtp) {
  static const char kSdpNoRtpmapString[] =
      "v=0\r\n"
      "o=- 11 22 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "m=audio 49232 RTP/AVP 18 103\r\n"
      "a=fmtp:18 annexb=yes\r\n"
      "a=rtpmap:103 ISAC/16000\r\n";

  JsepSessionDescription jdesc(kDummyString);
  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
  cricket::AudioContentDescription* audio =
      static_cast<AudioContentDescription*>(
          jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));

  cricket::AudioCodec g729 = audio->codecs()[0];
  EXPECT_EQ("G729", g729.name);
  EXPECT_EQ(8000, g729.clockrate);
  EXPECT_EQ(18, g729.id);
  cricket::CodecParameterMap::iterator found =
      g729.params.find("annexb");
  ASSERT_TRUE(found != g729.params.end());
  EXPECT_EQ(found->second, "yes");

  cricket::AudioCodec isac = audio->codecs()[1];
  EXPECT_EQ("ISAC", isac.name);
  EXPECT_EQ(103, isac.id);
  EXPECT_EQ(16000, isac.clockrate);
}
+
// Ensure that we can deserialize SDP with a=fingerprint properly.
TEST_F(WebRtcSdpTest, DeserializeJsepSessionDescriptionWithFingerprint) {
  // Add a DTLS a=fingerprint attribute to our session description.
  AddFingerprint();
  JsepSessionDescription new_jdesc(kDummyString);
  ASSERT_TRUE(new_jdesc.Initialize(desc_.Copy(),
                                   jdesc_.session_id(),
                                   jdesc_.session_version()));

  // Parse kSdpString with a fingerprint injected into each media section and
  // check it matches the reference description built above.
  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
  std::string sdp_with_fingerprint = kSdpString;
  InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint);
  InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint);
  EXPECT_TRUE(SdpDeserialize(sdp_with_fingerprint, &jdesc_with_fingerprint));
  EXPECT_TRUE(CompareSessionDescription(jdesc_with_fingerprint, new_jdesc));
}
+
// An a=group:BUNDLE line must deserialize into a BUNDLE ContentGroup covering
// both contents.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBundle) {
  JsepSessionDescription jdesc_with_bundle(kDummyString);
  std::string sdp_with_bundle = kSdpFullString;
  InjectAfter(kSessionTime,
              "a=group:BUNDLE audio_content_name video_content_name\r\n",
              &sdp_with_bundle);
  EXPECT_TRUE(SdpDeserialize(sdp_with_bundle, &jdesc_with_bundle));
  // Build the matching reference: same group added to desc_.
  ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
  group.AddContentName(kAudioContentName);
  group.AddContentName(kVideoContentName);
  desc_.AddGroup(group);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bundle));
}
+
// b=AS lines (kbps) must deserialize into per-content bandwidth values (bps).
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBandwidth) {
  JsepSessionDescription jdesc_with_bandwidth(kDummyString);
  std::string sdp_with_bandwidth = kSdpFullString;
  InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n",
              "b=AS:100\r\n",
              &sdp_with_bandwidth);
  InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n",
              "b=AS:50\r\n",
              &sdp_with_bandwidth);
  EXPECT_TRUE(
      SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
  // Build the matching reference with the same bandwidth values set directly.
  VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
      GetFirstVideoContent(&desc_)->description);
  vcd->set_bandwidth(100 * 1000);
  AudioContentDescription* acd = static_cast<AudioContentDescription*>(
      GetFirstAudioContent(&desc_)->description);
  acd->set_bandwidth(50 * 1000);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth));
}
+
// Session-level a=ice-options apply to every media section and combine with
// the media-level options (session-level option listed first).
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) {
  JsepSessionDescription jdesc_with_ice_options(kDummyString);
  std::string sdp_with_ice_options = kSdpFullString;
  InjectAfter(kSessionTime,
              "a=ice-options:iceoption3\r\n",
              &sdp_with_ice_options);
  InjectAfter(kAttributeIcePwdVoice,
              "a=ice-options:iceoption1\r\n",
              &sdp_with_ice_options);
  InjectAfter(kAttributeIcePwdVideo,
              "a=ice-options:iceoption2\r\n",
              &sdp_with_ice_options);
  EXPECT_TRUE(SdpDeserialize(sdp_with_ice_options, &jdesc_with_ice_options));
  std::vector<std::string> transport_options;
  transport_options.push_back(kIceOption3);
  transport_options.push_back(kIceOption1);
  AddIceOptions(kAudioContentName, transport_options);
  transport_options.clear();
  transport_options.push_back(kIceOption3);
  transport_options.push_back(kIceOption2);
  AddIceOptions(kVideoContentName, transport_options);
  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
                                jdesc_.session_id(),
                                jdesc_.session_version()));
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options));
}
+
// Media-level ice-ufrag/pwd override session-level ones: audio (index 0) gets
// the media-level credentials, video (index 1) falls back to session-level.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) {
  // Remove the original ice-ufrag and ice-pwd
  JsepSessionDescription jdesc_with_ufrag_pwd(kDummyString);
  std::string sdp_with_ufrag_pwd = kSdpFullString;
  EXPECT_TRUE(RemoveCandidateUfragPwd(&sdp_with_ufrag_pwd));
  // Add session level ufrag and pwd
  InjectAfter(kSessionTime,
      "a=ice-pwd:session+level+icepwd\r\n"
      "a=ice-ufrag:session+level+iceufrag\r\n",
      &sdp_with_ufrag_pwd);
  // Add media level ufrag and pwd for audio
  InjectAfter("a=mid:audio_content_name\r\n",
      "a=ice-pwd:media+level+icepwd\r\na=ice-ufrag:media+level+iceufrag\r\n",
      &sdp_with_ufrag_pwd);
  // Update the candidate ufrag and pwd to the expected ones.
  EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 0,
      "media+level+iceufrag", "media+level+icepwd"));
  EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 1,
      "session+level+iceufrag", "session+level+icepwd"));
  EXPECT_TRUE(SdpDeserialize(sdp_with_ufrag_pwd, &jdesc_with_ufrag_pwd));
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ufrag_pwd));
}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRecvOnlyContent) {
+ EXPECT_TRUE(TestDeserializeDirection(cricket::MD_RECVONLY));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSendOnlyContent) {
+ EXPECT_TRUE(TestDeserializeDirection(cricket::MD_SENDONLY));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInactiveContent) {
+ EXPECT_TRUE(TestDeserializeDirection(cricket::MD_INACTIVE));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudio) {
+ EXPECT_TRUE(TestDeserializeRejected(true, false));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedVideo) {
+ EXPECT_TRUE(TestDeserializeRejected(false, true));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudioVideo) {
+ EXPECT_TRUE(TestDeserializeRejected(true, true));
+}
+
+// Tests that we can still handle an sdp that uses mslabel and label instead of
+// msid for backward compatibility.
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutMsid) {
+ jdesc_.description()->set_msid_supported(false);
+ JsepSessionDescription jdesc(kDummyString);
+ std::string sdp_without_msid = kSdpFullString;
+ Replace("msid", "xmsid", &sdp_without_msid);
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(sdp_without_msid, &jdesc));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidate) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string sdp = kSdpOneCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+
+ // Candidate line without generation extension.
+ sdp = kSdpOneCandidate;
+ Replace(" generation 2", "", &sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate expected = jcandidate_->candidate();
+ expected.set_generation(0);
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+ sdp = kSdpTcpActiveCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ // Make a cricket::Candidate equivalent to kSdpTcpCandidate string.
+ Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+ rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
+ "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
+ kCandidateFoundation1);
+ rtc::scoped_ptr<IceCandidateInterface> jcandidate_template(
+ new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(
+ jcandidate_template->candidate()));
+ sdp = kSdpTcpPassiveCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ sdp = kSdpTcpSOCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+}
+
+// This test verifies the deserialization of candidate-attribute
+// as per RFC 5245. Candidate-attribute will be of the format
+// candidate:<blah>. This format will be used when candidates
+// are trickled.
+TEST_F(WebRtcSdpTest, DeserializeRawCandidateAttribute) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string candidate_attribute = kRawCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+ EXPECT_EQ(2u, jcandidate.candidate().generation());
+
+ // Candidate line without generation extension.
+ candidate_attribute = kRawCandidate;
+ Replace(" generation 2", "", &candidate_attribute);
+ EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate expected = jcandidate_->candidate();
+ expected.set_generation(0);
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+ // Candidate line without candidate:
+ candidate_attribute = kRawCandidate;
+ Replace("candidate:", "", &candidate_attribute);
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ // Candidate line with IPV6 address.
+ EXPECT_TRUE(SdpDeserializeCandidate(kRawIPV6Candidate, &jcandidate));
+}
+
+// This test verifies that the deserialization of an invalid candidate string
+// fails.
+TEST_F(WebRtcSdpTest, DeserializeInvalidCandidiate) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string candidate_attribute = kRawCandidate;
+ candidate_attribute.replace(0, 1, "x");
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ candidate_attribute = kSdpOneCandidate;
+ candidate_attribute.replace(0, 1, "x");
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ candidate_attribute = kRawCandidate;
+ candidate_attribute.append("\r\n");
+ candidate_attribute.append(kRawCandidate);
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ EXPECT_FALSE(SdpDeserializeCandidate(kSdpTcpInvalidCandidate, &jcandidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannels) {
+ AddRtpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpRtpDataChannelString);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannels) {
+ AddSctpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ // Verify with DTLS/SCTP (already in kSdpSctpDataChannelString).
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with UDP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+ strlen(kDtlsSctp), kUdpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with TCP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+ strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpPort) {
+ AddSctpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ // Verify with DTLS/SCTP (already in kSdpSctpDataChannelStringWithSctpPort).
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with UDP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+ strlen(kDtlsSctp), kUdpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with TCP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+ strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpColonPort) {
+ AddSctpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpColonPort);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ // Verify with DTLS/SCTP.
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with UDP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+ strlen(kDtlsSctp), kUdpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with TCP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+ strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+// Test to check the behaviour if sctp-port is specified
+// on the m= line and in a=sctp-port.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithMultiSctpPort) {
+ AddSctpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ // Append m= attributes
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ // Append a=sctp-port attribute
+ sdp_with_data.append("a=sctp-port 5000\r\n");
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+}
+
+// For crbug/344475.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithCorruptedSctpDataChannels) {
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ // Remove the "\n" at the end.
+ sdp_with_data = sdp_with_data.substr(0, sdp_with_data.size() - 1);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ // No crash is a pass.
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndNewPort) {
+ AddSctpDataChannel();
+ const uint16_t kUnusualSctpPort = 9556;
+ char default_portstr[16];
+ char unusual_portstr[16];
+ rtc::sprintfn(default_portstr, sizeof(default_portstr), "%d",
+ kDefaultSctpPort);
+ rtc::sprintfn(unusual_portstr, sizeof(unusual_portstr), "%d",
+ kUnusualSctpPort);
+
+ // First setup the expected JsepSessionDescription.
+ JsepSessionDescription jdesc(kDummyString);
+ // take our pre-built session description and change the SCTP port.
+ cricket::SessionDescription* mutant = desc_.Copy();
+ DataContentDescription* dcdesc = static_cast<DataContentDescription*>(
+ mutant->GetContentDescriptionByName(kDataContentName));
+ std::vector<cricket::DataCodec> codecs(dcdesc->codecs());
+ EXPECT_EQ(1U, codecs.size());
+ EXPECT_EQ(cricket::kGoogleSctpDataCodecId, codecs[0].id);
+ codecs[0].SetParam(cricket::kCodecParamPort, kUnusualSctpPort);
+ dcdesc->set_codecs(codecs);
+
+ // note: mutant's owned by jdesc now.
+ ASSERT_TRUE(jdesc.Initialize(mutant, kSessionId, kSessionVersion));
+ mutant = NULL;
+
+ // Then get the deserialized JsepSessionDescription.
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ rtc::replace_substrs(default_portstr, strlen(default_portstr),
+ unusual_portstr, strlen(unusual_portstr),
+ &sdp_with_data);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // We need to test the deserialized JsepSessionDescription from
+ // kSdpSctpDataChannelStringWithSctpPort for
+ // draft-ietf-mmusic-sctp-sdp-07
+ // a=sctp-port
+ sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+ rtc::replace_substrs(default_portstr, strlen(default_portstr),
+ unusual_portstr, strlen(unusual_portstr),
+ &sdp_with_data);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannelsAndBandwidth) {
+ // We want to test that deserializing data content limits bandwidth
+ // settings (it should never be greater than the default).
+ // This should prevent someone from using unlimited data bandwidth through
+ // JS and "breaking the Internet".
+ // See: https://code.google.com/p/chromium/issues/detail?id=280726
+ std::string sdp_with_bandwidth = kSdpString;
+ sdp_with_bandwidth.append(kSdpRtpDataChannelString);
+ InjectAfter("a=mid:data_content_name\r\n",
+ "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) {
+ AddSctpDataChannel();
+ JsepSessionDescription jdesc(kDummyString);
+ DataContentDescription* dcd = static_cast<DataContentDescription*>(
+ GetFirstDataContent(&desc_)->description);
+ dcd->set_bandwidth(100 * 1000);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_bandwidth = kSdpString;
+ sdp_with_bandwidth.append(kSdpSctpDataChannelString);
+ InjectAfter("a=mid:data_content_name\r\n",
+ "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+
+ // SCTP has congestion control, so we shouldn't limit the bandwidth
+ // as we do for RTP.
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSessionLevelExtmap) {
+ TestDeserializeExtmap(true, false);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithMediaLevelExtmap) {
+ TestDeserializeExtmap(false, true);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInvalidExtmap) {
+ TestDeserializeExtmap(true, true);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) {
+ JsepSessionDescription jdesc(kDummyString);
+ std::string sdp = kSdpFullString;
+ sdp = sdp.substr(0, sdp.size() - 2); // Remove \r\n at the end.
+ // Deserialize
+ SdpParseError error;
+ EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error));
+ const std::string lastline = "a=ssrc:6 label:video_track_id_3";
+ EXPECT_EQ(lastline, error.line);
+ EXPECT_EQ("Invalid SDP line.", error.description);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+ std::string new_sdp = kSdpOneCandidate;
+ Replace("udp", "unsupported_transport", &new_sdp);
+ EXPECT_FALSE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+ new_sdp = kSdpOneCandidate;
+ Replace("udp", "uDP", &new_sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateOldFormat) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+ EXPECT_TRUE(SdpDeserializeCandidate(kSdpOneCandidateOldFormat,&jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate ref_candidate = jcandidate_->candidate();
+ ref_candidate.set_username("user_rtp");
+ ref_candidate.set_password("password_rtp");
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(ref_candidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithConferenceFlag) {
+ JsepSessionDescription jdesc(kDummyString);
+
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(kSdpConferenceString, &jdesc));
+
+ // Verify
+ cricket::AudioContentDescription* audio =
+ static_cast<AudioContentDescription*>(
+ jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
+ EXPECT_TRUE(audio->conference_mode());
+
+ cricket::VideoContentDescription* video =
+ static_cast<VideoContentDescription*>(
+ jdesc.description()->GetContentDescriptionByName(cricket::CN_VIDEO));
+ EXPECT_TRUE(video->conference_mode());
+}
+
+TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
+ const char kSdpDestroyer[] = "!@#$%^&";
+ const char kSdpEmptyType[] = " =candidate";
+ const char kSdpEqualAsPlus[] = "a+candidate";
+ const char kSdpSpaceAfterEqual[] = "a= candidate";
+ const char kSdpUpperType[] = "A=candidate";
+ const char kSdpEmptyLine[] = "";
+ const char kSdpMissingValue[] = "a=";
+
+ const char kSdpBrokenFingerprint[] = "a=fingerprint:sha-1 "
+ "4AAD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+ const char kSdpExtraField[] = "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB XXX";
+ const char kSdpMissingSpace[] = "a=fingerprint:sha-1"
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+ // MD5 is not allowed in fingerprints.
+ const char kSdpMd5[] = "a=fingerprint:md5 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B";
+
+ // Broken session description
+ ExpectParseFailure("v=", kSdpDestroyer);
+ ExpectParseFailure("o=", kSdpDestroyer);
+ ExpectParseFailure("s=-", kSdpDestroyer);
+ // Broken time description
+ ExpectParseFailure("t=", kSdpDestroyer);
+
+ // Broken media description
+ ExpectParseFailure("m=audio", "c=IN IP4 74.125.224.39");
+ ExpectParseFailure("m=video", kSdpDestroyer);
+
+ // Invalid lines
+ ExpectParseFailure("a=candidate", kSdpEmptyType);
+ ExpectParseFailure("a=candidate", kSdpEqualAsPlus);
+ ExpectParseFailure("a=candidate", kSdpSpaceAfterEqual);
+ ExpectParseFailure("a=candidate", kSdpUpperType);
+
+  // Bogus fingerprint replacing a=sendrecv. We selected this attribute
+ // because it's orthogonal to what we are replacing and hence
+ // safe.
+ ExpectParseFailure("a=sendrecv", kSdpBrokenFingerprint);
+ ExpectParseFailure("a=sendrecv", kSdpExtraField);
+ ExpectParseFailure("a=sendrecv", kSdpMissingSpace);
+ ExpectParseFailure("a=sendrecv", kSdpMd5);
+
+ // Empty Line
+ ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpEmptyLine);
+ ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpMissingValue);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithInvalidAttributeValue) {
+ // ssrc
+ ExpectParseFailure("a=ssrc:1", "a=ssrc:badvalue");
+ ExpectParseFailure("a=ssrc-group:FEC 5 6", "a=ssrc-group:FEC badvalue 6");
+ // crypto
+ ExpectParseFailure("a=crypto:1 ", "a=crypto:badvalue ");
+ // rtpmap
+ ExpectParseFailure("a=rtpmap:111 ", "a=rtpmap:badvalue ");
+ ExpectParseFailure("opus/48000/2", "opus/badvalue/2");
+ ExpectParseFailure("opus/48000/2", "opus/48000/badvalue");
+ // candidate
+ ExpectParseFailure("1 udp 2130706432", "badvalue udp 2130706432");
+ ExpectParseFailure("1 udp 2130706432", "1 udp badvalue");
+ ExpectParseFailure("192.168.1.5 1234", "192.168.1.5 badvalue");
+ ExpectParseFailure("rport 2346", "rport badvalue");
+ ExpectParseFailure("rport 2346 generation 2",
+ "rport 2346 generation badvalue");
+ // m line
+ ExpectParseFailure("m=audio 2345 RTP/SAVPF 111 103 104",
+ "m=audio 2345 RTP/SAVPF 111 badvalue 104");
+
+ // bandwidth
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "b=AS:badvalue\r\n",
+ "b=AS:badvalue");
+ // rtcp-fb
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "a=rtcp-fb:badvalue nack\r\n",
+ "a=rtcp-fb:badvalue nack");
+ // extmap
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "a=extmap:badvalue http://example.com\r\n",
+ "a=extmap:badvalue http://example.com");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithReorderedPltypes) {
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ const char kSdpWithReorderedPlTypesString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 104 103\r\n" // Pl type 104 preferred.
+ "a=rtpmap:111 opus/48000/2\r\n" // Pltype 111 listed before 103 and 104
+ // in the map.
+ "a=rtpmap:103 ISAC/16000\r\n" // Pltype 103 listed before 104 in the map.
+ "a=rtpmap:104 ISAC/32000\r\n";
+
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(kSdpWithReorderedPlTypesString, &jdesc_output));
+
+ const ContentInfo* ac = GetFirstAudioContent(jdesc_output.description());
+ ASSERT_TRUE(ac != NULL);
+ const AudioContentDescription* acd =
+ static_cast<const AudioContentDescription*>(ac->description);
+ ASSERT_FALSE(acd->codecs().empty());
+ EXPECT_EQ("ISAC", acd->codecs()[0].name);
+ EXPECT_EQ(32000, acd->codecs()[0].clockrate);
+ EXPECT_EQ(104, acd->codecs()[0].id);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeCodecParams) {
+ JsepSessionDescription jdesc_output(kDummyString);
+ CodecParams params;
+ params.max_ptime = 40;
+ params.ptime = 30;
+ params.min_ptime = 10;
+ params.sprop_stereo = 1;
+ params.stereo = 1;
+ params.useinband = 1;
+ params.maxaveragebitrate = 128000;
+ TestDeserializeCodecParams(params, &jdesc_output);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFb) {
+ const bool kUseWildcard = false;
+ JsepSessionDescription jdesc_output(kDummyString);
+ TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFbWildcard) {
+ const bool kUseWildcard = true;
+ JsepSessionDescription jdesc_output(kDummyString);
+ TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtp) {
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=fmtp:120 x-google-min-bitrate=10;x-google-max-quantization=40\r\n";
+
+ // Deserialize
+ SdpParseError error;
+ EXPECT_TRUE(
+ webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+ const ContentInfo* vc = GetFirstVideoContent(jdesc_output.description());
+ ASSERT_TRUE(vc != NULL);
+ const VideoContentDescription* vcd =
+ static_cast<const VideoContentDescription*>(vc->description);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ("VP8", vp8.name);
+ EXPECT_EQ(120, vp8.id);
+ cricket::CodecParameterMap::iterator found =
+ vp8.params.find("x-google-min-bitrate");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "10");
+ found = vp8.params.find("x-google-max-quantization");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSpace) {
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=fmtp:120 x-google-min-bitrate=10; x-google-max-quantization=40\r\n";
+
+ // Deserialize
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output,
+ &error));
+
+ const ContentInfo* vc = GetFirstVideoContent(jdesc_output.description());
+ ASSERT_TRUE(vc != NULL);
+ const VideoContentDescription* vcd =
+ static_cast<const VideoContentDescription*>(vc->description);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ("VP8", vp8.name);
+ EXPECT_EQ(120, vp8.id);
+ cricket::CodecParameterMap::iterator found =
+ vp8.params.find("x-google-min-bitrate");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "10");
+ found = vp8.params.find("x-google-max-quantization");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, SerializeVideoFmtp) {
+ VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
+ GetFirstVideoContent(&desc_)->description);
+
+ cricket::VideoCodecs codecs = vcd->codecs();
+ codecs[0].params["x-google-min-bitrate"] = "10";
+ vcd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+ jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("a=rtpmap:120 VP8/90000\r\n",
+ "a=fmtp:120 x-google-min-bitrate=10\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithIceLite) {
+ JsepSessionDescription jdesc_with_icelite(kDummyString);
+ std::string sdp_with_icelite = kSdpFullString;
+ EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+ cricket::SessionDescription* desc = jdesc_with_icelite.description();
+ const cricket::TransportInfo* tinfo1 =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::ICEMODE_FULL, tinfo1->description.ice_mode);
+ const cricket::TransportInfo* tinfo2 =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::ICEMODE_FULL, tinfo2->description.ice_mode);
+ InjectAfter(kSessionTime,
+ "a=ice-lite\r\n",
+ &sdp_with_icelite);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+ desc = jdesc_with_icelite.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::ICEMODE_LITE, atinfo->description.ice_mode);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::ICEMODE_LITE, vtinfo->description.ice_mode);
+}
+
+// Verifies that the candidates in the input SDP are parsed and serialized
+// correctly in the output SDP.
+TEST_F(WebRtcSdpTest, RoundTripSdpWithSctpDataChannelsWithCandidates) {
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelWithCandidatesString);
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_EQ(sdp_with_data, webrtc::SdpSerialize(jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) {
+ AddFingerprint();
+ TransportInfo audio_transport_info =
+ *(desc_.GetTransportInfoByName(kAudioContentName));
+ EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+ audio_transport_info.description.connection_role);
+ audio_transport_info.description.connection_role =
+ cricket::CONNECTIONROLE_ACTIVE;
+
+ TransportInfo video_transport_info =
+ *(desc_.GetTransportInfoByName(kVideoContentName));
+ EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+ video_transport_info.description.connection_role);
+ video_transport_info.description.connection_role =
+ cricket::CONNECTIONROLE_ACTIVE;
+
+ desc_.RemoveTransportInfoByName(kAudioContentName);
+ desc_.RemoveTransportInfoByName(kVideoContentName);
+
+ desc_.AddTransportInfo(audio_transport_info);
+ desc_.AddTransportInfo(video_transport_info);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+ jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+
+ // Fingerprint attribute is necessary to add DTLS setup attribute.
+ InjectAfter(kAttributeIcePwdVoice,
+ kFingerprint, &sdp_with_dtlssetup);
+ InjectAfter(kAttributeIcePwdVideo,
+ kFingerprint, &sdp_with_dtlssetup);
+ // Now adding |setup| attribute.
+ InjectAfter(kFingerprint,
+ "a=setup:active\r\n", &sdp_with_dtlssetup);
+ EXPECT_EQ(sdp_with_dtlssetup, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttribute) {
+ JsepSessionDescription jdesc_with_dtlssetup(kDummyString);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+ InjectAfter(kSessionTime,
+ "a=setup:actpass\r\n",
+ &sdp_with_dtlssetup);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup));
+ cricket::SessionDescription* desc = jdesc_with_dtlssetup.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+ atinfo->description.connection_role);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+ vtinfo->description.connection_role);
+}
+
+// Verifies that the order of the serialized m-lines follows the order of the
+// ContentInfo in SessionDescription, and vice versa for deserialization.
+TEST_F(WebRtcSdpTest, MediaContentOrderMaintainedRoundTrip) {
+ JsepSessionDescription jdesc(kDummyString);
+ const std::string media_content_sdps[3] = {
+ kSdpAudioString,
+ kSdpVideoString,
+ kSdpSctpDataChannelString
+ };
+ const cricket::MediaType media_types[3] = {
+ cricket::MEDIA_TYPE_AUDIO,
+ cricket::MEDIA_TYPE_VIDEO,
+ cricket::MEDIA_TYPE_DATA
+ };
+
+ // Verifies all 6 permutations.
+ for (size_t i = 0; i < 6; ++i) {
+ size_t media_content_in_sdp[3];
+ // The index of the first media content.
+ media_content_in_sdp[0] = i / 2;
+ // The index of the second media content.
+ media_content_in_sdp[1] = (media_content_in_sdp[0] + i % 2 + 1) % 3;
+ // The index of the third media content.
+ media_content_in_sdp[2] = (media_content_in_sdp[0] + (i + 1) % 2 + 1) % 3;
+
+ std::string sdp_string = kSdpSessionString;
+ for (size_t i = 0; i < 3; ++i)
+ sdp_string += media_content_sdps[media_content_in_sdp[i]];
+
+ EXPECT_TRUE(SdpDeserialize(sdp_string, &jdesc));
+ cricket::SessionDescription* desc = jdesc.description();
+ EXPECT_EQ(3u, desc->contents().size());
+
+ for (size_t i = 0; i < 3; ++i) {
+ const cricket::MediaContentDescription* mdesc =
+ static_cast<const cricket::MediaContentDescription*>(
+ desc->contents()[i].description);
+ EXPECT_EQ(media_types[media_content_in_sdp[i]], mdesc->type());
+ }
+
+ std::string serialized_sdp = webrtc::SdpSerialize(jdesc);
+ EXPECT_EQ(sdp_string, serialized_sdp);
+ }
+}
diff --git a/talk/app/webrtc/webrtcsession.cc b/talk/app/webrtc/webrtcsession.cc
new file mode 100644
index 0000000000..95abeab77a
--- /dev/null
+++ b/talk/app/webrtc/webrtcsession.cc
@@ -0,0 +1,2204 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/webrtcsession.h"
+
+#include <limits.h>
+
+#include <algorithm>
+#include <vector>
+#include <set>
+
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/sctputils.h"
+#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+#include "talk/media/base/constants.h"
+#include "talk/media/base/videocapturer.h"
+#include "talk/session/media/channel.h"
+#include "talk/session/media/channelmanager.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/call.h"
+#include "webrtc/p2p/base/portallocator.h"
+#include "webrtc/p2p/base/transportchannel.h"
+
+using cricket::ContentInfo;
+using cricket::ContentInfos;
+using cricket::MediaContentDescription;
+using cricket::SessionDescription;
+using cricket::TransportInfo;
+
+using cricket::LOCAL_PORT_TYPE;
+using cricket::STUN_PORT_TYPE;
+using cricket::RELAY_PORT_TYPE;
+using cricket::PRFLX_PORT_TYPE;
+
+namespace webrtc {
+
// Error messages used when rejecting/failing SDP operations in this file.
const char kBundleWithoutRtcpMux[] = "RTCP-MUX must be enabled when BUNDLE "
    "is enabled.";
const char kCreateChannelFailed[] = "Failed to create channels.";
const char kInvalidCandidates[] = "Description contains invalid candidates.";
const char kInvalidSdp[] = "Invalid session description.";
const char kMlineMismatch[] =
    "Offer and answer descriptions m-lines are not matching. Rejecting answer.";
const char kPushDownTDFailed[] =
    "Failed to push down transport description:";
const char kSdpWithoutDtlsFingerprint[] =
    "Called with SDP without DTLS fingerprint.";
const char kSdpWithoutSdesCrypto[] =
    "Called with SDP without SDES crypto.";
const char kSdpWithoutIceUfragPwd[] =
    "Called with SDP without ice-ufrag and ice-pwd.";
const char kSessionError[] = "Session error code: ";
const char kSessionErrorDesc[] = "Session error description: ";
const char kDtlsSetupFailureRtp[] =
    "Couldn't set up DTLS-SRTP on RTP channel.";
const char kDtlsSetupFailureRtcp[] =
    "Couldn't set up DTLS-SRTP on RTCP channel.";
const char kEnableBundleFailed[] = "Failed to enable BUNDLE.";
// Upper bound applied to the kNumUnsignalledRecvStreams constraint in
// WebRtcSession::Initialize().
const int kMaxUnsignalledRecvStreams = 20;
+
+IceCandidatePairType GetIceCandidatePairCounter(
+ const cricket::Candidate& local,
+ const cricket::Candidate& remote) {
+ const auto& l = local.type();
+ const auto& r = remote.type();
+ const auto& host = LOCAL_PORT_TYPE;
+ const auto& srflx = STUN_PORT_TYPE;
+ const auto& relay = RELAY_PORT_TYPE;
+ const auto& prflx = PRFLX_PORT_TYPE;
+ if (l == host && r == host) {
+ bool local_private = IPIsPrivate(local.address().ipaddr());
+ bool remote_private = IPIsPrivate(remote.address().ipaddr());
+ if (local_private) {
+ if (remote_private) {
+ return kIceCandidatePairHostPrivateHostPrivate;
+ } else {
+ return kIceCandidatePairHostPrivateHostPublic;
+ }
+ } else {
+ if (remote_private) {
+ return kIceCandidatePairHostPublicHostPrivate;
+ } else {
+ return kIceCandidatePairHostPublicHostPublic;
+ }
+ }
+ }
+ if (l == host && r == srflx)
+ return kIceCandidatePairHostSrflx;
+ if (l == host && r == relay)
+ return kIceCandidatePairHostRelay;
+ if (l == host && r == prflx)
+ return kIceCandidatePairHostPrflx;
+ if (l == srflx && r == host)
+ return kIceCandidatePairSrflxHost;
+ if (l == srflx && r == srflx)
+ return kIceCandidatePairSrflxSrflx;
+ if (l == srflx && r == relay)
+ return kIceCandidatePairSrflxRelay;
+ if (l == srflx && r == prflx)
+ return kIceCandidatePairSrflxPrflx;
+ if (l == relay && r == host)
+ return kIceCandidatePairRelayHost;
+ if (l == relay && r == srflx)
+ return kIceCandidatePairRelaySrflx;
+ if (l == relay && r == relay)
+ return kIceCandidatePairRelayRelay;
+ if (l == relay && r == prflx)
+ return kIceCandidatePairRelayPrflx;
+ if (l == prflx && r == host)
+ return kIceCandidatePairPrflxHost;
+ if (l == prflx && r == srflx)
+ return kIceCandidatePairPrflxSrflx;
+ if (l == prflx && r == relay)
+ return kIceCandidatePairPrflxRelay;
+ return kIceCandidatePairMax;
+}
+
+// Compares |answer| against |offer|. Comparision is done
+// for number of m-lines in answer against offer. If matches true will be
+// returned otherwise false.
+static bool VerifyMediaDescriptions(
+ const SessionDescription* answer, const SessionDescription* offer) {
+ if (offer->contents().size() != answer->contents().size())
+ return false;
+
+ for (size_t i = 0; i < offer->contents().size(); ++i) {
+ if ((offer->contents()[i].name) != answer->contents()[i].name) {
+ return false;
+ }
+ const MediaContentDescription* offer_mdesc =
+ static_cast<const MediaContentDescription*>(
+ offer->contents()[i].description);
+ const MediaContentDescription* answer_mdesc =
+ static_cast<const MediaContentDescription*>(
+ answer->contents()[i].description);
+ if (offer_mdesc->type() != answer_mdesc->type()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+// Checks that each non-rejected content has SDES crypto keys or a DTLS
+// fingerprint. Mismatches, such as replying with a DTLS fingerprint to SDES
+// keys, will be caught in Transport negotiation, and backstopped by Channel's
+// |secure_required| check.
+static bool VerifyCrypto(const SessionDescription* desc,
+ bool dtls_enabled,
+ std::string* error) {
+ const ContentInfos& contents = desc->contents();
+ for (size_t index = 0; index < contents.size(); ++index) {
+ const ContentInfo* cinfo = &contents[index];
+ if (cinfo->rejected) {
+ continue;
+ }
+
+ // If the content isn't rejected, crypto must be present.
+ const MediaContentDescription* media =
+ static_cast<const MediaContentDescription*>(cinfo->description);
+ const TransportInfo* tinfo = desc->GetTransportInfoByName(cinfo->name);
+ if (!media || !tinfo) {
+ // Something is not right.
+ LOG(LS_ERROR) << kInvalidSdp;
+ *error = kInvalidSdp;
+ return false;
+ }
+ if (dtls_enabled) {
+ if (!tinfo->description.identity_fingerprint) {
+ LOG(LS_WARNING) <<
+ "Session description must have DTLS fingerprint if DTLS enabled.";
+ *error = kSdpWithoutDtlsFingerprint;
+ return false;
+ }
+ } else {
+ if (media->cryptos().empty()) {
+ LOG(LS_WARNING) <<
+ "Session description must have SDES when DTLS disabled.";
+ *error = kSdpWithoutSdesCrypto;
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+// Checks that each non-rejected content has ice-ufrag and ice-pwd set.
+static bool VerifyIceUfragPwdPresent(const SessionDescription* desc) {
+ const ContentInfos& contents = desc->contents();
+ for (size_t index = 0; index < contents.size(); ++index) {
+ const ContentInfo* cinfo = &contents[index];
+ if (cinfo->rejected) {
+ continue;
+ }
+
+ // If the content isn't rejected, ice-ufrag and ice-pwd must be present.
+ const TransportInfo* tinfo = desc->GetTransportInfoByName(cinfo->name);
+ if (!tinfo) {
+ // Something is not right.
+ LOG(LS_ERROR) << kInvalidSdp;
+ return false;
+ }
+ if (tinfo->description.ice_ufrag.empty() ||
+ tinfo->description.ice_pwd.empty()) {
+ LOG(LS_ERROR) << "Session description must have ice ufrag and pwd.";
+ return false;
+ }
+ }
+ return true;
+}
+
+// Forces |sdesc->crypto_required| to the appropriate state based on the
+// current security policy, to ensure a failure occurs if there is an error
+// in crypto negotiation.
+// Called when processing the local session description.
+static void UpdateSessionDescriptionSecurePolicy(cricket::CryptoType type,
+ SessionDescription* sdesc) {
+ if (!sdesc) {
+ return;
+ }
+
+ // Updating the |crypto_required_| in MediaContentDescription to the
+ // appropriate state based on the current security policy.
+ for (cricket::ContentInfos::iterator iter = sdesc->contents().begin();
+ iter != sdesc->contents().end(); ++iter) {
+ if (cricket::IsMediaContent(&*iter)) {
+ MediaContentDescription* mdesc =
+ static_cast<MediaContentDescription*> (iter->description);
+ if (mdesc) {
+ mdesc->set_crypto_required(type);
+ }
+ }
+ }
+}
+
+static bool GetAudioSsrcByTrackId(const SessionDescription* session_description,
+ const std::string& track_id,
+ uint32_t* ssrc) {
+ const cricket::ContentInfo* audio_info =
+ cricket::GetFirstAudioContent(session_description);
+ if (!audio_info) {
+ LOG(LS_ERROR) << "Audio not used in this call";
+ return false;
+ }
+
+ const cricket::MediaContentDescription* audio_content =
+ static_cast<const cricket::MediaContentDescription*>(
+ audio_info->description);
+ const cricket::StreamParams* stream =
+ cricket::GetStreamByIds(audio_content->streams(), "", track_id);
+ if (!stream) {
+ return false;
+ }
+
+ *ssrc = stream->first_ssrc();
+ return true;
+}
+
+static bool GetTrackIdBySsrc(const SessionDescription* session_description,
+ uint32_t ssrc,
+ std::string* track_id) {
+ ASSERT(track_id != NULL);
+
+ const cricket::ContentInfo* audio_info =
+ cricket::GetFirstAudioContent(session_description);
+ if (audio_info) {
+ const cricket::MediaContentDescription* audio_content =
+ static_cast<const cricket::MediaContentDescription*>(
+ audio_info->description);
+
+ const auto* found =
+ cricket::GetStreamBySsrc(audio_content->streams(), ssrc);
+ if (found) {
+ *track_id = found->id;
+ return true;
+ }
+ }
+
+ const cricket::ContentInfo* video_info =
+ cricket::GetFirstVideoContent(session_description);
+ if (video_info) {
+ const cricket::MediaContentDescription* video_content =
+ static_cast<const cricket::MediaContentDescription*>(
+ video_info->description);
+
+ const auto* found =
+ cricket::GetStreamBySsrc(video_content->streams(), ssrc);
+ if (found) {
+ *track_id = found->id;
+ return true;
+ }
+ }
+ return false;
+}
+
+static bool BadSdp(const std::string& source,
+ const std::string& type,
+ const std::string& reason,
+ std::string* err_desc) {
+ std::ostringstream desc;
+ desc << "Failed to set " << source;
+ if (!type.empty()) {
+ desc << " " << type;
+ }
+ desc << " sdp: " << reason;
+
+ if (err_desc) {
+ *err_desc = desc.str();
+ }
+ LOG(LS_ERROR) << desc.str();
+ return false;
+}
+
+static bool BadSdp(cricket::ContentSource source,
+ const std::string& type,
+ const std::string& reason,
+ std::string* err_desc) {
+ if (source == cricket::CS_LOCAL) {
+ return BadSdp("local", type, reason, err_desc);
+ } else {
+ return BadSdp("remote", type, reason, err_desc);
+ }
+}
+
// Convenience wrappers around BadSdp(). Each fixes the content source
// (local/remote) and/or the SDP type (offer/pranswer/answer); all of them
// always return false.
static bool BadLocalSdp(const std::string& type,
                        const std::string& reason,
                        std::string* err_desc) {
  return BadSdp(cricket::CS_LOCAL, type, reason, err_desc);
}

static bool BadRemoteSdp(const std::string& type,
                         const std::string& reason,
                         std::string* err_desc) {
  return BadSdp(cricket::CS_REMOTE, type, reason, err_desc);
}

static bool BadOfferSdp(cricket::ContentSource source,
                        const std::string& reason,
                        std::string* err_desc) {
  return BadSdp(source, SessionDescriptionInterface::kOffer, reason, err_desc);
}

static bool BadPranswerSdp(cricket::ContentSource source,
                           const std::string& reason,
                           std::string* err_desc) {
  return BadSdp(source, SessionDescriptionInterface::kPrAnswer,
                reason, err_desc);
}

static bool BadAnswerSdp(cricket::ContentSource source,
                         const std::string& reason,
                         std::string* err_desc) {
  return BadSdp(source, SessionDescriptionInterface::kAnswer, reason, err_desc);
}
+
+#define GET_STRING_OF_STATE(state) \
+ case webrtc::WebRtcSession::state: \
+ result = #state; \
+ break;
+
+static std::string GetStateString(webrtc::WebRtcSession::State state) {
+ std::string result;
+ switch (state) {
+ GET_STRING_OF_STATE(STATE_INIT)
+ GET_STRING_OF_STATE(STATE_SENTOFFER)
+ GET_STRING_OF_STATE(STATE_RECEIVEDOFFER)
+ GET_STRING_OF_STATE(STATE_SENTPRANSWER)
+ GET_STRING_OF_STATE(STATE_RECEIVEDPRANSWER)
+ GET_STRING_OF_STATE(STATE_INPROGRESS)
+ GET_STRING_OF_STATE(STATE_CLOSED)
+ default:
+ ASSERT(false);
+ break;
+ }
+ return result;
+}
+
+#define GET_STRING_OF_ERROR_CODE(err) \
+ case webrtc::WebRtcSession::err: \
+ result = #err; \
+ break;
+
+static std::string GetErrorCodeString(webrtc::WebRtcSession::Error err) {
+ std::string result;
+ switch (err) {
+ GET_STRING_OF_ERROR_CODE(ERROR_NONE)
+ GET_STRING_OF_ERROR_CODE(ERROR_CONTENT)
+ GET_STRING_OF_ERROR_CODE(ERROR_TRANSPORT)
+ default:
+ RTC_DCHECK(false);
+ break;
+ }
+ return result;
+}
+
// Joins |error| and |desc| with a single space into one message string.
static std::string MakeErrorString(const std::string& error,
                                   const std::string& desc) {
  return error + " " + desc;
}
+
// Prefixes |desc| with kPushDownTDFailed to form a transport-description
// push-down error message.
static std::string MakeTdErrorString(const std::string& desc) {
  return MakeErrorString(kPushDownTDFailed, desc);
}
+
+// Set |option| to the highest-priority value of |key| in the optional
+// constraints if the key is found and has a valid value.
+template<typename T>
+static void SetOptionFromOptionalConstraint(
+ const MediaConstraintsInterface* constraints,
+ const std::string& key, cricket::Settable<T>* option) {
+ if (!constraints) {
+ return;
+ }
+ std::string string_value;
+ T value;
+ if (constraints->GetOptional().FindFirst(key, &string_value)) {
+ if (rtc::FromString(string_value, &value)) {
+ option->Set(value);
+ }
+ }
+}
+
// Maps the application-level ICE transports policy onto the candidate-filter
// bitmask handed to the port allocator. kNoHost keeps every candidate type
// except host candidates.
uint32_t ConvertIceTransportTypeToCandidateFilter(
    PeerConnectionInterface::IceTransportsType type) {
  switch (type) {
    case PeerConnectionInterface::kNone:
      return cricket::CF_NONE;
    case PeerConnectionInterface::kRelay:
      return cricket::CF_RELAY;
    case PeerConnectionInterface::kNoHost:
      return (cricket::CF_ALL & ~cricket::CF_HOST);
    case PeerConnectionInterface::kAll:
      return cricket::CF_ALL;
    default: ASSERT(false);  // Unknown policy; falls through to CF_NONE.
  }
  return cricket::CF_NONE;
}
+
+// Help class used to remember if a a remote peer has requested ice restart by
+// by sending a description with new ice ufrag and password.
+class IceRestartAnswerLatch {
+ public:
+ IceRestartAnswerLatch() : ice_restart_(false) { }
+
+ // Returns true if CheckForRemoteIceRestart has been called with a new session
+ // description where ice password and ufrag has changed since last time
+ // Reset() was called.
+ bool Get() const {
+ return ice_restart_;
+ }
+
+ void Reset() {
+ if (ice_restart_) {
+ ice_restart_ = false;
+ }
+ }
+
+ bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
+ const SessionDescriptionInterface* new_desc) {
+ if (!old_desc || new_desc->type() != SessionDescriptionInterface::kOffer) {
+ return false;
+ }
+ const SessionDescription* new_sd = new_desc->description();
+ const SessionDescription* old_sd = old_desc->description();
+ const ContentInfos& contents = new_sd->contents();
+ for (size_t index = 0; index < contents.size(); ++index) {
+ const ContentInfo* cinfo = &contents[index];
+ if (cinfo->rejected) {
+ continue;
+ }
+ // If the content isn't rejected, check if ufrag and password has
+ // changed.
+ const cricket::TransportDescription* new_transport_desc =
+ new_sd->GetTransportDescriptionByName(cinfo->name);
+ const cricket::TransportDescription* old_transport_desc =
+ old_sd->GetTransportDescriptionByName(cinfo->name);
+ if (!new_transport_desc || !old_transport_desc) {
+ // No transport description exist. This is not an ice restart.
+ continue;
+ }
+ if (cricket::IceCredentialsChanged(old_transport_desc->ice_ufrag,
+ old_transport_desc->ice_pwd,
+ new_transport_desc->ice_ufrag,
+ new_transport_desc->ice_pwd)) {
+ LOG(LS_INFO) << "Remote peer request ice restart.";
+ ice_restart_ = true;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private:
+ bool ice_restart_;
+};
+
// Constructs the session. NOTE(review): |media_controller| (and the threads /
// port allocator) are assumed to outlive the session — confirm against the
// caller. The transport controller starts in the CONTROLLED ICE role; it is
// flipped to CONTROLLING in SetLocalDescription() when this side sends the
// initial offer. Transport-controller signals are wired to the session's
// OnTransportController* handlers.
WebRtcSession::WebRtcSession(webrtc::MediaControllerInterface* media_controller,
                             rtc::Thread* signaling_thread,
                             rtc::Thread* worker_thread,
                             cricket::PortAllocator* port_allocator)
    : signaling_thread_(signaling_thread),
      worker_thread_(worker_thread),
      port_allocator_(port_allocator),
      // RFC 3264: The numeric value of the session id and version in the
      // o line MUST be representable with a "64 bit signed integer".
      // Due to this constraint session id |sid_| is max limited to LLONG_MAX.
      sid_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)),
      transport_controller_(new cricket::TransportController(signaling_thread,
                                                             worker_thread,
                                                             port_allocator)),
      media_controller_(media_controller),
      channel_manager_(media_controller_->channel_manager()),
      ice_observer_(NULL),
      ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
      ice_connection_receiving_(true),
      older_version_remote_peer_(false),
      dtls_enabled_(false),
      data_channel_type_(cricket::DCT_NONE),
      ice_restart_latch_(new IceRestartAnswerLatch),
      metrics_observer_(NULL) {
  transport_controller_->SetIceRole(cricket::ICEROLE_CONTROLLED);
  transport_controller_->SignalConnectionState.connect(
      this, &WebRtcSession::OnTransportControllerConnectionState);
  transport_controller_->SignalReceiving.connect(
      this, &WebRtcSession::OnTransportControllerReceiving);
  transport_controller_->SignalGatheringState.connect(
      this, &WebRtcSession::OnTransportControllerGatheringState);
  transport_controller_->SignalCandidatesGathered.connect(
      this, &WebRtcSession::OnTransportControllerCandidatesGathered);
}
+
// Destroys any remaining media channels (firing the corresponding
// Signal*ChannelDestroyed first so observers can detach) and logs the
// session teardown. Must run on the signaling thread.
WebRtcSession::~WebRtcSession() {
  ASSERT(signaling_thread()->IsCurrent());
  // Destroy video_channel_ first since it may have a pointer to the
  // voice_channel_.
  if (video_channel_) {
    SignalVideoChannelDestroyed();
    channel_manager_->DestroyVideoChannel(video_channel_.release());
  }
  if (voice_channel_) {
    SignalVoiceChannelDestroyed();
    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
  }
  if (data_channel_) {
    SignalDataChannelDestroyed();
    channel_manager_->DestroyDataChannel(data_channel_.release());
  }

  LOG(LS_INFO) << "Session: " << id() << " is destroyed.";
}
+
// One-time session configuration from factory |options|, media |constraints|
// and the PeerConnection |rtc_configuration|. Decides DTLS vs. SDES, the
// data channel type (RTP/SCTP/none), audio/video engine options, and
// constructs the WebRtcSessionDescriptionFactory. Currently always returns
// true (no failure path is reported).
bool WebRtcSession::Initialize(
    const PeerConnectionFactoryInterface::Options& options,
    const MediaConstraintsInterface* constraints,
    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
    const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
  bundle_policy_ = rtc_configuration.bundle_policy;
  rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
  transport_controller_->SetSslMaxProtocolVersion(options.ssl_max_version);

  // Obtain a certificate from RTCConfiguration if any were provided (optional).
  rtc::scoped_refptr<rtc::RTCCertificate> certificate;
  if (!rtc_configuration.certificates.empty()) {
    // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of
    // just picking the first one. The decision should be made based on the DTLS
    // handshake. The DTLS negotiations need to know about all certificates.
    certificate = rtc_configuration.certificates[0];
  }

  SetIceConfig(ParseIceConfig(rtc_configuration));

  // TODO(perkj): Take |constraints| into consideration. Return false if not all
  // mandatory constraints can be fulfilled. Note that |constraints|
  // can be null.
  bool value;

  if (options.disable_encryption) {
    dtls_enabled_ = false;
  } else {
    // Enable DTLS by default if we have an identity store or a certificate.
    dtls_enabled_ = (dtls_identity_store || certificate);
    // |constraints| can override the default |dtls_enabled_| value.
    if (FindConstraint(constraints, MediaConstraintsInterface::kEnableDtlsSrtp,
                       &value, nullptr)) {
      dtls_enabled_ = value;
    }
  }

  // Enable creation of RTP data channels if the kEnableRtpDataChannels is set.
  // It takes precedence over the disable_sctp_data_channels
  // PeerConnectionFactoryInterface::Options.
  if (FindConstraint(
          constraints, MediaConstraintsInterface::kEnableRtpDataChannels,
          &value, NULL) && value) {
    LOG(LS_INFO) << "Allowing RTP data engine.";
    data_channel_type_ = cricket::DCT_RTP;
  } else {
    // DTLS has to be enabled to use SCTP.
    if (!options.disable_sctp_data_channels && dtls_enabled_) {
      LOG(LS_INFO) << "Allowing SCTP data engine.";
      data_channel_type_ = cricket::DCT_SCTP;
    }
  }

  // Find DSCP constraint.
  if (FindConstraint(
          constraints,
          MediaConstraintsInterface::kEnableDscp,
          &value, NULL)) {
    audio_options_.dscp.Set(value);
    video_options_.dscp.Set(value);
  }

  // Find Suspend Below Min Bitrate constraint.
  if (FindConstraint(
          constraints,
          MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
          &value,
          NULL)) {
    video_options_.suspend_below_min_bitrate.Set(value);
  }

  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kScreencastMinBitrate,
      &video_options_.screencast_min_bitrate);

  // Find constraints for cpu overuse detection.
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuUnderuseThreshold,
      &video_options_.cpu_underuse_threshold);
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuOveruseThreshold,
      &video_options_.cpu_overuse_threshold);
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuOveruseDetection,
      &video_options_.cpu_overuse_detection);
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuOveruseEncodeUsage,
      &video_options_.cpu_overuse_encode_usage);
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuUnderuseEncodeRsdThreshold,
      &video_options_.cpu_underuse_encode_rsd_threshold);
  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCpuOveruseEncodeRsdThreshold,
      &video_options_.cpu_overuse_encode_rsd_threshold);

  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kNumUnsignalledRecvStreams,
      &video_options_.unsignalled_recv_stream_limit);
  if (video_options_.unsignalled_recv_stream_limit.IsSet()) {
    int stream_limit;
    video_options_.unsignalled_recv_stream_limit.Get(&stream_limit);
    // Clamp the constraint value into [0, kMaxUnsignalledRecvStreams].
    stream_limit = std::min(kMaxUnsignalledRecvStreams, stream_limit);
    stream_limit = std::max(0, stream_limit);
    video_options_.unsignalled_recv_stream_limit.Set(stream_limit);
  }

  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kHighStartBitrate,
      &video_options_.video_start_bitrate);

  SetOptionFromOptionalConstraint(constraints,
      MediaConstraintsInterface::kCombinedAudioVideoBwe,
      &audio_options_.combined_audio_video_bwe);

  audio_options_.audio_jitter_buffer_max_packets.Set(
      rtc_configuration.audio_jitter_buffer_max_packets);

  audio_options_.audio_jitter_buffer_fast_accelerate.Set(
      rtc_configuration.audio_jitter_buffer_fast_accelerate);

  // Install the default video encoder configuration.
  const cricket::VideoCodec default_codec(
      JsepSessionDescription::kDefaultVideoCodecId,
      JsepSessionDescription::kDefaultVideoCodecName,
      JsepSessionDescription::kMaxVideoCodecWidth,
      JsepSessionDescription::kMaxVideoCodecHeight,
      JsepSessionDescription::kDefaultVideoCodecFramerate,
      JsepSessionDescription::kDefaultVideoCodecPreference);
  channel_manager_->SetDefaultVideoEncoderConfig(
      cricket::VideoEncoderConfig(default_codec));

  if (!dtls_enabled_) {
    // Construct with DTLS disabled.
    webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
        signaling_thread(), channel_manager_, this, id()));
  } else {
    // Construct with DTLS enabled.
    if (!certificate) {
      // Use the |dtls_identity_store| to generate a certificate.
      RTC_DCHECK(dtls_identity_store);
      webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
          signaling_thread(), channel_manager_, dtls_identity_store.Pass(),
          this, id()));
    } else {
      // Use the already generated certificate.
      webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
          signaling_thread(), channel_manager_, certificate, this, id()));
    }
  }

  webrtc_session_desc_factory_->SignalCertificateReady.connect(
      this, &WebRtcSession::OnCertificateReady);

  if (options.disable_encryption) {
    webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED);
  }
  // Apply the ICE transports policy as a candidate filter.
  port_allocator()->set_candidate_filter(
      ConvertIceTransportTypeToCandidateFilter(rtc_configuration.type));

  if (rtc_configuration.enable_localhost_ice_candidate) {
    port_allocator()->set_flags(
        port_allocator()->flags() |
        cricket::PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE);
  }

  return true;
}
+
// Moves the session to STATE_CLOSED and destroys all media channels.
void WebRtcSession::Close() {
  SetState(STATE_CLOSED);
  // NOTE(review): RemoveUnusedChannels(nullptr) is expected to drop every
  // channel — the ASSERTs below rely on that; confirm in its definition.
  RemoveUnusedChannels(nullptr);
  ASSERT(!voice_channel_);
  ASSERT(!video_channel_);
  ASSERT(!data_channel_);
}
+
// Forwards the SDES crypto policy to the session description factory.
void WebRtcSession::SetSdesPolicy(cricket::SecurePolicy secure_policy) {
  webrtc_session_desc_factory_->SetSdesPolicy(secure_policy);
}

// Returns the factory's current SDES crypto policy.
cricket::SecurePolicy WebRtcSession::SdesPolicy() const {
  return webrtc_session_desc_factory_->SdesPolicy();
}
+
// Retrieves the negotiated SSL/DTLS role into |*role|. Returns false until
// both the local and remote descriptions have been applied, since the role
// is a result of negotiation.
bool WebRtcSession::GetSslRole(rtc::SSLRole* role) {
  if (!local_desc_ || !remote_desc_) {
    LOG(LS_INFO) << "Local and Remote descriptions must be applied to get "
                 << "SSL Role of the session.";
    return false;
  }

  return transport_controller_->GetSslRole(role);
}
+
// Delegates offer creation to the session description factory; the result is
// reported through |observer|.
void WebRtcSession::CreateOffer(
    CreateSessionDescriptionObserver* observer,
    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
    const cricket::MediaSessionOptions& session_options) {
  webrtc_session_desc_factory_->CreateOffer(observer, options, session_options);
}

// Delegates answer creation to the session description factory; the result
// is reported through |observer|.
void WebRtcSession::CreateAnswer(
    CreateSessionDescriptionObserver* observer,
    const MediaConstraintsInterface* constraints,
    const cricket::MediaSessionOptions& session_options) {
  webrtc_session_desc_factory_->CreateAnswer(observer, constraints,
                                             session_options);
}
+
// Applies |desc| as the local description: validates it, updates the crypto
// policy on its contents, creates channels (for offers), advances the
// session state, and pushes down any pending remote candidates. Takes
// ownership of |desc| regardless of the result; on failure returns false and
// (when non-null) fills |*err_desc|.
bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc,
                                        std::string* err_desc) {
  ASSERT(signaling_thread()->IsCurrent());

  // Takes the ownership of |desc| regardless of the result.
  rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);

  // Validate SDP.
  if (!ValidateSessionDescription(desc, cricket::CS_LOCAL, err_desc)) {
    return false;
  }

  // Update the initial_offerer flag if this session is the initial_offerer.
  Action action = GetAction(desc->type());
  if (state() == STATE_INIT && action == kOffer) {
    initial_offerer_ = true;
    // The side that sends the first offer takes the controlling ICE role.
    transport_controller_->SetIceRole(cricket::ICEROLE_CONTROLLING);
  }

  cricket::SecurePolicy sdes_policy =
      webrtc_session_desc_factory_->SdesPolicy();
  cricket::CryptoType crypto_required = dtls_enabled_ ?
      cricket::CT_DTLS : (sdes_policy == cricket::SEC_REQUIRED ?
          cricket::CT_SDES : cricket::CT_NONE);
  // Update the MediaContentDescription crypto settings as per the policy set.
  UpdateSessionDescriptionSecurePolicy(crypto_required, desc->description());

  local_desc_.reset(desc_temp.release());

  // Transport and Media channels will be created only when offer is set.
  if (action == kOffer && !CreateChannels(local_desc_->description())) {
    // TODO(mallinath) - Handle CreateChannel failure, as new local description
    // is applied. Restore back to old description.
    return BadLocalSdp(desc->type(), kCreateChannelFailed, err_desc);
  }

  // Remove unused channels if MediaContentDescription is rejected.
  RemoveUnusedChannels(local_desc_->description());

  if (!UpdateSessionState(action, cricket::CS_LOCAL, err_desc)) {
    return false;
  }

  if (remote_desc_) {
    // Now that we have a local description, we can push down remote candidates.
    UseCandidatesInSessionDescription(remote_desc_.get());
  }

  if (error() != ERROR_NONE) {
    return BadLocalSdp(desc->type(), GetSessionErrorMsg(), err_desc);
  }
  return true;
}
+
// Applies |desc| as the remote description: validates it, creates channels
// (for offers), advances the session state, applies its candidates, and
// detects remote-initiated ICE restarts (carrying old candidates over when
// no restart occurred). Takes ownership of |desc| regardless of the result;
// on failure returns false and (when non-null) fills |*err_desc|.
bool WebRtcSession::SetRemoteDescription(SessionDescriptionInterface* desc,
                                         std::string* err_desc) {
  ASSERT(signaling_thread()->IsCurrent());

  // Takes the ownership of |desc| regardless of the result.
  rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);

  // Validate SDP.
  if (!ValidateSessionDescription(desc, cricket::CS_REMOTE, err_desc)) {
    return false;
  }

  // Keep the old description alive; it is needed for the ICE-restart check
  // and candidate carry-over below.
  rtc::scoped_ptr<SessionDescriptionInterface> old_remote_desc(
      remote_desc_.release());
  remote_desc_.reset(desc_temp.release());

  // Transport and Media channels will be created only when offer is set.
  Action action = GetAction(desc->type());
  if (action == kOffer && !CreateChannels(desc->description())) {
    // TODO(mallinath) - Handle CreateChannel failure, as new local description
    // is applied. Restore back to old description.
    return BadRemoteSdp(desc->type(), kCreateChannelFailed, err_desc);
  }

  // Remove unused channels if MediaContentDescription is rejected.
  RemoveUnusedChannels(desc->description());

  // NOTE: Candidates allocation will be initiated only when SetLocalDescription
  // is called.
  if (!UpdateSessionState(action, cricket::CS_REMOTE, err_desc)) {
    return false;
  }

  if (local_desc_ && !UseCandidatesInSessionDescription(desc)) {
    return BadRemoteSdp(desc->type(), kInvalidCandidates, err_desc);
  }

  // Check if this new SessionDescription contains new ice ufrag and password
  // that indicates the remote peer requests ice restart.
  bool ice_restart =
      ice_restart_latch_->CheckForRemoteIceRestart(old_remote_desc.get(), desc);
  // We retain all received candidates only if ICE is not restarted.
  // When ICE is restarted, all previous candidates belong to an old generation
  // and should not be kept.
  // TODO(deadbeef): This goes against the W3C spec which says the remote
  // description should only contain candidates from the last set remote
  // description plus any candidates added since then. We should remove this
  // once we're sure it won't break anything.
  if (!ice_restart) {
    WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
        old_remote_desc.get(), desc);
  }

  if (error() != ERROR_NONE) {
    return BadRemoteSdp(desc->type(), GetSessionErrorMsg(), err_desc);
  }

  // Set the ICE connection state to connecting since the connection may
  // become writable with peer reflexive candidates before any remote candidate
  // is signaled.
  // TODO(pthatcher): This is a short-term solution for crbug/446908. A real fix
  // is to have a new signal the indicates a change in checking state from the
  // transport and expose a new checking() member from transport that can be
  // read to determine the current checking state. The existing SignalConnecting
  // actually means "gathering candidates", so cannot be used here.
  if (desc->type() != SessionDescriptionInterface::kOffer &&
      ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew) {
    SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking);
  }
  return true;
}
+
+// Logs a session state transition (session id, old and new state) at INFO
+// level. Called from SetState() just before |state_| is updated.
+void WebRtcSession::LogState(State old_state, State new_state) {
+  LOG(LS_INFO) << "Session:" << id()
+               << " Old state:" << GetStateString(old_state)
+               << " New state:" << GetStateString(new_state);
+}
+
+// Moves the session to |state| if it differs from the current state, logging
+// the transition and notifying observers via SignalState. Signaling thread
+// only.
+void WebRtcSession::SetState(State state) {
+  ASSERT(signaling_thread_->IsCurrent());
+  if (state != state_) {
+    LogState(state_, state);
+    state_ = state;
+    SignalState(this, state_);
+  }
+}
+
+// Records the session-level error and its description. Only the first
+// distinct error is stored; setting the same error again is a no-op.
+// Signaling thread only.
+void WebRtcSession::SetError(Error error, const std::string& error_desc) {
+  ASSERT(signaling_thread_->IsCurrent());
+  if (error != error_) {
+    error_ = error;
+    error_desc_ = error_desc;
+  }
+}
+
+// Drives the offer/answer state machine after a local or remote description
+// has been applied: pushes the transport description down to the transport
+// controller, advances |state_|, then pushes the media description down to
+// the channels. On any failure the appropriate Bad*Sdp helper fills
+// |err_desc| and returns false. The exact ordering of pushdown / SetState /
+// EnableChannels calls below is deliberate; do not reorder.
+bool WebRtcSession::UpdateSessionState(
+    Action action, cricket::ContentSource source,
+    std::string* err_desc) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // If there's already a pending error then no state transition should happen.
+  // But all call-sites should be verifying this before calling us!
+  ASSERT(error() == ERROR_NONE);
+  std::string td_err;
+  if (action == kOffer) {
+    if (!PushdownTransportDescription(source, cricket::CA_OFFER, &td_err)) {
+      return BadOfferSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    SetState(source == cricket::CS_LOCAL ? STATE_SENTOFFER
+                                         : STATE_RECEIVEDOFFER);
+    if (!PushdownMediaDescription(cricket::CA_OFFER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadOfferSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  } else if (action == kPrAnswer) {
+    if (!PushdownTransportDescription(source, cricket::CA_PRANSWER, &td_err)) {
+      return BadPranswerSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    // A provisional answer allows media to start flowing early.
+    EnableChannels();
+    SetState(source == cricket::CS_LOCAL ? STATE_SENTPRANSWER
+                                         : STATE_RECEIVEDPRANSWER);
+    if (!PushdownMediaDescription(cricket::CA_PRANSWER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadPranswerSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  } else if (action == kAnswer) {
+    if (!PushdownTransportDescription(source, cricket::CA_ANSWER, &td_err)) {
+      return BadAnswerSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    const cricket::ContentGroup* local_bundle =
+        local_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    const cricket::ContentGroup* remote_bundle =
+        remote_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    // Only enable BUNDLE if both sides negotiated it.
+    if (local_bundle && remote_bundle) {
+      // The answerer decides the transport to bundle on
+      const cricket::ContentGroup* answer_bundle =
+          (source == cricket::CS_LOCAL ? local_bundle : remote_bundle);
+      if (!EnableBundle(*answer_bundle)) {
+        LOG(LS_WARNING) << "Failed to enable BUNDLE.";
+        return BadAnswerSdp(source, kEnableBundleFailed, err_desc);
+      }
+    }
+    EnableChannels();
+    SetState(STATE_INPROGRESS);
+    if (!PushdownMediaDescription(cricket::CA_ANSWER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadAnswerSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  }
+  return true;
+}
+
+// Maps an SDP type string ("offer"/"pranswer"/"answer") onto the internal
+// Action enum. Unknown strings assert in debug builds and fall back to
+// kOffer in release builds.
+WebRtcSession::Action WebRtcSession::GetAction(const std::string& type) {
+  if (type == SessionDescriptionInterface::kOffer) {
+    return WebRtcSession::kOffer;
+  } else if (type == SessionDescriptionInterface::kPrAnswer) {
+    return WebRtcSession::kPrAnswer;
+  } else if (type == SessionDescriptionInterface::kAnswer) {
+    return WebRtcSession::kAnswer;
+  }
+  ASSERT(false && "unknown action type");
+  return WebRtcSession::kOffer;
+}
+
+// Pushes the current local or remote media description down to each existing
+// channel (voice, video, data). A missing channel counts as success; the
+// first channel that rejects the description stops the sequence and returns
+// false with |err| filled in by the channel.
+bool WebRtcSession::PushdownMediaDescription(
+    cricket::ContentAction action,
+    cricket::ContentSource source,
+    std::string* err) {
+  auto set_content = [this, action, source, err](cricket::BaseChannel* ch) {
+    if (!ch) {
+      return true;
+    } else if (source == cricket::CS_LOCAL) {
+      return ch->PushdownLocalDescription(local_desc_->description(), action,
+                                          err);
+    } else {
+      return ch->PushdownRemoteDescription(remote_desc_->description(), action,
+                                           err);
+    }
+  };
+
+  return (set_content(voice_channel()) &&
+          set_content(video_channel()) &&
+          set_content(data_channel()));
+}
+
+// Dispatches to the local or remote transport-description pushdown depending
+// on |source|. Assumes the corresponding description (local_desc_ /
+// remote_desc_) has already been set by the caller.
+bool WebRtcSession::PushdownTransportDescription(cricket::ContentSource source,
+                                                 cricket::ContentAction action,
+                                                 std::string* error_desc) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (source == cricket::CS_LOCAL) {
+    return PushdownLocalTransportDescription(local_desc_->description(), action,
+                                             error_desc);
+  }
+  return PushdownRemoteTransportDescription(remote_desc_->description(), action,
+                                            error_desc);
+}
+
+// Applies every transport description in |sdesc| to the transport controller
+// as the local description. Stops at the first transport that rejects it
+// (|err| is filled in by the controller) and returns false; returns false
+// for a null |sdesc| as well.
+bool WebRtcSession::PushdownLocalTransportDescription(
+    const SessionDescription* sdesc,
+    cricket::ContentAction action,
+    std::string* err) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (sdesc == nullptr) {
+    return false;
+  }
+
+  const auto& infos = sdesc->transport_infos();
+  bool ok = true;
+  // Short-circuit on the first failing transport, like the early return did.
+  for (size_t i = 0; ok && i < infos.size(); ++i) {
+    ok = transport_controller_->SetLocalTransportDescription(
+        infos[i].content_name, infos[i].description, action, err);
+  }
+  return ok;
+}
+
+// Applies every transport description in |sdesc| to the transport controller
+// as the remote description. Stops at the first transport that rejects it
+// (|err| is filled in by the controller) and returns false; returns false
+// for a null |sdesc| as well.
+bool WebRtcSession::PushdownRemoteTransportDescription(
+    const SessionDescription* sdesc,
+    cricket::ContentAction action,
+    std::string* err) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (sdesc == nullptr) {
+    return false;
+  }
+
+  const auto& infos = sdesc->transport_infos();
+  bool ok = true;
+  // Short-circuit on the first failing transport, like the early return did.
+  for (size_t i = 0; ok && i < infos.size(); ++i) {
+    ok = transport_controller_->SetRemoteTransportDescription(
+        infos[i].content_name, infos[i].description, action, err);
+  }
+  return ok;
+}
+
+// Copies the transport description for |content_name| out of |description|
+// into |*tdesc|. Returns false when either pointer is null or the content
+// has no transport info.
+bool WebRtcSession::GetTransportDescription(
+    const SessionDescription* description,
+    const std::string& content_name,
+    cricket::TransportDescription* tdesc) {
+  if (description == nullptr || tdesc == nullptr) {
+    return false;
+  }
+  const TransportInfo* info =
+      description->GetTransportInfoByName(content_name);
+  if (info == nullptr) {
+    return false;
+  }
+  *tdesc = info->description;
+  return true;
+}
+
+// Collects transport stats for all existing channels into |stats|; a missing
+// channel is not an error. Signaling thread only.
+bool WebRtcSession::GetTransportStats(SessionStats* stats) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return (GetChannelTransportStats(voice_channel(), stats) &&
+          GetChannelTransportStats(video_channel(), stats) &&
+          GetChannelTransportStats(data_channel(), stats));
+}
+
+// Records the content->transport mapping for |ch| and fetches that
+// transport's stats once (multiple contents may share a transport under
+// BUNDLE, so already-collected transports are skipped). A null |ch| counts
+// as success.
+bool WebRtcSession::GetChannelTransportStats(cricket::BaseChannel* ch,
+                                             SessionStats* stats) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!ch) {
+    // Not using this channel.
+    return true;
+  }
+
+  const std::string& content_name = ch->content_name();
+  const std::string& transport_name = ch->transport_name();
+  stats->proxy_to_transport[content_name] = transport_name;
+  if (stats->transport_stats.find(transport_name) !=
+      stats->transport_stats.end()) {
+    // Transport stats already done for this transport.
+    return true;
+  }
+
+  cricket::TransportStats tstats;
+  if (!transport_controller_->GetStats(transport_name, &tstats)) {
+    return false;
+  }
+
+  stats->transport_stats[transport_name] = tstats;
+  return true;
+}
+
+// Fetches the local DTLS certificate for |transport_name| from the transport
+// controller. Signaling thread only.
+bool WebRtcSession::GetLocalCertificate(
+    const std::string& transport_name,
+    rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return transport_controller_->GetLocalCertificate(transport_name,
+                                                    certificate);
+}
+
+// Fetches the remote peer's SSL certificate for |transport_name|.
+// NOTE(review): |cert| is an out-param raw pointer; ownership semantics are
+// defined by TransportController::GetRemoteSSLCertificate — presumably the
+// caller takes ownership; verify at the call sites.
+bool WebRtcSession::GetRemoteSSLCertificate(const std::string& transport_name,
+                                            rtc::SSLCertificate** cert) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return transport_controller_->GetRemoteSSLCertificate(transport_name, cert);
+}
+
+// Returns the channel (voice, video or data) whose content name matches
+// |content_name|, or null when no channel uses that content.
+cricket::BaseChannel* WebRtcSession::GetChannel(
+    const std::string& content_name) {
+  cricket::BaseChannel* channels[] = {voice_channel(), video_channel(),
+                                      data_channel()};
+  for (cricket::BaseChannel* ch : channels) {
+    if (ch && ch->content_name() == content_name) {
+      return ch;
+    }
+  }
+  return nullptr;
+}
+
+// Enables BUNDLE: moves every channel whose content is listed in |bundle|
+// onto the transport of the group's first content. Returns false when the
+// group is empty or any channel fails to switch transports.
+bool WebRtcSession::EnableBundle(const cricket::ContentGroup& bundle) {
+  const std::string* first_content_name = bundle.FirstContentName();
+  if (!first_content_name) {
+    LOG(LS_WARNING) << "Tried to BUNDLE with no contents.";
+    return false;
+  }
+  const std::string& transport_name = *first_content_name;
+
+  // The lambda is only invoked synchronously below, so capturing by
+  // reference is safe. The previous version captured |bundle| and
+  // |transport_name| by value (copying a ContentGroup and a std::string per
+  // call) and also captured an unused |first_channel| local.
+  auto maybe_set_transport = [&](cricket::BaseChannel* ch) {
+    if (!ch || !bundle.HasContentName(ch->content_name())) {
+      return true;
+    }
+
+    if (ch->transport_name() == transport_name) {
+      LOG(LS_INFO) << "BUNDLE already enabled for " << ch->content_name()
+                   << " on " << transport_name << ".";
+      return true;
+    }
+
+    if (!ch->SetTransport(transport_name)) {
+      LOG(LS_WARNING) << "Failed to enable BUNDLE for " << ch->content_name();
+      return false;
+    }
+    LOG(LS_INFO) << "Enabled BUNDLE for " << ch->content_name() << " on "
+                 << transport_name << ".";
+    return true;
+  };
+
+  // Short-circuits on the first failure, as before.
+  return maybe_set_transport(voice_channel()) &&
+         maybe_set_transport(video_channel()) &&
+         maybe_set_transport(data_channel());
+}
+
+// Handles a trickled remote ICE candidate: validates it, stores it in the
+// remote session description, and hands it to the transport controller when
+// the session is ready to use it. Returns false on invalid input.
+bool WebRtcSession::ProcessIceMessage(const IceCandidateInterface* candidate) {
+  if (!remote_desc_) {
+    LOG(LS_ERROR) << "ProcessIceMessage: ICE candidates can't be added "
+                  << "without any remote session description.";
+    return false;
+  }
+
+  if (!candidate) {
+    LOG(LS_ERROR) << "ProcessIceMessage: Candidate is NULL.";
+    return false;
+  }
+
+  bool valid = false;
+  // nullptr instead of NULL, matching the style used elsewhere in this file.
+  bool ready = ReadyToUseRemoteCandidate(candidate, nullptr, &valid);
+  if (!valid) {
+    return false;
+  }
+
+  // Add this candidate to the remote session description.
+  if (!remote_desc_->AddCandidate(candidate)) {
+    LOG(LS_ERROR) << "ProcessIceMessage: Candidate cannot be used.";
+    return false;
+  }
+
+  // Valid but not yet usable candidates (e.g. before the answer is applied)
+  // are kept in the description and applied later.
+  if (!ready) {
+    LOG(LS_INFO) << "ProcessIceMessage: Not ready to use candidate.";
+    return true;
+  }
+  return UseCandidate(candidate);
+}
+
+// Translates the PeerConnection ICE transports policy (all/relay/...) into a
+// candidate filter on the port allocator.
+bool WebRtcSession::SetIceTransports(
+    PeerConnectionInterface::IceTransportsType type) {
+  return port_allocator()->set_candidate_filter(
+      ConvertIceTransportTypeToCandidateFilter(type));
+}
+
+// Converts the subset of RTCConfiguration relevant to ICE (receiving timeout
+// and continual-gathering policy) into a cricket::IceConfig.
+cricket::IceConfig WebRtcSession::ParseIceConfig(
+    const PeerConnectionInterface::RTCConfiguration& config) const {
+  cricket::IceConfig ice_config;
+  ice_config.receiving_timeout_ms = config.ice_connection_receiving_timeout;
+  ice_config.gather_continually = (config.continual_gathering_policy ==
+                                   PeerConnectionInterface::GATHER_CONTINUALLY);
+  return ice_config;
+}
+
+// Forwards the ICE configuration to the transport controller.
+void WebRtcSession::SetIceConfig(const cricket::IceConfig& config) {
+  transport_controller_->SetIceConfig(config);
+}
+
+// Asks the transport controller to begin ICE candidate gathering if it has
+// not already started.
+void WebRtcSession::MaybeStartGathering() {
+  transport_controller_->MaybeStartGathering();
+}
+
+// Looks up the local track id carrying |ssrc| in the current local
+// description. Returns false when there is no local description or no track
+// with that SSRC.
+bool WebRtcSession::GetLocalTrackIdBySsrc(uint32_t ssrc,
+                                          std::string* track_id) {
+  return local_desc_ &&
+         webrtc::GetTrackIdBySsrc(local_desc_->description(), ssrc, track_id);
+}
+
+// Looks up the remote track id carrying |ssrc| in the current remote
+// description. Returns false when there is no remote description or no track
+// with that SSRC.
+bool WebRtcSession::GetRemoteTrackIdBySsrc(uint32_t ssrc,
+                                           std::string* track_id) {
+  return remote_desc_ &&
+         webrtc::GetTrackIdBySsrc(remote_desc_->description(), ssrc, track_id);
+}
+
+// Builds the "Called in wrong state: <state>" error string used by the SDP
+// validation helpers.
+std::string WebRtcSession::BadStateErrMsg(State state) {
+  std::ostringstream desc;
+  desc << "Called in wrong state: " << GetStateString(state);
+  return desc.str();
+}
+
+// Enables or mutes playout of the remote audio stream |ssrc| by setting its
+// output volume to 1 or 0. Signaling thread only.
+void WebRtcSession::SetAudioPlayout(uint32_t ssrc, bool enable) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioPlayout: No audio channel exists.";
+    return;
+  }
+  if (!voice_channel_->SetOutputVolume(ssrc, enable ? 1 : 0)) {
+    // Allow SetOutputVolume to fail if |enable| is false but assert
+    // otherwise. This is the normal case when the underlying media channel
+    // has already been deleted.
+    ASSERT(enable == false);
+  }
+}
+
+// Enables/disables sending on the local audio stream |ssrc| and applies
+// |options| and the optional |renderer| (audio source). Signaling thread
+// only.
+void WebRtcSession::SetAudioSend(uint32_t ssrc,
+                                 bool enable,
+                                 const cricket::AudioOptions& options,
+                                 cricket::AudioRenderer* renderer) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
+    return;
+  }
+  if (!voice_channel_->SetAudioSend(ssrc, enable, &options, renderer)) {
+    LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc;
+  }
+}
+
+// Sets the playout volume (0..10) for the remote audio stream |ssrc|.
+// Failure to set the volume on an existing channel is a programming error.
+// Signaling thread only.
+void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
+  ASSERT(signaling_thread()->IsCurrent());
+  ASSERT(volume >= 0 && volume <= 10);
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioPlayoutVolume: No audio channel exists.";
+    return;
+  }
+
+  if (!voice_channel_->SetOutputVolume(ssrc, volume)) {
+    ASSERT(false);
+  }
+}
+
+// Attaches |camera| as the capturer for the local video stream |ssrc|.
+// Passing a null |camera| detaches the current capturer. Returns false when
+// video is not negotiated or the ssrc is unknown. Signaling thread only.
+bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
+                                     cricket::VideoCapturer* camera) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  if (!video_channel_) {
+    // |video_channel_| doesn't exist. Probably because the remote end doesn't
+    // support video.
+    LOG(LS_WARNING) << "Video not used in this call.";
+    return false;
+  }
+  if (!video_channel_->SetCapturer(ssrc, camera)) {
+    // Allow SetCapturer to fail if |camera| is NULL but assert otherwise.
+    // This is the normal case when the underlying media channel has already
+    // been deleted.
+    ASSERT(camera == NULL);
+    return false;
+  }
+  return true;
+}
+
+// Attaches (|enable| true) or detaches the renderer for the remote video
+// stream |ssrc|. Signaling thread only.
+void WebRtcSession::SetVideoPlayout(uint32_t ssrc,
+                                    bool enable,
+                                    cricket::VideoRenderer* renderer) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!video_channel_) {
+    LOG(LS_WARNING) << "SetVideoPlayout: No video channel exists.";
+    return;
+  }
+  if (!video_channel_->SetRenderer(ssrc, enable ? renderer : NULL)) {
+    // Allow SetRenderer to fail if |renderer| is NULL but assert otherwise.
+    // This is the normal case when the underlying media channel has already
+    // been deleted.
+    ASSERT(renderer == NULL);
+  }
+}
+
+// Enables/disables sending on the local video stream |ssrc| with optional
+// send |options|. Signaling thread only.
+void WebRtcSession::SetVideoSend(uint32_t ssrc,
+                                 bool enable,
+                                 const cricket::VideoOptions* options) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!video_channel_) {
+    LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
+    return;
+  }
+  if (!video_channel_->SetVideoSend(ssrc, enable, options)) {
+    // Allow SetVideoSend to fail if |enable| is false but assert otherwise.
+    // This is the normal case when the underlying media channel has already
+    // been deleted.
+    ASSERT(enable == false);
+  }
+}
+
+// Returns true when DTMF can be sent for |track_id|: the audio channel
+// exists, the track has a send SSRC in the local description, and the
+// channel's negotiated codecs support DTMF. Signaling thread only.
+bool WebRtcSession::CanInsertDtmf(const std::string& track_id) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
+    return false;
+  }
+  uint32_t send_ssrc = 0;
+  // The Dtmf is negotiated per channel not ssrc, so we only check if the ssrc
+  // exists.
+  if (!local_desc_ ||
+      !GetAudioSsrcByTrackId(local_desc_->description(), track_id,
+                             &send_ssrc)) {
+    LOG(LS_ERROR) << "CanInsertDtmf: Track does not exist: " << track_id;
+    return false;
+  }
+  return voice_channel_->CanInsertDtmf();
+}
+
+// Sends DTMF event |code| for |duration| ms on the audio track |track_id|.
+// Callers are expected to have checked CanInsertDtmf() first, hence the
+// VERIFY on the track lookup. Signaling thread only.
+bool WebRtcSession::InsertDtmf(const std::string& track_id,
+                               int code, int duration) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
+    return false;
+  }
+  uint32_t send_ssrc = 0;
+  if (!VERIFY(local_desc_ && GetAudioSsrcByTrackId(local_desc_->description(),
+                                                   track_id, &send_ssrc))) {
+    LOG(LS_ERROR) << "InsertDtmf: Track does not exist: " << track_id;
+    return false;
+  }
+  if (!voice_channel_->InsertDtmf(send_ssrc, code, duration,
+                                  cricket::DF_SEND)) {
+    LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
+    return false;
+  }
+  return true;
+}
+
+// Exposes the voice-channel-destroyed signal so DTMF senders can invalidate
+// themselves when the channel goes away.
+sigslot::signal0<>* WebRtcSession::GetOnDestroyedSignal() {
+  return &SignalVoiceChannelDestroyed;
+}
+
+// Forwards an outgoing data-channel message to the underlying data channel.
+// Fails (with an error log) when no data channel has been negotiated.
+bool WebRtcSession::SendData(const cricket::SendDataParams& params,
+                             const rtc::Buffer& payload,
+                             cricket::SendDataResult* result) {
+  if (data_channel_) {
+    return data_channel_->SendData(params, payload, result);
+  }
+  LOG(LS_ERROR) << "SendData called when data_channel_ is NULL.";
+  return false;
+}
+
+// Wires |webrtc_data_channel| to the underlying cricket data channel's
+// ready-to-send, data-received and remote-close signals. Returns false when
+// no data channel has been negotiated.
+bool WebRtcSession::ConnectDataChannel(DataChannel* webrtc_data_channel) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "ConnectDataChannel called when data_channel_ is NULL.";
+    return false;
+  }
+  data_channel_->SignalReadyToSendData.connect(webrtc_data_channel,
+                                               &DataChannel::OnChannelReady);
+  data_channel_->SignalDataReceived.connect(webrtc_data_channel,
+                                            &DataChannel::OnDataReceived);
+  data_channel_->SignalStreamClosedRemotely.connect(
+      webrtc_data_channel, &DataChannel::OnStreamClosedRemotely);
+  return true;
+}
+
+// Undoes ConnectDataChannel(): detaches |webrtc_data_channel| from all three
+// underlying data-channel signals.
+void WebRtcSession::DisconnectDataChannel(DataChannel* webrtc_data_channel) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "DisconnectDataChannel called when data_channel_ is NULL.";
+    return;
+  }
+  data_channel_->SignalReadyToSendData.disconnect(webrtc_data_channel);
+  data_channel_->SignalDataReceived.disconnect(webrtc_data_channel);
+  data_channel_->SignalStreamClosedRemotely.disconnect(webrtc_data_channel);
+}
+
+// Registers send and receive streams for SCTP stream id |sid| on the data
+// channel.
+void WebRtcSession::AddSctpDataStream(int sid) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "AddDataChannelStreams called when data_channel_ is NULL.";
+    return;
+  }
+  data_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(sid));
+  data_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(sid));
+}
+
+// Removes the send and receive streams for SCTP stream id |sid| from the
+// data channel.
+void WebRtcSession::RemoveSctpDataStream(int sid) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "RemoveDataChannelStreams called when data_channel_ is "
+                  << "NULL.";
+    return;
+  }
+  data_channel_->RemoveRecvStream(sid);
+  data_channel_->RemoveSendStream(sid);
+}
+
+// True when a data channel exists and reports it can accept outgoing data.
+bool WebRtcSession::ReadyToSendData() const {
+  if (!data_channel_) {
+    return false;
+  }
+  return data_channel_->ready_to_send_data();
+}
+
+// Accessor: the negotiated data channel type (none/RTP/SCTP).
+cricket::DataChannelType WebRtcSession::data_channel_type() const {
+  return data_channel_type_;
+}
+
+// True when an ICE restart has been requested but not yet completed.
+bool WebRtcSession::IceRestartPending() const {
+  return ice_restart_latch_->Get();
+}
+
+// Clears any pending ICE restart request.
+void WebRtcSession::ResetIceRestartLatch() {
+  ice_restart_latch_->Reset();
+}
+
+// Called when the (possibly asynchronously generated) DTLS certificate is
+// available; installs it on the transport controller.
+void WebRtcSession::OnCertificateReady(
+    const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+  transport_controller_->SetLocalCertificate(certificate);
+}
+
+// Test-only: whether the session description factory is still waiting for
+// certificate generation.
+bool WebRtcSession::waiting_for_certificate_for_testing() const {
+  return webrtc_session_desc_factory_->waiting_for_certificate_for_testing();
+}
+
+// Test-only: the certificate currently installed on the transport
+// controller.
+const rtc::scoped_refptr<rtc::RTCCertificate>&
+WebRtcSession::certificate_for_testing() {
+  return transport_controller_->certificate_for_testing();
+}
+
+// Transitions the ICE connection state and notifies the ICE observer. The
+// switch below is a debug-only transition table: each ASSERT documents which
+// next states are legal from the current one, and compiles away in release
+// builds.
+void WebRtcSession::SetIceConnectionState(
+    PeerConnectionInterface::IceConnectionState state) {
+  if (ice_connection_state_ == state) {
+    return;
+  }
+
+  // ASSERT that the requested transition is allowed. Note that
+  // WebRtcSession does not implement "kIceConnectionClosed" (that is handled
+  // within PeerConnection). This switch statement should compile away when
+  // ASSERTs are disabled.
+  LOG(LS_INFO) << "Changing IceConnectionState " << ice_connection_state_
+               << " => " << state;
+  switch (ice_connection_state_) {
+    case PeerConnectionInterface::kIceConnectionNew:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionChecking);
+      break;
+    case PeerConnectionInterface::kIceConnectionChecking:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionFailed ||
+             state == PeerConnectionInterface::kIceConnectionConnected);
+      break;
+    case PeerConnectionInterface::kIceConnectionConnected:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionDisconnected ||
+             state == PeerConnectionInterface::kIceConnectionChecking ||
+             state == PeerConnectionInterface::kIceConnectionCompleted);
+      break;
+    case PeerConnectionInterface::kIceConnectionCompleted:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionConnected ||
+             state == PeerConnectionInterface::kIceConnectionDisconnected);
+      break;
+    case PeerConnectionInterface::kIceConnectionFailed:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionNew);
+      break;
+    case PeerConnectionInterface::kIceConnectionDisconnected:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionChecking ||
+             state == PeerConnectionInterface::kIceConnectionConnected ||
+             state == PeerConnectionInterface::kIceConnectionCompleted ||
+             state == PeerConnectionInterface::kIceConnectionFailed);
+      break;
+    case PeerConnectionInterface::kIceConnectionClosed:
+      ASSERT(false);
+      break;
+    default:
+      ASSERT(false);
+      break;
+  }
+
+  ice_connection_state_ = state;
+  if (ice_observer_) {
+    ice_observer_->OnIceConnectionChange(ice_connection_state_);
+  }
+}
+
+// Maps aggregate transport-controller ICE state changes onto the
+// PeerConnection ICE connection state, inserting the intermediate
+// "connected" state when jumping straight from checking to completed.
+void WebRtcSession::OnTransportControllerConnectionState(
+    cricket::IceConnectionState state) {
+  switch (state) {
+    case cricket::kIceConnectionConnecting:
+      // If the current state is Connected or Completed, then there were
+      // writable channels but now there are not, so the next state must
+      // be Disconnected.
+      // kIceConnectionConnecting is currently used as the default,
+      // un-connected state by the TransportController, so its only use is
+      // detecting disconnections.
+      if (ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionConnected ||
+          ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionCompleted) {
+        SetIceConnectionState(
+            PeerConnectionInterface::kIceConnectionDisconnected);
+      }
+      break;
+    case cricket::kIceConnectionFailed:
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionFailed);
+      break;
+    case cricket::kIceConnectionConnected:
+      LOG(LS_INFO) << "Changing to ICE connected state because "
+                   << "all transports are writable.";
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      break;
+    case cricket::kIceConnectionCompleted:
+      LOG(LS_INFO) << "Changing to ICE completed state because "
+                   << "all transports are complete.";
+      if (ice_connection_state_ !=
+          PeerConnectionInterface::kIceConnectionConnected) {
+        // If jumping directly from "checking" to "connected",
+        // signal "connected" first.
+        SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      }
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionCompleted);
+      // Report transport stats once the connection is fully established.
+      if (metrics_observer_) {
+        ReportTransportStats();
+      }
+      break;
+    default:
+      ASSERT(false);
+  }
+}
+
+// Transport-controller callback: forwards the aggregate receiving flag.
+void WebRtcSession::OnTransportControllerReceiving(bool receiving) {
+  SetIceConnectionReceiving(receiving);
+}
+
+// Updates the "is receiving media" flag and notifies the ICE observer only
+// on actual changes.
+void WebRtcSession::SetIceConnectionReceiving(bool receiving) {
+  if (ice_connection_receiving_ == receiving) {
+    return;
+  }
+  ice_connection_receiving_ = receiving;
+  if (ice_observer_) {
+    ice_observer_->OnIceConnectionReceivingChange(receiving);
+  }
+}
+
+// Transport-controller callback for newly gathered local candidates: wraps
+// each one in a JsepIceCandidate, surfaces it to the ICE observer, and adds
+// it to the local description so late SDP consumers see it.
+void WebRtcSession::OnTransportControllerCandidatesGathered(
+    const std::string& transport_name,
+    const cricket::Candidates& candidates) {
+  ASSERT(signaling_thread()->IsCurrent());
+  int sdp_mline_index;
+  if (!GetLocalCandidateMediaIndex(transport_name, &sdp_mline_index)) {
+    LOG(LS_ERROR) << "OnTransportControllerCandidatesGathered: content name "
+                  << transport_name << " not found";
+    return;
+  }
+
+  // Range-for instead of the legacy const_iterator loop (this file already
+  // uses range-for elsewhere).
+  for (const cricket::Candidate& c : candidates) {
+    // Use transport_name as the candidate media id.
+    JsepIceCandidate candidate(transport_name, sdp_mline_index, c);
+    if (ice_observer_) {
+      ice_observer_->OnIceCandidate(&candidate);
+    }
+    if (local_desc_) {
+      local_desc_->AddCandidate(&candidate);
+    }
+  }
+}
+
+// Enables any existing voice, video and data channels that are not yet
+// enabled, allowing media to start flowing once an (pr)answer is applied.
+void WebRtcSession::EnableChannels() {
+  if (voice_channel_ && !voice_channel_->enabled())
+    voice_channel_->Enable(true);
+
+  if (video_channel_ && !video_channel_->enabled())
+    video_channel_->Enable(true);
+
+  if (data_channel_ && !data_channel_->enabled())
+    data_channel_->Enable(true);
+}
+
+// Returns the media index (m-line index) for a local ice candidate given the
+// content name. False when there is no local description, |sdp_mline_index|
+// is null, or the content name is not found.
+bool WebRtcSession::GetLocalCandidateMediaIndex(const std::string& content_name,
+                                                int* sdp_mline_index) {
+  if (!local_desc_ || !sdp_mline_index) {
+    return false;
+  }
+
+  bool content_found = false;
+  const ContentInfos& contents = local_desc_->description()->contents();
+  for (size_t index = 0; index < contents.size(); ++index) {
+    if (contents[index].name == content_name) {
+      *sdp_mline_index = static_cast<int>(index);
+      content_found = true;
+      break;
+    }
+  }
+  return content_found;
+}
+
+// Applies every candidate embedded in |remote_desc| to the transports.
+// Invalid candidates abort with false; valid-but-not-yet-usable candidates
+// are skipped (they stay in the description for later). A null description
+// is treated as success.
+bool WebRtcSession::UseCandidatesInSessionDescription(
+    const SessionDescriptionInterface* remote_desc) {
+  if (!remote_desc) {
+    return true;
+  }
+  bool ret = true;
+
+  for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) {
+    const IceCandidateCollection* candidates = remote_desc->candidates(m);
+    for (size_t n = 0; n < candidates->count(); ++n) {
+      const IceCandidateInterface* candidate = candidates->at(n);
+      bool valid = false;
+      if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) {
+        if (valid) {
+          LOG(LS_INFO) << "UseCandidatesInSessionDescription: Not ready to use "
+                       << "candidate.";
+        }
+        continue;
+      }
+      ret = UseCandidate(candidate);
+      if (!ret) {
+        break;
+      }
+    }
+  }
+  return ret;
+}
+
+// Hands a single remote candidate to the transport controller for
+// connectivity checks, advancing the ICE state to Checking when this is the
+// first (or a post-disconnect) remote candidate. Returns false only for an
+// out-of-range m-line index; submission errors are logged and ignored.
+bool WebRtcSession::UseCandidate(
+    const IceCandidateInterface* candidate) {
+  size_t mediacontent_index = static_cast<size_t>(candidate->sdp_mline_index());
+  size_t remote_content_size = remote_desc_->description()->contents().size();
+  if (mediacontent_index >= remote_content_size) {
+    LOG(LS_ERROR)
+        << "UseRemoteCandidateInSession: Invalid candidate media index.";
+    return false;
+  }
+
+  // Const reference: the previous version copied the ContentInfo (including
+  // its name string) just to read |name|.
+  const cricket::ContentInfo& content =
+      remote_desc_->description()->contents()[mediacontent_index];
+  std::vector<cricket::Candidate> candidates;
+  candidates.push_back(candidate->candidate());
+  // Hand the remote candidate to the transport controller.
+  std::string error;
+  if (transport_controller_->AddRemoteCandidates(content.name, candidates,
+                                                 &error)) {
+    // Candidates successfully submitted for checking.
+    if (ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew ||
+        ice_connection_state_ ==
+            PeerConnectionInterface::kIceConnectionDisconnected) {
+      // If state is New, then the session has just gotten its first remote ICE
+      // candidates, so go to Checking.
+      // If state is Disconnected, the session is re-using old candidates or
+      // receiving additional ones, so go to Checking.
+      // If state is Connected, stay Connected.
+      // TODO(bemasc): If state is Connected, and the new candidates are for a
+      // newly added transport, then the state actually _should_ move to
+      // checking. Add a way to distinguish that case.
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking);
+    }
+    // TODO(bemasc): If state is Completed, go back to Connected.
+  } else {
+    if (!error.empty()) {
+      LOG(LS_WARNING) << error;
+    }
+  }
+  return true;
+}
+
+// Destroys any channel whose content is absent from, or rejected in, |desc|,
+// firing the corresponding destroyed signal first so observers can detach.
+void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
+  // Destroy video_channel_ first since it may have a pointer to the
+  // voice_channel_.
+  // (The previous version also copied each channel's content_name into an
+  // unused local string before destroying it; those dead copies are removed.)
+  const cricket::ContentInfo* video_info =
+      cricket::GetFirstVideoContent(desc);
+  if ((!video_info || video_info->rejected) && video_channel_) {
+    SignalVideoChannelDestroyed();
+    channel_manager_->DestroyVideoChannel(video_channel_.release());
+  }
+
+  const cricket::ContentInfo* voice_info =
+      cricket::GetFirstAudioContent(desc);
+  if ((!voice_info || voice_info->rejected) && voice_channel_) {
+    SignalVoiceChannelDestroyed();
+    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
+  }
+
+  const cricket::ContentInfo* data_info =
+      cricket::GetFirstDataContent(desc);
+  if ((!data_info || data_info->rejected) && data_channel_) {
+    SignalDataChannelDestroyed();
+    channel_manager_->DestroyDataChannel(data_channel_.release());
+  }
+}
+
+// TODO(mallinath) - Add a correct error code if the channels are not created
+// due to BUNDLE is enabled but rtcp-mux is disabled.
+// Creates the voice/video/data channels required by |desc| (skipping
+// rejected contents and channels that already exist), then applies the
+// rtcp-mux-require and max-bundle policies configured on the session.
+bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
+  // Creating the media channels and transport proxies.
+  const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(desc);
+  if (voice && !voice->rejected && !voice_channel_) {
+    if (!CreateVoiceChannel(voice)) {
+      LOG(LS_ERROR) << "Failed to create voice channel.";
+      return false;
+    }
+  }
+
+  const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc);
+  if (video && !video->rejected && !video_channel_) {
+    if (!CreateVideoChannel(video)) {
+      LOG(LS_ERROR) << "Failed to create video channel.";
+      return false;
+    }
+  }
+
+  const cricket::ContentInfo* data = cricket::GetFirstDataContent(desc);
+  if (data_channel_type_ != cricket::DCT_NONE &&
+      data && !data->rejected && !data_channel_) {
+    if (!CreateDataChannel(data)) {
+      LOG(LS_ERROR) << "Failed to create data channel.";
+      return false;
+    }
+  }
+
+  // With rtcp-mux required, RTCP is muxed on the RTP transport from the
+  // start; no separate RTCP transport is allocated.
+  if (rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire) {
+    if (voice_channel()) {
+      voice_channel()->ActivateRtcpMux();
+    }
+    if (video_channel()) {
+      video_channel()->ActivateRtcpMux();
+    }
+    if (data_channel()) {
+      data_channel()->ActivateRtcpMux();
+    }
+  }
+
+  // Enable BUNDLE immediately when kBundlePolicyMaxBundle is in effect.
+  if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle) {
+    const cricket::ContentGroup* bundle_group = desc->GetGroupByName(
+        cricket::GROUP_TYPE_BUNDLE);
+    if (!bundle_group) {
+      LOG(LS_WARNING) << "max-bundle specified without BUNDLE specified";
+      return false;
+    }
+    if (!EnableBundle(*bundle_group)) {
+      LOG(LS_WARNING) << "max-bundle failed to enable bundling.";
+      return false;
+    }
+  }
+
+  return true;
+}
+
+// Creates the voice channel for |content| via the channel manager and hooks
+// up DTLS-failure and sent-packet signals.
+bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content) {
+  voice_channel_.reset(channel_manager_->CreateVoiceChannel(
+      media_controller_, transport_controller_.get(), content->name, true,
+      audio_options_));
+  if (!voice_channel_) {
+    return false;
+  }
+
+  voice_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVoiceChannelCreated();
+  voice_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+// Creates the video channel for |content| via the channel manager and hooks
+// up DTLS-failure and sent-packet signals.
+bool WebRtcSession::CreateVideoChannel(const cricket::ContentInfo* content) {
+  video_channel_.reset(channel_manager_->CreateVideoChannel(
+      media_controller_, transport_controller_.get(), content->name, true,
+      video_options_));
+  if (!video_channel_) {
+    return false;
+  }
+
+  video_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVideoChannelCreated();
+  video_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+// Creates the data channel for |content|. For SCTP the OPEN-message handler
+// is attached (RTP data channels negotiate via SDP instead); DTLS-failure
+// and sent-packet signals are hooked up in both cases.
+bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content) {
+  bool sctp = (data_channel_type_ == cricket::DCT_SCTP);
+  data_channel_.reset(channel_manager_->CreateDataChannel(
+      transport_controller_.get(), content->name, !sctp, data_channel_type_));
+  if (!data_channel_) {
+    return false;
+  }
+
+  if (sctp) {
+    data_channel_->SignalDataReceived.connect(
+        this, &WebRtcSession::OnDataChannelMessageReceived);
+  }
+
+  data_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalDataChannelCreated();
+  data_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+// Channel callback: records a transport error when DTLS setup fails on the
+// RTP or RTCP component.
+void WebRtcSession::OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp) {
+  SetError(ERROR_TRANSPORT,
+           rtcp ? kDtlsSetupFailureRtcp : kDtlsSetupFailureRtp);
+}
+
+// SCTP-only callback for inbound data-channel messages: OPEN control
+// messages are parsed and surfaced via SignalDataChannelOpenMessage so a new
+// DataChannel (acting as the handshake acker) can be created; everything
+// else is ignored here.
+void WebRtcSession::OnDataChannelMessageReceived(
+    cricket::DataChannel* channel,
+    const cricket::ReceiveDataParams& params,
+    const rtc::Buffer& payload) {
+  RTC_DCHECK(data_channel_type_ == cricket::DCT_SCTP);
+  if (params.type == cricket::DMT_CONTROL && IsOpenMessage(payload)) {
+    // Received OPEN message; parse and signal that a new data channel should
+    // be created.
+    std::string label;
+    InternalDataChannelInit config;
+    // For SCTP, params.ssrc carries the stream id.
+    config.id = params.ssrc;
+    if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
+      LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
+                      << params.ssrc;
+      return;
+    }
+    config.open_handshake_role = InternalDataChannelInit::kAcker;
+    SignalDataChannelOpenMessage(label, config);
+  }
+  // Otherwise ignore the message.
+}
+
+// Returns false if bundle is enabled and rtcp_mux is disabled: every
+// non-rejected RTP content inside the BUNDLE group must have rtcp-mux
+// enabled.
+bool WebRtcSession::ValidateBundleSettings(const SessionDescription* desc) {
+  bool bundle_enabled = desc->HasGroup(cricket::GROUP_TYPE_BUNDLE);
+  if (!bundle_enabled)
+    return true;
+
+  const cricket::ContentGroup* bundle_group =
+      desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  ASSERT(bundle_group != NULL);
+
+  // Range-for over the contents (this file already uses range-for); iterating
+  // by reference makes the old element NULL ASSERT unnecessary.
+  for (const cricket::ContentInfo& content : desc->contents()) {
+    if (bundle_group->HasContentName(content.name) &&
+        !content.rejected && content.type == cricket::NS_JINGLE_RTP) {
+      if (!HasRtcpMuxEnabled(&content))
+        return false;
+    }
+  }
+  // RTCP-MUX is enabled in all the contents.
+  return true;
+}
+
+// Returns true if the media description in |content| has a=rtcp-mux set.
+bool WebRtcSession::HasRtcpMuxEnabled(
+ const cricket::ContentInfo* content) {
+ // The unchecked downcast matches the rest of this file: contents passed
+ // here are RTP media sections. Only read access is needed, so cast to a
+ // pointer-to-const instead of stripping constness semantics.
+ const cricket::MediaContentDescription* description =
+ static_cast<const cricket::MediaContentDescription*>(
+ content->description);
+ return description->rtcp_mux();
+}
+
+// Runs all sanity checks on |sdesc| before it is applied as the local or
+// remote (per |source|) description. Returns false and fills |err_desc|
+// (via the BadSdp helpers) on the first failed check.
+bool WebRtcSession::ValidateSessionDescription(
+ const SessionDescriptionInterface* sdesc,
+ cricket::ContentSource source, std::string* err_desc) {
+ std::string type;
+ // Once the session is in an error state, no further descriptions are
+ // accepted.
+ if (error() != ERROR_NONE) {
+ return BadSdp(source, type, GetSessionErrorMsg(), err_desc);
+ }
+
+ if (!sdesc || !sdesc->description()) {
+ return BadSdp(source, type, kInvalidSdp, err_desc);
+ }
+
+ type = sdesc->type();
+ Action action = GetAction(sdesc->type());
+ // Check that this offer/answer/pranswer is legal in the current state.
+ if (source == cricket::CS_LOCAL) {
+ if (!ExpectSetLocalDescription(action))
+ return BadLocalSdp(type, BadStateErrMsg(state()), err_desc);
+ } else {
+ if (!ExpectSetRemoteDescription(action))
+ return BadRemoteSdp(type, BadStateErrMsg(state()), err_desc);
+ }
+
+ // Verify crypto settings.
+ std::string crypto_error;
+ if ((webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED ||
+ dtls_enabled_) &&
+ !VerifyCrypto(sdesc->description(), dtls_enabled_, &crypto_error)) {
+ return BadSdp(source, type, crypto_error, err_desc);
+ }
+
+ // Verify ice-ufrag and ice-pwd.
+ if (!VerifyIceUfragPwdPresent(sdesc->description())) {
+ return BadSdp(source, type, kSdpWithoutIceUfragPwd, err_desc);
+ }
+
+ if (!ValidateBundleSettings(sdesc->description())) {
+ return BadSdp(source, type, kBundleWithoutRtcpMux, err_desc);
+ }
+
+ // Verify m-lines in Answer when compared against Offer.
+ if (action == kAnswer) {
+ // The offer being answered sits on the other side of the negotiation:
+ // a local answer answers the remote offer, and vice versa.
+ const cricket::SessionDescription* offer_desc =
+ (source == cricket::CS_LOCAL) ? remote_desc_->description()
+ : local_desc_->description();
+ if (!VerifyMediaDescriptions(sdesc->description(), offer_desc)) {
+ return BadAnswerSdp(source, kMlineMismatch, err_desc);
+ }
+ }
+
+ return true;
+}
+
+// Returns true if SetLocalDescription with |action| is legal in the current
+// signaling state.
+bool WebRtcSession::ExpectSetLocalDescription(Action action) {
+ switch (action) {
+ case kOffer:
+ // Initial offer, updated local offer, or renegotiation of an ongoing
+ // session.
+ return state() == STATE_INIT || state() == STATE_SENTOFFER ||
+ state() == STATE_INPROGRESS;
+ case kAnswer:
+ // Accept a remote offer, possibly after a provisional answer was sent.
+ return state() == STATE_RECEIVEDOFFER || state() == STATE_SENTPRANSWER;
+ case kPrAnswer:
+ return state() == STATE_RECEIVEDOFFER || state() == STATE_SENTPRANSWER;
+ }
+ return false;
+}
+
+// Returns true if SetRemoteDescription with |action| is legal in the current
+// signaling state. Mirror image of ExpectSetLocalDescription.
+bool WebRtcSession::ExpectSetRemoteDescription(Action action) {
+ switch (action) {
+ case kOffer:
+ // Initial offer, updated remote offer, or renegotiation of an ongoing
+ // session.
+ return state() == STATE_INIT || state() == STATE_RECEIVEDOFFER ||
+ state() == STATE_INPROGRESS;
+ case kAnswer:
+ // Accept the answer to our local offer, possibly after a provisional
+ // answer was received.
+ return state() == STATE_SENTOFFER || state() == STATE_RECEIVEDPRANSWER;
+ case kPrAnswer:
+ return state() == STATE_SENTOFFER || state() == STATE_RECEIVEDPRANSWER;
+ }
+ return false;
+}
+
+std::string WebRtcSession::GetSessionErrorMsg() {
+ std::ostringstream desc;
+ desc << kSessionError << GetErrorCodeString(error()) << ". ";
+ desc << kSessionErrorDesc << error_desc() << ".";
+ return desc.str();
+}
+
+// We need to check the local/remote description for the Transport instead of
+// the session, because a new Transport added during renegotiation may have
+// them unset while the session has them set from the previous negotiation.
+// Not doing so may trigger the auto generation of transport description and
+// mess up DTLS identity information, ICE credential, etc.
+//
+// |remote_desc| is the new remote description, or NULL to use the currently
+// applied one. Sets |*valid| to false only when the candidate's m-line index
+// is out of range; returns true when the candidate can be pushed down now.
+bool WebRtcSession::ReadyToUseRemoteCandidate(
+ const IceCandidateInterface* candidate,
+ const SessionDescriptionInterface* remote_desc,
+ bool* valid) {
+ *valid = true;
+
+ const SessionDescriptionInterface* current_remote_desc =
+ remote_desc ? remote_desc : remote_desc_.get();
+
+ if (!current_remote_desc) {
+ return false;
+ }
+
+ size_t mediacontent_index =
+ static_cast<size_t>(candidate->sdp_mline_index());
+ size_t remote_content_size =
+ current_remote_desc->description()->contents().size();
+ if (mediacontent_index >= remote_content_size) {
+ LOG(LS_ERROR)
+ << "ReadyToUseRemoteCandidate: Invalid candidate media index.";
+
+ *valid = false;
+ return false;
+ }
+
+ // Bind a const reference instead of copying the ContentInfo; only its name
+ // is needed to locate the channel.
+ const cricket::ContentInfo& content =
+ current_remote_desc->description()->contents()[mediacontent_index];
+ cricket::BaseChannel* channel = GetChannel(content.name);
+ if (!channel) {
+ return false;
+ }
+
+ return transport_controller_->ReadyForRemoteCandidates(
+ channel->transport_name());
+}
+
+// Translates the TransportController's ICE gathering state into the
+// PeerConnection-level notification, forwarded to the registered observer.
+void WebRtcSession::OnTransportControllerGatheringState(
+ cricket::IceGatheringState state) {
+ ASSERT(signaling_thread()->IsCurrent());
+ // Without a registered observer there is nobody to notify.
+ if (!ice_observer_) {
+ return;
+ }
+ if (state == cricket::kIceGatheringGathering) {
+ ice_observer_->OnIceGatheringChange(
+ PeerConnectionInterface::kIceGatheringGathering);
+ } else if (state == cricket::kIceGatheringComplete) {
+ ice_observer_->OnIceGatheringChange(
+ PeerConnectionInterface::kIceGatheringComplete);
+ // OnIceComplete is the legacy completion callback (slated for removal
+ // per the TODO on IceObserver); fire it alongside the new one.
+ ice_observer_->OnIceComplete();
+ }
+}
+
+// Gathers stats for every transport currently in use by the voice, video and
+// data channels and reports them to the metrics observer.
+void WebRtcSession::ReportTransportStats() {
+ // Use a set so we don't report the same stats twice if two channels share
+ // a transport.
+ std::set<std::string> transport_names;
+ if (voice_channel()) {
+ transport_names.insert(voice_channel()->transport_name());
+ }
+ if (video_channel()) {
+ transport_names.insert(video_channel()->transport_name());
+ }
+ if (data_channel()) {
+ transport_names.insert(data_channel()->transport_name());
+ }
+ for (const auto& name : transport_names) {
+ cricket::TransportStats stats;
+ // Transports that fail to produce stats are silently skipped.
+ if (transport_controller_->GetStats(name, &stats)) {
+ ReportBestConnectionState(stats);
+ ReportNegotiatedCiphers(stats);
+ }
+ }
+}
+// Walk through the ConnectionInfos to gather best connection usage
+// for IPv4 and IPv6. Increments one candidate-pair-type counter and one
+// address-family counter for the first best connection found, then stops.
+void WebRtcSession::ReportBestConnectionState(
+ const cricket::TransportStats& stats) {
+ RTC_DCHECK(metrics_observer_ != NULL);
+ for (cricket::TransportChannelStatsList::const_iterator it =
+ stats.channel_stats.begin();
+ it != stats.channel_stats.end(); ++it) {
+ for (cricket::ConnectionInfos::const_iterator it_info =
+ it->connection_infos.begin();
+ it_info != it->connection_infos.end(); ++it_info) {
+ if (!it_info->best_connection) {
+ continue;
+ }
+
+ PeerConnectionEnumCounterType type = kPeerConnectionEnumCounterMax;
+ const cricket::Candidate& local = it_info->local_candidate;
+ const cricket::Candidate& remote = it_info->remote_candidate;
+
+ // Increment the counter for IceCandidatePairType.
+ // A pair counts as TCP if the local candidate uses TCP directly or is
+ // a relay candidate whose relay leg uses TCP.
+ if (local.protocol() == cricket::TCP_PROTOCOL_NAME ||
+ (local.type() == RELAY_PORT_TYPE &&
+ local.relay_protocol() == cricket::TCP_PROTOCOL_NAME)) {
+ type = kEnumCounterIceCandidatePairTypeTcp;
+ } else if (local.protocol() == cricket::UDP_PROTOCOL_NAME) {
+ type = kEnumCounterIceCandidatePairTypeUdp;
+ } else {
+ // Any other protocol is unexpected; crash in all builds.
+ RTC_CHECK(0);
+ }
+ metrics_observer_->IncrementEnumCounter(
+ type, GetIceCandidatePairCounter(local, remote),
+ kIceCandidatePairMax);
+
+ // Increment the counter for IP type.
+ if (local.address().family() == AF_INET) {
+ metrics_observer_->IncrementEnumCounter(
+ kEnumCounterAddressFamily, kBestConnections_IPv4,
+ kPeerConnectionAddressFamilyCounter_Max);
+
+ } else if (local.address().family() == AF_INET6) {
+ metrics_observer_->IncrementEnumCounter(
+ kEnumCounterAddressFamily, kBestConnections_IPv6,
+ kPeerConnectionAddressFamilyCounter_Max);
+ } else {
+ RTC_CHECK(0);
+ }
+
+ // Only the first best connection is reported; bail out entirely.
+ return;
+ }
+ }
+}
+
+// Reports the negotiated SRTP and SSL cipher suites of |stats|'s transport
+// to the metrics observer, bucketed by transport kind (audio/video/data).
+void WebRtcSession::ReportNegotiatedCiphers(
+ const cricket::TransportStats& stats) {
+ RTC_DCHECK(metrics_observer_ != NULL);
+ // Ciphers are only meaningful when DTLS is in use.
+ if (!dtls_enabled_ || stats.channel_stats.empty()) {
+ return;
+ }
+
+ // Only the first channel is inspected; presumably all channels of one
+ // transport negotiate the same suites — confirm if that assumption breaks.
+ const std::string& srtp_cipher = stats.channel_stats[0].srtp_cipher;
+ int ssl_cipher = stats.channel_stats[0].ssl_cipher;
+ if (srtp_cipher.empty() && !ssl_cipher) {
+ return;
+ }
+
+ // Pick the counter pair matching this transport's content name.
+ PeerConnectionEnumCounterType srtp_counter_type;
+ PeerConnectionEnumCounterType ssl_counter_type;
+ if (stats.transport_name == cricket::CN_AUDIO) {
+ srtp_counter_type = kEnumCounterAudioSrtpCipher;
+ ssl_counter_type = kEnumCounterAudioSslCipher;
+ } else if (stats.transport_name == cricket::CN_VIDEO) {
+ srtp_counter_type = kEnumCounterVideoSrtpCipher;
+ ssl_counter_type = kEnumCounterVideoSslCipher;
+ } else if (stats.transport_name == cricket::CN_DATA) {
+ srtp_counter_type = kEnumCounterDataSrtpCipher;
+ ssl_counter_type = kEnumCounterDataSslCipher;
+ } else {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ if (!srtp_cipher.empty()) {
+ metrics_observer_->IncrementSparseEnumCounter(
+ srtp_counter_type, rtc::GetSrtpCryptoSuiteFromName(srtp_cipher));
+ }
+ if (ssl_cipher) {
+ metrics_observer_->IncrementSparseEnumCounter(ssl_counter_type, ssl_cipher);
+ }
+}
+
+// Worker-thread callback fired when a transport channel has sent a packet;
+// forwards the notification to the media controller's Call object.
+void WebRtcSession::OnSentPacket_w(cricket::TransportChannel* channel,
+ const rtc::SentPacket& sent_packet) {
+ RTC_DCHECK(worker_thread()->IsCurrent());
+ media_controller_->call_w()->OnSentPacket(sent_packet);
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsession.h b/talk/app/webrtc/webrtcsession.h
new file mode 100644
index 0000000000..d9c40d1a83
--- /dev/null
+++ b/talk/app/webrtc/webrtcsession.h
@@ -0,0 +1,517 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_WEBRTCSESSION_H_
+#define TALK_APP_WEBRTC_WEBRTCSESSION_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/datachannel.h"
+#include "talk/app/webrtc/dtmfsender.h"
+#include "talk/app/webrtc/mediacontroller.h"
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/statstypes.h"
+#include "talk/media/base/mediachannel.h"
+#include "webrtc/p2p/base/transportcontroller.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+
+namespace cricket {
+
+class ChannelManager;
+class DataChannel;
+class StatsReport;
+class VideoCapturer;
+class VideoChannel;
+class VoiceChannel;
+
+} // namespace cricket
+
+namespace webrtc {
+
+class IceRestartAnswerLatch;
+class JsepIceCandidate;
+class MediaStreamSignaling;
+class WebRtcSessionDescriptionFactory;
+
+extern const char kBundleWithoutRtcpMux[];
+extern const char kCreateChannelFailed[];
+extern const char kInvalidCandidates[];
+extern const char kInvalidSdp[];
+extern const char kMlineMismatch[];
+extern const char kPushDownTDFailed[];
+extern const char kSdpWithoutDtlsFingerprint[];
+extern const char kSdpWithoutSdesCrypto[];
+extern const char kSdpWithoutIceUfragPwd[];
+extern const char kSdpWithoutSdesAndDtlsDisabled[];
+extern const char kSessionError[];
+extern const char kSessionErrorDesc[];
+extern const char kDtlsSetupFailureRtp[];
+extern const char kDtlsSetupFailureRtcp[];
+extern const char kEnableBundleFailed[];
+
+// Maximum number of received video streams that will be processed by webrtc
+// even if they are not signalled beforehand.
+extern const int kMaxUnsignalledRecvStreams;
+
+// ICE state callback interface. All callbacks have empty default
+// implementations except OnIceCandidate, which observers must provide.
+class IceObserver {
+ public:
+ IceObserver() {}
+ // Called any time the IceConnectionState changes
+ // TODO(honghaiz): Change the name to OnIceConnectionStateChange so as to
+ // conform to the w3c standard.
+ virtual void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {}
+ // Called any time the IceGatheringState changes
+ virtual void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {}
+ // New Ice candidate have been found.
+ virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0;
+ // All Ice candidates have been found.
+ // TODO(bemasc): Remove this once callers transition to OnIceGatheringChange.
+ // (via PeerConnectionObserver)
+ virtual void OnIceComplete() {}
+
+ // Called whenever the state changes between receiving and not receiving.
+ virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
+ protected:
+ // Non-virtual destructor: observers are not owned or deleted through this
+ // interface.
+ ~IceObserver() {}
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(IceObserver);
+};
+
+// Statistics for all the transports of the session, keyed by transport name.
+typedef std::map<std::string, cricket::TransportStats> TransportStatsMap;
+// Maps a proxy name to a transport name (both strings) — see the field
+// name below; confirm exact key/value semantics against the producers.
+typedef std::map<std::string, std::string> ProxyTransportMap;
+
+// TODO(pthatcher): Think of a better name for this. We already have
+// a TransportStats in transport.h. Perhaps TransportsStats?
+struct SessionStats {
+ ProxyTransportMap proxy_to_transport;
+ TransportStatsMap transport_stats;
+};
+
+// A WebRtcSession manages general session state. This includes negotiation
+// of both the application-level and network-level protocols: the former
+// defines what will be sent and the latter defines how it will be sent. Each
+// network-level protocol is represented by a Transport object. Each Transport
+// participates in the network-level negotiation. The individual streams of
+// packets are represented by TransportChannels. The application-level protocol
+// is represented by SessionDescription objects.
+class WebRtcSession : public AudioProviderInterface,
+ public VideoProviderInterface,
+ public DtmfProviderInterface,
+ public DataChannelProviderInterface,
+ public sigslot::has_slots<> {
+ public:
+ enum State {
+ STATE_INIT = 0,
+ STATE_SENTOFFER, // Sent offer, waiting for answer.
+ STATE_RECEIVEDOFFER, // Received an offer. Need to send answer.
+ STATE_SENTPRANSWER, // Sent provisional answer. Need to send answer.
+ STATE_RECEIVEDPRANSWER, // Received provisional answer, waiting for answer.
+ STATE_INPROGRESS, // Offer/answer exchange completed.
+ STATE_CLOSED, // Close() was called.
+ };
+
+ enum Error {
+ ERROR_NONE = 0, // no error
+ ERROR_CONTENT = 1, // channel errors in SetLocalContent/SetRemoteContent
+ ERROR_TRANSPORT = 2, // transport error of some kind
+ };
+
+ WebRtcSession(webrtc::MediaControllerInterface* media_controller,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ cricket::PortAllocator* port_allocator);
+ virtual ~WebRtcSession();
+
+ // These are const to allow them to be called from const methods.
+ rtc::Thread* signaling_thread() const { return signaling_thread_; }
+ rtc::Thread* worker_thread() const { return worker_thread_; }
+ cricket::PortAllocator* port_allocator() const { return port_allocator_; }
+
+ // The ID of this session.
+ const std::string& id() const { return sid_; }
+
+ bool Initialize(
+ const PeerConnectionFactoryInterface::Options& options,
+ const MediaConstraintsInterface* constraints,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ const PeerConnectionInterface::RTCConfiguration& rtc_configuration);
+ // Deletes the voice, video and data channel and changes the session state
+ // to STATE_CLOSED.
+ void Close();
+
+ // Returns true if we were the initial offerer.
+ bool initial_offerer() const { return initial_offerer_; }
+
+ // Returns the current state of the session. See the enum above for details.
+ // Each time the state changes, we will fire this signal.
+ State state() const { return state_; }
+ sigslot::signal2<WebRtcSession*, State> SignalState;
+
+ // Returns the last error in the session. See the enum above for details.
+ Error error() const { return error_; }
+ const std::string& error_desc() const { return error_desc_; }
+
+ // Registers the (single, not owned) observer notified of ICE state changes.
+ void RegisterIceObserver(IceObserver* observer) {
+ ice_observer_ = observer;
+ }
+
+ virtual cricket::VoiceChannel* voice_channel() {
+ return voice_channel_.get();
+ }
+ virtual cricket::VideoChannel* video_channel() {
+ return video_channel_.get();
+ }
+ virtual cricket::DataChannel* data_channel() {
+ return data_channel_.get();
+ }
+
+ void SetSdesPolicy(cricket::SecurePolicy secure_policy);
+ cricket::SecurePolicy SdesPolicy() const;
+
+ // Get current ssl role from transport.
+ bool GetSslRole(rtc::SSLRole* role);
+
+ void CreateOffer(
+ CreateSessionDescriptionObserver* observer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ const cricket::MediaSessionOptions& session_options);
+ void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints,
+ const cricket::MediaSessionOptions& session_options);
+ // The ownership of |desc| will be transferred after this call.
+ bool SetLocalDescription(SessionDescriptionInterface* desc,
+ std::string* err_desc);
+ // The ownership of |desc| will be transferred after this call.
+ bool SetRemoteDescription(SessionDescriptionInterface* desc,
+ std::string* err_desc);
+ bool ProcessIceMessage(const IceCandidateInterface* ice_candidate);
+
+ bool SetIceTransports(PeerConnectionInterface::IceTransportsType type);
+
+ cricket::IceConfig ParseIceConfig(
+ const PeerConnectionInterface::RTCConfiguration& config) const;
+
+ void SetIceConfig(const cricket::IceConfig& ice_config);
+
+ // Start gathering candidates for any new transports, or transports doing an
+ // ICE restart.
+ void MaybeStartGathering();
+
+ const SessionDescriptionInterface* local_description() const {
+ return local_desc_.get();
+ }
+ const SessionDescriptionInterface* remote_description() const {
+ return remote_desc_.get();
+ }
+
+ // Get the id used as a media stream track's "id" field from ssrc.
+ virtual bool GetLocalTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
+ virtual bool GetRemoteTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
+
+ // AudioMediaProviderInterface implementation.
+ void SetAudioPlayout(uint32_t ssrc, bool enable) override;
+ void SetAudioSend(uint32_t ssrc,
+ bool enable,
+ const cricket::AudioOptions& options,
+ cricket::AudioRenderer* renderer) override;
+ void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
+
+ // Implements VideoMediaProviderInterface.
+ bool SetCaptureDevice(uint32_t ssrc, cricket::VideoCapturer* camera) override;
+ void SetVideoPlayout(uint32_t ssrc,
+ bool enable,
+ cricket::VideoRenderer* renderer) override;
+ void SetVideoSend(uint32_t ssrc,
+ bool enable,
+ const cricket::VideoOptions* options) override;
+
+ // Implements DtmfProviderInterface.
+ virtual bool CanInsertDtmf(const std::string& track_id);
+ virtual bool InsertDtmf(const std::string& track_id,
+ int code, int duration);
+ virtual sigslot::signal0<>* GetOnDestroyedSignal();
+
+ // Implements DataChannelProviderInterface.
+ bool SendData(const cricket::SendDataParams& params,
+ const rtc::Buffer& payload,
+ cricket::SendDataResult* result) override;
+ bool ConnectDataChannel(DataChannel* webrtc_data_channel) override;
+ void DisconnectDataChannel(DataChannel* webrtc_data_channel) override;
+ void AddSctpDataStream(int sid) override;
+ void RemoveSctpDataStream(int sid) override;
+ bool ReadyToSendData() const override;
+
+ // Returns stats for all channels of all transports.
+ // This avoids exposing the internal structures used to track them.
+ virtual bool GetTransportStats(SessionStats* stats);
+
+ // Get stats for a specific channel
+ bool GetChannelTransportStats(cricket::BaseChannel* ch, SessionStats* stats);
+
+ // virtual so it can be mocked in unit tests
+ virtual bool GetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate);
+
+ // Caller owns returned certificate
+ virtual bool GetRemoteSSLCertificate(const std::string& transport_name,
+ rtc::SSLCertificate** cert);
+
+ cricket::DataChannelType data_channel_type() const;
+
+ bool IceRestartPending() const;
+
+ void ResetIceRestartLatch();
+
+ // Called when an RTCCertificate is generated or retrieved by
+ // WebRTCSessionDescriptionFactory. Should happen before setLocalDescription.
+ void OnCertificateReady(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+ void OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp);
+
+ // For unit test.
+ bool waiting_for_certificate_for_testing() const;
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate_for_testing();
+
+ // Sets the (not owned) observer used for UMA-style metrics reporting.
+ void set_metrics_observer(
+ webrtc::MetricsObserverInterface* metrics_observer) {
+ metrics_observer_ = metrics_observer;
+ }
+
+ // Called when voice_channel_, video_channel_ and data_channel_ are created
+ // and destroyed. As a result of, for example, setting a new description.
+ sigslot::signal0<> SignalVoiceChannelCreated;
+ sigslot::signal0<> SignalVoiceChannelDestroyed;
+ sigslot::signal0<> SignalVideoChannelCreated;
+ sigslot::signal0<> SignalVideoChannelDestroyed;
+ sigslot::signal0<> SignalDataChannelCreated;
+ sigslot::signal0<> SignalDataChannelDestroyed;
+
+ // Called when a valid data channel OPEN message is received.
+ // std::string represents the data channel label.
+ sigslot::signal2<const std::string&, const InternalDataChannelInit&>
+ SignalDataChannelOpenMessage;
+
+ private:
+ // Indicates the type of SessionDescription in a call to SetLocalDescription
+ // and SetRemoteDescription.
+ enum Action {
+ kOffer,
+ kPrAnswer,
+ kAnswer,
+ };
+
+ // Log session state.
+ void LogState(State old_state, State new_state);
+
+ // Updates the state, signaling if necessary.
+ virtual void SetState(State state);
+
+ // Updates the error state, signaling if necessary.
+ // TODO(ronghuawu): remove the SetError method that doesn't take |error_desc|.
+ virtual void SetError(Error error, const std::string& error_desc);
+
+ bool UpdateSessionState(Action action, cricket::ContentSource source,
+ std::string* err_desc);
+ static Action GetAction(const std::string& type);
+ // Push the media parts of the local or remote session description
+ // down to all of the channels.
+ bool PushdownMediaDescription(cricket::ContentAction action,
+ cricket::ContentSource source,
+ std::string* error_desc);
+
+ bool PushdownTransportDescription(cricket::ContentSource source,
+ cricket::ContentAction action,
+ std::string* error_desc);
+
+ // Helper methods to push local and remote transport descriptions.
+ bool PushdownLocalTransportDescription(
+ const cricket::SessionDescription* sdesc,
+ cricket::ContentAction action,
+ std::string* error_desc);
+ bool PushdownRemoteTransportDescription(
+ const cricket::SessionDescription* sdesc,
+ cricket::ContentAction action,
+ std::string* error_desc);
+
+ // Returns true and the TransportInfo of the given |content_name|
+ // from |description|. Returns false if it's not available.
+ static bool GetTransportDescription(
+ const cricket::SessionDescription* description,
+ const std::string& content_name,
+ cricket::TransportDescription* info);
+
+ cricket::BaseChannel* GetChannel(const std::string& content_name);
+ // Cause all the BaseChannels in the bundle group to have the same
+ // transport channel.
+ bool EnableBundle(const cricket::ContentGroup& bundle);
+
+ // Enables media channels to allow sending of media.
+ void EnableChannels();
+ // Returns the media index for a local ice candidate given the content name.
+ // Returns false if the local session description does not have a media
+ // content called |content_name|.
+ bool GetLocalCandidateMediaIndex(const std::string& content_name,
+ int* sdp_mline_index);
+ // Uses all remote candidates in |remote_desc| in this session.
+ bool UseCandidatesInSessionDescription(
+ const SessionDescriptionInterface* remote_desc);
+ // Uses |candidate| in this session.
+ bool UseCandidate(const IceCandidateInterface* candidate);
+ // Deletes the corresponding channel of contents that don't exist in |desc|.
+ // |desc| can be null. This means that all channels are deleted.
+ void RemoveUnusedChannels(const cricket::SessionDescription* desc);
+
+ // Allocates media channels based on the |desc|. If |desc| doesn't have
+ // the BUNDLE option, this method will disable BUNDLE in PortAllocator.
+ // This method will also delete any existing media channels before creating.
+ bool CreateChannels(const cricket::SessionDescription* desc);
+
+ // Helper methods to create media channels.
+ bool CreateVoiceChannel(const cricket::ContentInfo* content);
+ bool CreateVideoChannel(const cricket::ContentInfo* content);
+ bool CreateDataChannel(const cricket::ContentInfo* content);
+
+ // Listens to SCTP CONTROL messages on unused SIDs and process them as OPEN
+ // messages.
+ void OnDataChannelMessageReceived(cricket::DataChannel* channel,
+ const cricket::ReceiveDataParams& params,
+ const rtc::Buffer& payload);
+
+ std::string BadStateErrMsg(State state);
+ void SetIceConnectionState(PeerConnectionInterface::IceConnectionState state);
+ void SetIceConnectionReceiving(bool receiving);
+
+ bool ValidateBundleSettings(const cricket::SessionDescription* desc);
+ bool HasRtcpMuxEnabled(const cricket::ContentInfo* content);
+ // Below methods are helper methods which verifies SDP.
+ bool ValidateSessionDescription(const SessionDescriptionInterface* sdesc,
+ cricket::ContentSource source,
+ std::string* err_desc);
+
+ // Check if a call to SetLocalDescription is acceptable with |action|.
+ bool ExpectSetLocalDescription(Action action);
+ // Check if a call to SetRemoteDescription is acceptable with |action|.
+ bool ExpectSetRemoteDescription(Action action);
+ // Verifies a=setup attribute as per RFC 5763.
+ bool ValidateDtlsSetupAttribute(const cricket::SessionDescription* desc,
+ Action action);
+
+ // Returns true if we are ready to push down the remote candidate.
+ // |remote_desc| is the new remote description, or NULL if the current remote
+ // description should be used. Output |valid| is true if the candidate media
+ // index is valid.
+ bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate,
+ const SessionDescriptionInterface* remote_desc,
+ bool* valid);
+
+ void OnTransportControllerConnectionState(cricket::IceConnectionState state);
+ void OnTransportControllerReceiving(bool receiving);
+ void OnTransportControllerGatheringState(cricket::IceGatheringState state);
+ void OnTransportControllerCandidatesGathered(
+ const std::string& transport_name,
+ const cricket::Candidates& candidates);
+
+ std::string GetSessionErrorMsg();
+
+ // Invoked when TransportController connection completion is signaled.
+ // Reports stats for all transports in use.
+ void ReportTransportStats();
+
+ // Gather the usage of IPv4/IPv6 as best connection.
+ void ReportBestConnectionState(const cricket::TransportStats& stats);
+
+ void ReportNegotiatedCiphers(const cricket::TransportStats& stats);
+
+ void OnSentPacket_w(cricket::TransportChannel* channel,
+ const rtc::SentPacket& sent_packet);
+
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const worker_thread_;
+ cricket::PortAllocator* const port_allocator_;
+
+ // Current signaling state and last recorded error; see the State and Error
+ // enums above.
+ State state_ = STATE_INIT;
+ Error error_ = ERROR_NONE;
+ std::string error_desc_;
+
+ const std::string sid_;
+ bool initial_offerer_ = false;
+
+ rtc::scoped_ptr<cricket::TransportController> transport_controller_;
+ MediaControllerInterface* media_controller_;
+ rtc::scoped_ptr<cricket::VoiceChannel> voice_channel_;
+ rtc::scoped_ptr<cricket::VideoChannel> video_channel_;
+ rtc::scoped_ptr<cricket::DataChannel> data_channel_;
+ cricket::ChannelManager* channel_manager_;
+ // Set via RegisterIceObserver(); not owned by the session.
+ IceObserver* ice_observer_;
+ PeerConnectionInterface::IceConnectionState ice_connection_state_;
+ bool ice_connection_receiving_;
+ rtc::scoped_ptr<SessionDescriptionInterface> local_desc_;
+ rtc::scoped_ptr<SessionDescriptionInterface> remote_desc_;
+ // If the remote peer is using an older version of implementation.
+ bool older_version_remote_peer_;
+ bool dtls_enabled_;
+ // Specifies which kind of data channel is allowed. This is controlled
+ // by the chrome command-line flag and constraints:
+ // 1. If chrome command-line switch 'enable-sctp-data-channels' is enabled,
+ // constraint kEnableDtlsSrtp is true, and constraint kEnableRtpDataChannels
+ // is not set or false, SCTP is allowed (DCT_SCTP);
+ // 2. If constraint kEnableRtpDataChannels is true, RTP is allowed (DCT_RTP);
+ // 3. If both 1&2 are false, data channel is not allowed (DCT_NONE).
+ cricket::DataChannelType data_channel_type_;
+ rtc::scoped_ptr<IceRestartAnswerLatch> ice_restart_latch_;
+
+ rtc::scoped_ptr<WebRtcSessionDescriptionFactory>
+ webrtc_session_desc_factory_;
+
+ // Member variables for caching global options.
+ cricket::AudioOptions audio_options_;
+ cricket::VideoOptions video_options_;
+ MetricsObserverInterface* metrics_observer_;
+
+ // Declares the bundle policy for the WebRTCSession.
+ PeerConnectionInterface::BundlePolicy bundle_policy_;
+
+ // Declares the RTCP mux policy for the WebRTCSession.
+ PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSession);
+};
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_WEBRTCSESSION_H_
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
new file mode 100644
index 0000000000..3eb46f1d3c
--- /dev/null
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -0,0 +1,4219 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <vector>
+
+#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/fakemediacontroller.h"
+#include "talk/app/webrtc/fakemetricsobserver.h"
+#include "talk/app/webrtc/jsepicecandidate.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/sctputils.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/app/webrtc/webrtcsession.h"
+#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+#include "talk/media/base/fakemediaengine.h"
+#include "talk/media/base/fakevideorenderer.h"
+#include "talk/media/base/mediachannel.h"
+#include "talk/media/webrtc/fakewebrtccall.h"
+#include "webrtc/p2p/base/stunserver.h"
+#include "webrtc/p2p/base/teststunserver.h"
+#include "webrtc/p2p/base/testturnserver.h"
+#include "webrtc/p2p/base/transportchannel.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+#include "talk/session/media/channelmanager.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/fakenetwork.h"
+#include "webrtc/base/firewallsocketserver.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+#define MAYBE_SKIP_TEST(feature) \
+ if (!(feature())) { \
+ LOG(LS_INFO) << "Feature disabled... skipping"; \
+ return; \
+ }
+
+using cricket::DF_PLAY;
+using cricket::DF_SEND;
+using cricket::FakeVoiceMediaChannel;
+using cricket::TransportInfo;
+using rtc::SocketAddress;
+using rtc::scoped_ptr;
+using rtc::Thread;
+using webrtc::CreateSessionDescription;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::CreateSessionDescriptionRequest;
+using webrtc::DataChannel;
+using webrtc::DtlsIdentityStoreInterface;
+using webrtc::FakeConstraints;
+using webrtc::FakeMetricsObserver;
+using webrtc::IceCandidateCollection;
+using webrtc::InternalDataChannelInit;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SessionStats;
+using webrtc::StreamCollection;
+using webrtc::WebRtcSession;
+using webrtc::kBundleWithoutRtcpMux;
+using webrtc::kCreateChannelFailed;
+using webrtc::kInvalidSdp;
+using webrtc::kMlineMismatch;
+using webrtc::kPushDownTDFailed;
+using webrtc::kSdpWithoutIceUfragPwd;
+using webrtc::kSdpWithoutDtlsFingerprint;
+using webrtc::kSdpWithoutSdesCrypto;
+using webrtc::kSessionError;
+using webrtc::kSessionErrorDesc;
+using webrtc::kMaxUnsignalledRecvStreams;
+
+typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+
+static const int kClientAddrPort = 0;
+static const char kClientAddrHost1[] = "11.11.11.11";
+static const char kClientIPv6AddrHost1[] =
+ "2620:0:aaaa:bbbb:cccc:dddd:eeee:ffff";
+static const char kClientAddrHost2[] = "22.22.22.22";
+static const char kStunAddrHost[] = "99.99.99.1";
+static const SocketAddress kTurnUdpIntAddr("99.99.99.4", 3478);
+static const SocketAddress kTurnUdpExtAddr("99.99.99.6", 0);
+static const char kTurnUsername[] = "test";
+static const char kTurnPassword[] = "test";
+
+static const char kSessionVersion[] = "1";
+
+// Media index of candidates belonging to the first media content.
+static const int kMediaContentIndex0 = 0;
+static const char kMediaContentName0[] = "audio";
+
+// Media index of candidates belonging to the second media content.
+static const int kMediaContentIndex1 = 1;
+static const char kMediaContentName1[] = "video";
+
+static const int kIceCandidatesTimeout = 10000;
+
+static const char kFakeDtlsFingerprint[] =
+ "BB:CD:72:F7:2F:D0:BA:43:F3:68:B1:0C:23:72:B6:4A:"
+ "0F:DE:34:06:BC:E0:FE:01:BC:73:C8:6D:F4:65:D5:24";
+
+static const char kTooLongIceUfragPwd[] =
+ "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+ "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+ "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+ "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag";
+
+static const char kSdpWithRtx[] =
+ "v=0\r\n"
+ "o=- 4104004319237231850 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS stream1\r\n"
+ "m=video 9 RTP/SAVPF 0 96\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:CerjGp19G7wpXwl7\r\n"
+ "a=ice-pwd:cMvOlFvQ6ochez1ZOoC2uBEC\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:5/4N5CDvMiyDArHtBByUM71VIkguH17ZNoX60GrA\r\n"
+ "a=rtpmap:0 fake_video_codec/90000\r\n"
+ "a=rtpmap:96 rtx/90000\r\n"
+ "a=fmtp:96 apt=0\r\n";
+
+static const char kStream1[] = "stream1";
+static const char kVideoTrack1[] = "video1";
+static const char kAudioTrack1[] = "audio1";
+
+static const char kStream2[] = "stream2";
+static const char kVideoTrack2[] = "video2";
+static const char kAudioTrack2[] = "audio2";
+
+enum RTCCertificateGenerationMethod { ALREADY_GENERATED, DTLS_IDENTITY_STORE };
+
+// Add some extra |newlines| to the |message| after |line|.
+static void InjectAfter(const std::string& line,
+ const std::string& newlines,
+ std::string* message) {
+ const std::string tmp = line + newlines;
+ rtc::replace_substrs(line.c_str(), line.length(), tmp.c_str(), tmp.length(),
+ message);
+}
+
+class MockIceObserver : public webrtc::IceObserver {
+ public:
+ MockIceObserver()
+ : oncandidatesready_(false),
+ ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
+ ice_gathering_state_(PeerConnectionInterface::kIceGatheringNew) {
+ }
+
+ virtual void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ ice_connection_state_ = new_state;
+ }
+ virtual void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ // We can never transition back to "new".
+ EXPECT_NE(PeerConnectionInterface::kIceGatheringNew, new_state);
+ ice_gathering_state_ = new_state;
+
+ // oncandidatesready_ really means "ICE gathering is complete".
+ // This if statement ensures that this value remains correct when we
+ // transition from kIceGatheringComplete to kIceGatheringGathering.
+ if (new_state == PeerConnectionInterface::kIceGatheringGathering) {
+ oncandidatesready_ = false;
+ }
+ }
+
+ // Found a new candidate.
+ virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ switch (candidate->sdp_mline_index()) {
+ case kMediaContentIndex0:
+ mline_0_candidates_.push_back(candidate->candidate());
+ break;
+ case kMediaContentIndex1:
+ mline_1_candidates_.push_back(candidate->candidate());
+ break;
+ default:
+ ASSERT(false);
+ }
+
+ // The ICE gathering state should always be Gathering when a candidate is
+ // received (or possibly Completed in the case of the final candidate).
+ EXPECT_NE(PeerConnectionInterface::kIceGatheringNew, ice_gathering_state_);
+ }
+
+ // TODO(bemasc): Remove this once callers transition to OnIceGatheringChange.
+ virtual void OnIceComplete() {
+ EXPECT_FALSE(oncandidatesready_);
+ oncandidatesready_ = true;
+
+ // OnIceGatheringChange(IceGatheringCompleted) and OnIceComplete() should
+ // be called approximately simultaneously. For ease of testing, this
+ // check additionally requires that they be called in the above order.
+ EXPECT_EQ(PeerConnectionInterface::kIceGatheringComplete,
+ ice_gathering_state_);
+ }
+
+ bool oncandidatesready_;
+ std::vector<cricket::Candidate> mline_0_candidates_;
+ std::vector<cricket::Candidate> mline_1_candidates_;
+ PeerConnectionInterface::IceConnectionState ice_connection_state_;
+ PeerConnectionInterface::IceGatheringState ice_gathering_state_;
+};
+
+class WebRtcSessionForTest : public webrtc::WebRtcSession {
+ public:
+ WebRtcSessionForTest(webrtc::MediaControllerInterface* media_controller,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ cricket::PortAllocator* port_allocator,
+ webrtc::IceObserver* ice_observer)
+ : WebRtcSession(media_controller,
+ signaling_thread,
+ worker_thread,
+ port_allocator) {
+ RegisterIceObserver(ice_observer);
+ }
+ virtual ~WebRtcSessionForTest() {}
+
+ // Note that these methods are only safe to use if the signaling thread
+ // is the same as the worker thread
+ cricket::TransportChannel* voice_rtp_transport_channel() {
+ return rtp_transport_channel(voice_channel());
+ }
+
+ cricket::TransportChannel* voice_rtcp_transport_channel() {
+ return rtcp_transport_channel(voice_channel());
+ }
+
+ cricket::TransportChannel* video_rtp_transport_channel() {
+ return rtp_transport_channel(video_channel());
+ }
+
+ cricket::TransportChannel* video_rtcp_transport_channel() {
+ return rtcp_transport_channel(video_channel());
+ }
+
+ cricket::TransportChannel* data_rtp_transport_channel() {
+ return rtp_transport_channel(data_channel());
+ }
+
+ cricket::TransportChannel* data_rtcp_transport_channel() {
+ return rtcp_transport_channel(data_channel());
+ }
+
+ using webrtc::WebRtcSession::SetAudioPlayout;
+ using webrtc::WebRtcSession::SetAudioSend;
+ using webrtc::WebRtcSession::SetCaptureDevice;
+ using webrtc::WebRtcSession::SetVideoPlayout;
+ using webrtc::WebRtcSession::SetVideoSend;
+
+ private:
+ cricket::TransportChannel* rtp_transport_channel(cricket::BaseChannel* ch) {
+ if (!ch) {
+ return nullptr;
+ }
+ return ch->transport_channel();
+ }
+
+ cricket::TransportChannel* rtcp_transport_channel(cricket::BaseChannel* ch) {
+ if (!ch) {
+ return nullptr;
+ }
+ return ch->rtcp_transport_channel();
+ }
+};
+
+class WebRtcSessionCreateSDPObserverForTest
+ : public rtc::RefCountedObject<CreateSessionDescriptionObserver> {
+ public:
+ enum State {
+ kInit,
+ kFailed,
+ kSucceeded,
+ };
+ WebRtcSessionCreateSDPObserverForTest() : state_(kInit) {}
+
+ // CreateSessionDescriptionObserver implementation.
+ virtual void OnSuccess(SessionDescriptionInterface* desc) {
+ description_.reset(desc);
+ state_ = kSucceeded;
+ }
+ virtual void OnFailure(const std::string& error) {
+ state_ = kFailed;
+ }
+
+ SessionDescriptionInterface* description() { return description_.get(); }
+
+ SessionDescriptionInterface* ReleaseDescription() {
+ return description_.release();
+ }
+
+ State state() const { return state_; }
+
+ protected:
+ ~WebRtcSessionCreateSDPObserverForTest() {}
+
+ private:
+ rtc::scoped_ptr<SessionDescriptionInterface> description_;
+ State state_;
+};
+
+class FakeAudioRenderer : public cricket::AudioRenderer {
+ public:
+ FakeAudioRenderer() : sink_(NULL) {}
+ virtual ~FakeAudioRenderer() {
+ if (sink_)
+ sink_->OnClose();
+ }
+
+ void SetSink(Sink* sink) override { sink_ = sink; }
+
+ cricket::AudioRenderer::Sink* sink() const { return sink_; }
+ private:
+ cricket::AudioRenderer::Sink* sink_;
+};
+
+class WebRtcSessionTest
+ : public testing::TestWithParam<RTCCertificateGenerationMethod>,
+ public sigslot::has_slots<> {
+ protected:
+ // TODO Investigate why ChannelManager crashes, if it's created
+ // after stun_server.
+ WebRtcSessionTest()
+ : media_engine_(new cricket::FakeMediaEngine()),
+ data_engine_(new cricket::FakeDataEngine()),
+ channel_manager_(
+ new cricket::ChannelManager(media_engine_,
+ data_engine_,
+ new cricket::CaptureManager(),
+ rtc::Thread::Current())),
+ fake_call_(webrtc::Call::Config()),
+ media_controller_(
+ webrtc::MediaControllerInterface::Create(rtc::Thread::Current(),
+ channel_manager_.get())),
+ tdesc_factory_(new cricket::TransportDescriptionFactory()),
+ desc_factory_(
+ new cricket::MediaSessionDescriptionFactory(channel_manager_.get(),
+ tdesc_factory_.get())),
+ pss_(new rtc::PhysicalSocketServer),
+ vss_(new rtc::VirtualSocketServer(pss_.get())),
+ fss_(new rtc::FirewallSocketServer(vss_.get())),
+ ss_scope_(fss_.get()),
+ stun_socket_addr_(
+ rtc::SocketAddress(kStunAddrHost, cricket::STUN_SERVER_PORT)),
+ stun_server_(cricket::TestStunServer::Create(Thread::Current(),
+ stun_socket_addr_)),
+ turn_server_(Thread::Current(), kTurnUdpIntAddr, kTurnUdpExtAddr),
+ metrics_observer_(new rtc::RefCountedObject<FakeMetricsObserver>()) {
+ cricket::ServerAddresses stun_servers;
+ stun_servers.insert(stun_socket_addr_);
+ allocator_.reset(new cricket::BasicPortAllocator(
+ &network_manager_,
+ stun_servers,
+ SocketAddress(), SocketAddress(), SocketAddress()));
+ allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+ EXPECT_TRUE(channel_manager_->Init());
+ desc_factory_->set_add_legacy_streams(false);
+ allocator_->set_step_delay(cricket::kMinimumStepDelay);
+ }
+
+ void AddInterface(const SocketAddress& addr) {
+ network_manager_.AddInterface(addr);
+ }
+
+ // If |dtls_identity_store| != null or |rtc_configuration| contains
+ // |certificates| then DTLS will be enabled unless explicitly disabled by
+ // |rtc_configuration| options. When DTLS is enabled a certificate will be
+ // used if provided, otherwise one will be generated using the
+ // |dtls_identity_store|.
+ void Init(
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
+ const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
+ ASSERT_TRUE(session_.get() == NULL);
+ session_.reset(new WebRtcSessionForTest(
+ media_controller_.get(), rtc::Thread::Current(), rtc::Thread::Current(),
+ allocator_.get(), &observer_));
+ session_->SignalDataChannelOpenMessage.connect(
+ this, &WebRtcSessionTest::OnDataChannelOpenMessage);
+
+ EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+ observer_.ice_connection_state_);
+ EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+ observer_.ice_gathering_state_);
+
+ EXPECT_TRUE(session_->Initialize(options_, constraints_.get(),
+ dtls_identity_store.Pass(),
+ rtc_configuration));
+ session_->set_metrics_observer(metrics_observer_);
+ }
+
+ void OnDataChannelOpenMessage(const std::string& label,
+ const InternalDataChannelInit& config) {
+ last_data_channel_label_ = label;
+ last_data_channel_config_ = config;
+ }
+
+ void Init() {
+ PeerConnectionInterface::RTCConfiguration configuration;
+ Init(nullptr, configuration);
+ }
+
+ void InitWithIceTransport(
+ PeerConnectionInterface::IceTransportsType ice_transport_type) {
+ PeerConnectionInterface::RTCConfiguration configuration;
+ configuration.type = ice_transport_type;
+ Init(nullptr, configuration);
+ }
+
+ void InitWithBundlePolicy(
+ PeerConnectionInterface::BundlePolicy bundle_policy) {
+ PeerConnectionInterface::RTCConfiguration configuration;
+ configuration.bundle_policy = bundle_policy;
+ Init(nullptr, configuration);
+ }
+
+ void InitWithRtcpMuxPolicy(
+ PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy) {
+ PeerConnectionInterface::RTCConfiguration configuration;
+ configuration.rtcp_mux_policy = rtcp_mux_policy;
+ Init(nullptr, configuration);
+ }
+
+ // Successfully init with DTLS; with a certificate generated and supplied or
+ // with a store that generates it for us.
+ void InitWithDtls(RTCCertificateGenerationMethod cert_gen_method) {
+ rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store;
+ PeerConnectionInterface::RTCConfiguration configuration;
+ if (cert_gen_method == ALREADY_GENERATED) {
+ configuration.certificates.push_back(
+ FakeDtlsIdentityStore::GenerateCertificate());
+ } else if (cert_gen_method == DTLS_IDENTITY_STORE) {
+ dtls_identity_store.reset(new FakeDtlsIdentityStore());
+ dtls_identity_store->set_should_fail(false);
+ } else {
+ RTC_CHECK(false);
+ }
+ Init(dtls_identity_store.Pass(), configuration);
+ }
+
+ // Init with DTLS with a store that will fail to generate a certificate.
+ void InitWithDtlsIdentityGenFail() {
+ rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ new FakeDtlsIdentityStore());
+ dtls_identity_store->set_should_fail(true);
+ PeerConnectionInterface::RTCConfiguration configuration;
+ Init(dtls_identity_store.Pass(), configuration);
+ }
+
+ void InitWithDtmfCodec() {
+ // Add kTelephoneEventCodec for dtmf test.
+ const cricket::AudioCodec kTelephoneEventCodec(
+ 106, "telephone-event", 8000, 0, 1, 0);
+ std::vector<cricket::AudioCodec> codecs;
+ codecs.push_back(kTelephoneEventCodec);
+ media_engine_->SetAudioCodecs(codecs);
+ desc_factory_->set_audio_codecs(codecs);
+ Init();
+ }
+
+ void SendAudioVideoStream1() {
+ send_stream_1_ = true;
+ send_stream_2_ = false;
+ send_audio_ = true;
+ send_video_ = true;
+ }
+
+ void SendAudioVideoStream2() {
+ send_stream_1_ = false;
+ send_stream_2_ = true;
+ send_audio_ = true;
+ send_video_ = true;
+ }
+
+ void SendAudioVideoStream1And2() {
+ send_stream_1_ = true;
+ send_stream_2_ = true;
+ send_audio_ = true;
+ send_video_ = true;
+ }
+
+ void SendNothing() {
+ send_stream_1_ = false;
+ send_stream_2_ = false;
+ send_audio_ = false;
+ send_video_ = false;
+ }
+
+ void SendAudioOnlyStream2() {
+ send_stream_1_ = false;
+ send_stream_2_ = true;
+ send_audio_ = true;
+ send_video_ = false;
+ }
+
+ void SendVideoOnlyStream2() {
+ send_stream_1_ = false;
+ send_stream_2_ = true;
+ send_audio_ = false;
+ send_video_ = true;
+ }
+
+ void AddStreamsToOptions(cricket::MediaSessionOptions* session_options) {
+ if (send_stream_1_ && send_audio_) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack1,
+ kStream1);
+ }
+ if (send_stream_1_ && send_video_) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack1,
+ kStream1);
+ }
+ if (send_stream_2_ && send_audio_) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack2,
+ kStream2);
+ }
+ if (send_stream_2_ && send_video_) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack2,
+ kStream2);
+ }
+ if (data_channel_ && session_->data_channel_type() == cricket::DCT_RTP) {
+ session_options->AddSendStream(cricket::MEDIA_TYPE_DATA,
+ data_channel_->label(),
+ data_channel_->label());
+ }
+ }
+
+ void GetOptionsForOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+ cricket::MediaSessionOptions* session_options) {
+ ASSERT_TRUE(ConvertRtcOptionsForOffer(rtc_options, session_options));
+
+ AddStreamsToOptions(session_options);
+ if (rtc_options.offer_to_receive_audio ==
+ RTCOfferAnswerOptions::kUndefined) {
+ session_options->recv_audio =
+ session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO);
+ }
+ if (rtc_options.offer_to_receive_video ==
+ RTCOfferAnswerOptions::kUndefined) {
+ session_options->recv_video =
+ session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO);
+ }
+ session_options->bundle_enabled =
+ session_options->bundle_enabled &&
+ (session_options->has_audio() || session_options->has_video() ||
+ session_options->has_data());
+
+ if (session_->data_channel_type() == cricket::DCT_SCTP && data_channel_) {
+ session_options->data_channel_type = cricket::DCT_SCTP;
+ }
+ }
+
+ void GetOptionsForAnswer(const webrtc::MediaConstraintsInterface* constraints,
+ cricket::MediaSessionOptions* session_options) {
+ session_options->recv_audio = false;
+ session_options->recv_video = false;
+ ASSERT_TRUE(ParseConstraintsForAnswer(constraints, session_options));
+
+ AddStreamsToOptions(session_options);
+ session_options->bundle_enabled =
+ session_options->bundle_enabled &&
+ (session_options->has_audio() || session_options->has_video() ||
+ session_options->has_data());
+
+ if (session_->data_channel_type() == cricket::DCT_SCTP) {
+ session_options->data_channel_type = cricket::DCT_SCTP;
+ }
+ }
+
+ // Creates a local offer and applies it. Starts ICE.
+ // Call SendAudioVideoStreamX() before this function
+ // to decide which streams to create.
+ void InitiateCall() {
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+ EXPECT_TRUE_WAIT(PeerConnectionInterface::kIceGatheringNew !=
+ observer_.ice_gathering_state_,
+ kIceCandidatesTimeout);
+ }
+
+ SessionDescriptionInterface* CreateOffer() {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+ return CreateOffer(options);
+ }
+
+ SessionDescriptionInterface* CreateOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
+ observer = new WebRtcSessionCreateSDPObserverForTest();
+ cricket::MediaSessionOptions session_options;
+ GetOptionsForOffer(options, &session_options);
+ session_->CreateOffer(observer, options, session_options);
+ EXPECT_TRUE_WAIT(
+ observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
+ 2000);
+ return observer->ReleaseDescription();
+ }
+
+ SessionDescriptionInterface* CreateAnswer(
+ const webrtc::MediaConstraintsInterface* constraints) {
+ rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observer
+ = new WebRtcSessionCreateSDPObserverForTest();
+ cricket::MediaSessionOptions session_options;
+ GetOptionsForAnswer(constraints, &session_options);
+ session_->CreateAnswer(observer, constraints, session_options);
+ EXPECT_TRUE_WAIT(
+ observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
+ 2000);
+ return observer->ReleaseDescription();
+ }
+
+ bool ChannelsExist() const {
+ return (session_->voice_channel() != NULL &&
+ session_->video_channel() != NULL);
+ }
+
+ void VerifyCryptoParams(const cricket::SessionDescription* sdp) {
+ ASSERT_TRUE(session_.get() != NULL);
+ const cricket::ContentInfo* content = cricket::GetFirstAudioContent(sdp);
+ ASSERT_TRUE(content != NULL);
+ const cricket::AudioContentDescription* audio_content =
+ static_cast<const cricket::AudioContentDescription*>(
+ content->description);
+ ASSERT_TRUE(audio_content != NULL);
+ ASSERT_EQ(1U, audio_content->cryptos().size());
+ ASSERT_EQ(47U, audio_content->cryptos()[0].key_params.size());
+ ASSERT_EQ("AES_CM_128_HMAC_SHA1_80",
+ audio_content->cryptos()[0].cipher_suite);
+ EXPECT_EQ(std::string(cricket::kMediaProtocolSavpf),
+ audio_content->protocol());
+
+ content = cricket::GetFirstVideoContent(sdp);
+ ASSERT_TRUE(content != NULL);
+ const cricket::VideoContentDescription* video_content =
+ static_cast<const cricket::VideoContentDescription*>(
+ content->description);
+ ASSERT_TRUE(video_content != NULL);
+ ASSERT_EQ(1U, video_content->cryptos().size());
+ ASSERT_EQ("AES_CM_128_HMAC_SHA1_80",
+ video_content->cryptos()[0].cipher_suite);
+ ASSERT_EQ(47U, video_content->cryptos()[0].key_params.size());
+ EXPECT_EQ(std::string(cricket::kMediaProtocolSavpf),
+ video_content->protocol());
+ }
+
+ void VerifyNoCryptoParams(const cricket::SessionDescription* sdp, bool dtls) {
+ const cricket::ContentInfo* content = cricket::GetFirstAudioContent(sdp);
+ ASSERT_TRUE(content != NULL);
+ const cricket::AudioContentDescription* audio_content =
+ static_cast<const cricket::AudioContentDescription*>(
+ content->description);
+ ASSERT_TRUE(audio_content != NULL);
+ ASSERT_EQ(0U, audio_content->cryptos().size());
+
+ content = cricket::GetFirstVideoContent(sdp);
+ ASSERT_TRUE(content != NULL);
+ const cricket::VideoContentDescription* video_content =
+ static_cast<const cricket::VideoContentDescription*>(
+ content->description);
+ ASSERT_TRUE(video_content != NULL);
+ ASSERT_EQ(0U, video_content->cryptos().size());
+
+ if (dtls) {
+ EXPECT_EQ(std::string(cricket::kMediaProtocolDtlsSavpf),
+ audio_content->protocol());
+ EXPECT_EQ(std::string(cricket::kMediaProtocolDtlsSavpf),
+ video_content->protocol());
+ } else {
+ EXPECT_EQ(std::string(cricket::kMediaProtocolAvpf),
+ audio_content->protocol());
+ EXPECT_EQ(std::string(cricket::kMediaProtocolAvpf),
+ video_content->protocol());
+ }
+ }
+
+ // Set the internal fake description factories to do DTLS-SRTP.
+ void SetFactoryDtlsSrtp() {
+ desc_factory_->set_secure(cricket::SEC_DISABLED);
+ std::string identity_name = "WebRTC" +
+ rtc::ToString(rtc::CreateRandomId());
+ // Confirmed to work with KT_RSA and KT_ECDSA.
+ tdesc_factory_->set_certificate(rtc::RTCCertificate::Create(
+ rtc::scoped_ptr<rtc::SSLIdentity>(rtc::SSLIdentity::Generate(
+ identity_name, rtc::KT_DEFAULT)).Pass()));
+ tdesc_factory_->set_secure(cricket::SEC_REQUIRED);
+ }
+
+ void VerifyFingerprintStatus(const cricket::SessionDescription* sdp,
+ bool expected) {
+ const TransportInfo* audio = sdp->GetTransportInfoByName("audio");
+ ASSERT_TRUE(audio != NULL);
+ ASSERT_EQ(expected, audio->description.identity_fingerprint.get() != NULL);
+ const TransportInfo* video = sdp->GetTransportInfoByName("video");
+ ASSERT_TRUE(video != NULL);
+ ASSERT_EQ(expected, video->description.identity_fingerprint.get() != NULL);
+ }
+
+ void VerifyAnswerFromNonCryptoOffer() {
+ // Create an SDP without Crypto.
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ JsepSessionDescription* offer(
+ CreateRemoteOffer(options, cricket::SEC_DISABLED));
+ ASSERT_TRUE(offer != NULL);
+ VerifyNoCryptoParams(offer->description(), false);
+ SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto,
+ offer);
+ const webrtc::SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ // Answer should be NULL as no crypto params in offer.
+ ASSERT_TRUE(answer == NULL);
+ }
+
+ void VerifyAnswerFromCryptoOffer() {
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ options.bundle_enabled = true;
+ scoped_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options, cricket::SEC_REQUIRED));
+ ASSERT_TRUE(offer.get() != NULL);
+ VerifyCryptoParams(offer->description());
+ SetRemoteDescriptionWithoutError(offer.release());
+ scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+ ASSERT_TRUE(answer.get() != NULL);
+ VerifyCryptoParams(answer->description());
+ }
+
+ void SetAndVerifyNumUnsignalledRecvStreams(
+ int value_set, int value_expected) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kNumUnsignalledRecvStreams,
+ value_set);
+ session_.reset();
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ SetLocalDescriptionWithoutError(offer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+
+ ASSERT_TRUE(video_channel_ != NULL);
+ const cricket::VideoOptions& video_options = video_channel_->options();
+ EXPECT_EQ(value_expected,
+ video_options.unsignalled_recv_stream_limit.GetWithDefaultIfUnset(-1));
+ }
+
+ void CompareIceUfragAndPassword(const cricket::SessionDescription* desc1,
+ const cricket::SessionDescription* desc2,
+ bool expect_equal) {
+ if (desc1->contents().size() != desc2->contents().size()) {
+ EXPECT_FALSE(expect_equal);
+ return;
+ }
+
+ const cricket::ContentInfos& contents = desc1->contents();
+ cricket::ContentInfos::const_iterator it = contents.begin();
+
+ for (; it != contents.end(); ++it) {
+ const cricket::TransportDescription* transport_desc1 =
+ desc1->GetTransportDescriptionByName(it->name);
+ const cricket::TransportDescription* transport_desc2 =
+ desc2->GetTransportDescriptionByName(it->name);
+ if (!transport_desc1 || !transport_desc2) {
+ EXPECT_FALSE(expect_equal);
+ return;
+ }
+ if (transport_desc1->ice_pwd != transport_desc2->ice_pwd ||
+ transport_desc1->ice_ufrag != transport_desc2->ice_ufrag) {
+ EXPECT_FALSE(expect_equal);
+ return;
+ }
+ }
+ EXPECT_TRUE(expect_equal);
+ }
+
+ void RemoveIceUfragPwdLines(const SessionDescriptionInterface* current_desc,
+ std::string *sdp) {
+ const cricket::SessionDescription* desc = current_desc->description();
+ EXPECT_TRUE(current_desc->ToString(sdp));
+
+ const cricket::ContentInfos& contents = desc->contents();
+ cricket::ContentInfos::const_iterator it = contents.begin();
+ // Replace ufrag and pwd lines with empty strings.
+ for (; it != contents.end(); ++it) {
+ const cricket::TransportDescription* transport_desc =
+ desc->GetTransportDescriptionByName(it->name);
+ std::string ufrag_line = "a=ice-ufrag:" + transport_desc->ice_ufrag
+ + "\r\n";
+ std::string pwd_line = "a=ice-pwd:" + transport_desc->ice_pwd
+ + "\r\n";
+ rtc::replace_substrs(ufrag_line.c_str(), ufrag_line.length(),
+ "", 0,
+ sdp);
+ rtc::replace_substrs(pwd_line.c_str(), pwd_line.length(),
+ "", 0,
+ sdp);
+ }
+ }
+
+ void ModifyIceUfragPwdLines(const SessionDescriptionInterface* current_desc,
+ const std::string& modified_ice_ufrag,
+ const std::string& modified_ice_pwd,
+ std::string* sdp) {
+ const cricket::SessionDescription* desc = current_desc->description();
+ EXPECT_TRUE(current_desc->ToString(sdp));
+
+ const cricket::ContentInfos& contents = desc->contents();
+ cricket::ContentInfos::const_iterator it = contents.begin();
+ // Replace ufrag and pwd lines with |modified_ice_ufrag| and
+ // |modified_ice_pwd| strings.
+ for (; it != contents.end(); ++it) {
+ const cricket::TransportDescription* transport_desc =
+ desc->GetTransportDescriptionByName(it->name);
+ std::string ufrag_line = "a=ice-ufrag:" + transport_desc->ice_ufrag
+ + "\r\n";
+ std::string pwd_line = "a=ice-pwd:" + transport_desc->ice_pwd
+ + "\r\n";
+ std::string mod_ufrag = "a=ice-ufrag:" + modified_ice_ufrag + "\r\n";
+ std::string mod_pwd = "a=ice-pwd:" + modified_ice_pwd + "\r\n";
+ rtc::replace_substrs(ufrag_line.c_str(), ufrag_line.length(),
+ mod_ufrag.c_str(), mod_ufrag.length(),
+ sdp);
+ rtc::replace_substrs(pwd_line.c_str(), pwd_line.length(),
+ mod_pwd.c_str(), mod_pwd.length(),
+ sdp);
+ }
+ }
+
+ // Creates a remote offer and and applies it as a remote description,
+ // creates a local answer and applies is as a local description.
+ // Call SendAudioVideoStreamX() before this function
+ // to decide which local and remote streams to create.
+ void CreateAndSetRemoteOfferAndLocalAnswer() {
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+ }
+  // Applies |desc| as the local description, expecting success, and starts
+  // ICE candidate gathering. Takes ownership of |desc|.
+  void SetLocalDescriptionWithoutError(SessionDescriptionInterface* desc) {
+    EXPECT_TRUE(session_->SetLocalDescription(desc, NULL));
+    session_->MaybeStartGathering();
+  }
+  // Same as above, but additionally verifies that the session ends up in
+  // |expected_state|.
+  void SetLocalDescriptionExpectState(SessionDescriptionInterface* desc,
+                                      WebRtcSession::State expected_state) {
+    SetLocalDescriptionWithoutError(desc);
+    EXPECT_EQ(expected_state, session_->state());
+  }
+  // Applies |desc| as the local description and expects it to be rejected,
+  // with an error message containing both the SDP type ("local " + |action|)
+  // and |expected_error|. Takes ownership of |desc|.
+  void SetLocalDescriptionExpectError(const std::string& action,
+                                      const std::string& expected_error,
+                                      SessionDescriptionInterface* desc) {
+    std::string error;
+    EXPECT_FALSE(session_->SetLocalDescription(desc, &error));
+    std::string sdp_type = "local ";
+    sdp_type.append(action);
+    EXPECT_NE(std::string::npos, error.find(sdp_type));
+    EXPECT_NE(std::string::npos, error.find(expected_error));
+  }
+  // Convenience wrappers for local offers and answers.
+  void SetLocalDescriptionOfferExpectError(const std::string& expected_error,
+                                           SessionDescriptionInterface* desc) {
+    SetLocalDescriptionExpectError(SessionDescriptionInterface::kOffer,
+                                   expected_error, desc);
+  }
+  void SetLocalDescriptionAnswerExpectError(const std::string& expected_error,
+                                            SessionDescriptionInterface* desc) {
+    SetLocalDescriptionExpectError(SessionDescriptionInterface::kAnswer,
+                                   expected_error, desc);
+  }
+  // Applies |desc| as the remote description, expecting success.
+  // Takes ownership of |desc|.
+  void SetRemoteDescriptionWithoutError(SessionDescriptionInterface* desc) {
+    EXPECT_TRUE(session_->SetRemoteDescription(desc, NULL));
+  }
+  // Same as above, but additionally verifies the resulting session state.
+  void SetRemoteDescriptionExpectState(SessionDescriptionInterface* desc,
+                                       WebRtcSession::State expected_state) {
+    SetRemoteDescriptionWithoutError(desc);
+    EXPECT_EQ(expected_state, session_->state());
+  }
+  // Applies |desc| as the remote description and expects rejection with an
+  // error containing "remote " + |action| and |expected_error|.
+  void SetRemoteDescriptionExpectError(const std::string& action,
+                                       const std::string& expected_error,
+                                       SessionDescriptionInterface* desc) {
+    std::string error;
+    EXPECT_FALSE(session_->SetRemoteDescription(desc, &error));
+    std::string sdp_type = "remote ";
+    sdp_type.append(action);
+    EXPECT_NE(std::string::npos, error.find(sdp_type));
+    EXPECT_NE(std::string::npos, error.find(expected_error));
+  }
+  // Convenience wrappers for remote offers, provisional answers and answers.
+  void SetRemoteDescriptionOfferExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kOffer,
+                                    expected_error, desc);
+  }
+  void SetRemoteDescriptionPranswerExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kPrAnswer,
+                                    expected_error, desc);
+  }
+  void SetRemoteDescriptionAnswerExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kAnswer,
+                                    expected_error, desc);
+  }
+
+  // Fills |*offer| with a remote offer that contains SDES crypto params and
+  // |*nocrypto_answer| with a matching answer that has none. The caller takes
+  // ownership of both out parameters.
+  void CreateCryptoOfferAndNonCryptoAnswer(SessionDescriptionInterface** offer,
+      SessionDescriptionInterface** nocrypto_answer) {
+    // Create an offer with crypto enabled, then an answer without crypto.
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    options.bundle_enabled = true;
+    *offer = CreateRemoteOffer(options, cricket::SEC_ENABLED);
+    ASSERT_TRUE(*offer != NULL);
+    VerifyCryptoParams((*offer)->description());
+
+    *nocrypto_answer = CreateRemoteAnswer(*offer, options,
+                                          cricket::SEC_DISABLED);
+    EXPECT_TRUE(*nocrypto_answer != NULL);
+  }
+
+  // Fills |*offer| with a remote offer that carries a DTLS fingerprint and
+  // |*nodtls_answer| with an answer that has no fingerprint (it still carries
+  // SDES crypto params). The caller takes ownership of both out parameters.
+  void CreateDtlsOfferAndNonDtlsAnswer(SessionDescriptionInterface** offer,
+      SessionDescriptionInterface** nodtls_answer) {
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    options.bundle_enabled = true;
+
+    // Build the answer from a temporary non-DTLS offer, so the answer lacks
+    // a fingerprint.
+    rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+        CreateRemoteOffer(options, cricket::SEC_ENABLED));
+
+    *nodtls_answer =
+        CreateRemoteAnswer(temp_offer.get(), options, cricket::SEC_ENABLED);
+    EXPECT_TRUE(*nodtls_answer != NULL);
+    VerifyFingerprintStatus((*nodtls_answer)->description(), false);
+    VerifyCryptoParams((*nodtls_answer)->description());
+
+    // Enable DTLS-SRTP in the factory, then create the real offer, which
+    // must carry a fingerprint.
+    SetFactoryDtlsSrtp();
+    *offer = CreateRemoteOffer(options, cricket::SEC_ENABLED);
+    ASSERT_TRUE(*offer != NULL);
+    VerifyFingerprintStatus((*offer)->description(), true);
+    VerifyCryptoParams((*offer)->description());
+  }
+
+  // Builds a remote offer with an explicit |session_version|. If
+  // |current_desc| is non-NULL, its session id and contents are reused so the
+  // result looks like a re-offer of that session. Returns NULL when
+  // initialization fails; the caller owns the returned object.
+  JsepSessionDescription* CreateRemoteOfferWithVersion(
+      cricket::MediaSessionOptions options,
+      cricket::SecurePolicy secure_policy,
+      const std::string& session_version,
+      const SessionDescriptionInterface* current_desc) {
+    std::string session_id = rtc::ToString(rtc::CreateRandomId64());
+    const cricket::SessionDescription* cricket_desc = NULL;
+    if (current_desc) {
+      cricket_desc = current_desc->description();
+      session_id = current_desc->session_id();
+    }
+
+    desc_factory_->set_secure(secure_policy);
+    JsepSessionDescription* offer(
+        new JsepSessionDescription(JsepSessionDescription::kOffer));
+    if (!offer->Initialize(desc_factory_->CreateOffer(options, cricket_desc),
+                           session_id, session_version)) {
+      delete offer;
+      offer = NULL;
+    }
+    return offer;
+  }
+  // Overloads of CreateRemoteOffer that default the SDES policy, session
+  // version and/or the base description.
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options) {
+    return CreateRemoteOfferWithVersion(options, cricket::SEC_ENABLED,
+                                        kSessionVersion, NULL);
+  }
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options, cricket::SecurePolicy sdes_policy) {
+    return CreateRemoteOfferWithVersion(
+        options, sdes_policy, kSessionVersion, NULL);
+  }
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options,
+      const SessionDescriptionInterface* current_desc) {
+    return CreateRemoteOfferWithVersion(options, cricket::SEC_ENABLED,
+                                        kSessionVersion, current_desc);
+  }
+
+  // Creates a remote offer containing an SCTP data channel stream named
+  // |sctp_stream_name|, with the SCTP port rewritten to |new_port|.
+  JsepSessionDescription* CreateRemoteOfferWithSctpPort(
+      const char* sctp_stream_name, int new_port,
+      cricket::MediaSessionOptions options) {
+    options.data_channel_type = cricket::DCT_SCTP;
+    options.AddSendStream(cricket::MEDIA_TYPE_DATA, "datachannel",
+                          sctp_stream_name);
+    return ChangeSDPSctpPort(new_port, CreateRemoteOffer(options));
+  }
+
+  // Takes ownership of offer_basis (and deletes it).
+  // Returns a new description identical to |offer_basis| except that every
+  // occurrence of the default SCTP port (5000) is replaced by |new_port|.
+  JsepSessionDescription* ChangeSDPSctpPort(
+      int new_port, webrtc::SessionDescriptionInterface *offer_basis) {
+    // Stringify the input SDP, swap the 5000 for 'new_port' and create a new
+    // SessionDescription from the mutated string.
+    const char* default_port_str = "5000";
+    char new_port_str[16];
+    rtc::sprintfn(new_port_str, sizeof(new_port_str), "%d", new_port);
+    std::string offer_str;
+    offer_basis->ToString(&offer_str);
+    rtc::replace_substrs(default_port_str, strlen(default_port_str),
+                         new_port_str, strlen(new_port_str),
+                         &offer_str);
+    JsepSessionDescription* offer = new JsepSessionDescription(
+        offer_basis->type());
+    delete offer_basis;
+    offer->Initialize(offer_str, NULL);
+    return offer;
+  }
+
+  // Create a remote offer. Call SendAudioVideoStreamX()
+  // before this function to decide which streams to create.
+  JsepSessionDescription* CreateRemoteOffer() {
+    cricket::MediaSessionOptions options;
+    GetOptionsForAnswer(NULL, &options);
+    // Reuse the current remote description (if any) as the base description.
+    return CreateRemoteOffer(options, session_->remote_description());
+  }
+
+  // Creates a remote answer to |offer| under the given SDES |policy|.
+  // Returns NULL on initialization failure; the caller owns the result.
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer,
+      cricket::MediaSessionOptions options,
+      cricket::SecurePolicy policy) {
+    desc_factory_->set_secure(policy);
+    const std::string session_id =
+        rtc::ToString(rtc::CreateRandomId64());
+    JsepSessionDescription* answer(
+        new JsepSessionDescription(JsepSessionDescription::kAnswer));
+    if (!answer->Initialize(desc_factory_->CreateAnswer(offer->description(),
+                                                        options, NULL),
+                            session_id, kSessionVersion)) {
+      delete answer;
+      answer = NULL;
+    }
+    return answer;
+  }
+
+  // Overload defaulting the SDES policy to SEC_REQUIRED.
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer,
+      cricket::MediaSessionOptions options) {
+    return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
+  }
+
+  // Creates an answer session description.
+  // Call SendAudioVideoStreamX() before this function
+  // to decide which streams to create.
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer) {
+    cricket::MediaSessionOptions options;
+    GetOptionsForAnswer(NULL, &options);
+    return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
+  }
+
+  // Applies a local offer (optionally with BUNDLE) and a remote answer
+  // (optionally with rtcp-mux stripped out), then verifies the number of
+  // candidates gathered for each m-line.
+  void TestSessionCandidatesWithBundleRtcpMux(bool bundle, bool rtcp_mux) {
+    AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+    Init();
+    SendAudioVideoStream1();
+
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    options.use_rtp_mux = bundle;
+
+    SessionDescriptionInterface* offer = CreateOffer(options);
+    // SetLocalDescription and SetRemoteDescriptions takes ownership of offer
+    // and answer.
+    SetLocalDescriptionWithoutError(offer);
+
+    rtc::scoped_ptr<SessionDescriptionInterface> answer(
+        CreateRemoteAnswer(session_->local_description()));
+    std::string sdp;
+    EXPECT_TRUE(answer->ToString(&sdp));
+
+    size_t expected_candidate_num = 2;
+    if (!rtcp_mux) {
+      // If rtcp_mux is disabled we should expect 4 candidates - host and
+      // srflx for rtp and rtcp.
+      expected_candidate_num = 4;
+      // Disable rtcp-mux from the answer
+      const std::string kRtcpMux = "a=rtcp-mux";
+      const std::string kXRtcpMux = "a=xrtcp-mux";
+      rtc::replace_substrs(kRtcpMux.c_str(), kRtcpMux.length(),
+                           kXRtcpMux.c_str(), kXRtcpMux.length(),
+                           &sdp);
+    }
+
+    SessionDescriptionInterface* new_answer = CreateSessionDescription(
+        JsepSessionDescription::kAnswer, sdp, NULL);
+
+    // SetRemoteDescription to enable rtcp mux.
+    SetRemoteDescriptionWithoutError(new_answer);
+    EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+    EXPECT_EQ(expected_candidate_num, observer_.mline_0_candidates_.size());
+    if (bundle) {
+      // With BUNDLE, the second m-line gathers no candidates of its own.
+      EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+    } else {
+      EXPECT_EQ(expected_candidate_num, observer_.mline_1_candidates_.size());
+    }
+  }
+  // Tests that we can only send DTMF when the dtmf codec is supported.
+  void TestCanInsertDtmf(bool can) {
+    if (can) {
+      InitWithDtmfCodec();
+    } else {
+      Init();
+    }
+    SendAudioVideoStream1();
+    CreateAndSetRemoteOfferAndLocalAnswer();
+    // An empty track id must never be accepted.
+    EXPECT_FALSE(session_->CanInsertDtmf(""));
+    EXPECT_EQ(can, session_->CanInsertDtmf(kAudioTrack1));
+  }
+
+  // Helper class to configure loopback network and verify Best
+  // Connection using right IP protocol for TestLoopbackCall
+  // method. LoopbackNetworkManager applies firewall rules to block
+  // all ping traffic once ICE completed, and remove them to observe
+  // ICE reconnected again. This LoopbackNetworkConfiguration struct
+  // verifies the best connection is using the right IP protocol after
+  // initial ICE convergence.
+
+  class LoopbackNetworkConfiguration {
+   public:
+    LoopbackNetworkConfiguration()
+        : test_ipv6_network_(false),
+          test_extra_ipv4_network_(false),
+          best_connection_after_initial_ice_converged_(1, 0) {}
+
+    // Used to track the expected best connection count in each IP protocol.
+    struct ExpectedBestConnection {
+      ExpectedBestConnection(int ipv4_count, int ipv6_count)
+          : ipv4_count_(ipv4_count),
+            ipv6_count_(ipv6_count) {}
+
+      int ipv4_count_;
+      int ipv6_count_;
+    };
+
+    // Which extra interfaces LoopbackNetworkManager should add.
+    bool test_ipv6_network_;
+    bool test_extra_ipv4_network_;
+    ExpectedBestConnection best_connection_after_initial_ice_converged_;
+
+    void VerifyBestConnectionAfterIceConverge(
+        const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer) const {
+      Verify(metrics_observer, best_connection_after_initial_ice_converged_);
+    }
+
+   private:
+    // Checks the per-address-family best-connection counters and the UDP
+    // candidate-pair-type counters recorded by |metrics_observer|.
+    void Verify(const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer,
+                const ExpectedBestConnection& expected) const {
+      EXPECT_EQ(
+          metrics_observer->GetEnumCounter(webrtc::kEnumCounterAddressFamily,
+                                           webrtc::kBestConnections_IPv4),
+          expected.ipv4_count_);
+      EXPECT_EQ(
+          metrics_observer->GetEnumCounter(webrtc::kEnumCounterAddressFamily,
+                                           webrtc::kBestConnections_IPv6),
+          expected.ipv6_count_);
+      // This is used in the loopback call so there is only single host to host
+      // candidate pair.
+      EXPECT_EQ(metrics_observer->GetEnumCounter(
+                    webrtc::kEnumCounterIceCandidatePairTypeUdp,
+                    webrtc::kIceCandidatePairHostHost),
+                0);
+      EXPECT_EQ(metrics_observer->GetEnumCounter(
+                    webrtc::kEnumCounterIceCandidatePairTypeUdp,
+                    webrtc::kIceCandidatePairHostPublicHostPublic),
+                1);
+    }
+  };
+
+  // Adds the network interfaces requested by |config| to the test's fake
+  // network, and can block/unblock them with firewall rules to simulate
+  // connectivity loss.
+  class LoopbackNetworkManager {
+   public:
+    LoopbackNetworkManager(WebRtcSessionTest* session,
+                           const LoopbackNetworkConfiguration& config)
+        : config_(config) {
+      session->AddInterface(
+          rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+      if (config_.test_extra_ipv4_network_) {
+        session->AddInterface(
+            rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+      }
+      if (config_.test_ipv6_network_) {
+        session->AddInterface(
+            rtc::SocketAddress(kClientIPv6AddrHost1, kClientAddrPort));
+      }
+    }
+
+    // Blocks all traffic on every configured interface.
+    void ApplyFirewallRules(rtc::FirewallSocketServer* fss) {
+      fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                   rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+      if (config_.test_extra_ipv4_network_) {
+        fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                     rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+      }
+      if (config_.test_ipv6_network_) {
+        fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                     rtc::SocketAddress(kClientIPv6AddrHost1, kClientAddrPort));
+      }
+    }
+
+    // Removes all previously applied firewall rules.
+    void ClearRules(rtc::FirewallSocketServer* fss) { fss->ClearRules(); }
+
+   private:
+    LoopbackNetworkConfiguration config_;
+  };
+
+  // The method sets up a call from the session to itself, in a loopback
+  // arrangement. It also uses a firewall rule to create a temporary
+  // disconnection, and then a permanent disconnection.
+  // This code is placed in a method so that it can be invoked
+  // by multiple tests with different allocators (e.g. with and without BUNDLE).
+  // While running the call, this method also checks if the session goes through
+  // the correct sequence of ICE states when a connection is established,
+  // broken, and re-established.
+  // The Connection state should go:
+  // New -> Checking -> (Connected) -> Completed -> Disconnected -> Completed
+  // -> Failed.
+  // The Gathering state should go: New -> Gathering -> Completed.
+
+  // Establishes the loopback call: applies a local offer, waits for candidate
+  // gathering to complete, then feeds the same SDP back as the remote answer
+  // and waits for the ICE connection to complete.
+  void SetupLoopbackCall() {
+    Init();
+    SendAudioVideoStream1();
+    SessionDescriptionInterface* offer = CreateOffer();
+
+    EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+              observer_.ice_gathering_state_);
+    SetLocalDescriptionWithoutError(offer);
+    EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+              observer_.ice_connection_state_);
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceGatheringGathering,
+                   observer_.ice_gathering_state_, kIceCandidatesTimeout);
+    EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete,
+                   observer_.ice_gathering_state_, kIceCandidatesTimeout);
+
+    // Loop the local offer back in as the remote answer.
+    std::string sdp;
+    offer->ToString(&sdp);
+    SessionDescriptionInterface* desc = webrtc::CreateSessionDescription(
+        JsepSessionDescription::kAnswer, sdp, nullptr);
+    ASSERT_TRUE(desc != NULL);
+    SetRemoteDescriptionWithoutError(desc);
+
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking,
+                   observer_.ice_connection_state_, kIceCandidatesTimeout);
+
+    // The ice connection state is "Connected" too briefly to catch in a test.
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+                   observer_.ice_connection_state_, kIceCandidatesTimeout);
+  }
+
+  // Runs a full loopback call over |config|'s interfaces, then breaks and
+  // restores connectivity with firewall rules, checking the ICE connection
+  // state transitions along the way.
+  void TestLoopbackCall(const LoopbackNetworkConfiguration& config) {
+    LoopbackNetworkManager loopback_network_manager(this, config);
+    SetupLoopbackCall();
+    config.VerifyBestConnectionAfterIceConverge(metrics_observer_);
+    // Adding firewall rule to block ping requests, which should cause
+    // transport channel failure.
+
+    loopback_network_manager.ApplyFirewallRules(fss_.get());
+
+    LOG(LS_INFO) << "Firewall Rules applied";
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout);
+
+    metrics_observer_->Reset();
+
+    // Clearing the rules, session should move back to completed state.
+    loopback_network_manager.ClearRules(fss_.get());
+
+    LOG(LS_INFO) << "Firewall Rules cleared";
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout);
+
+    // Now we block ping requests and wait until the ICE connection transitions
+    // to the Failed state. This will take at least 30 seconds because it must
+    // wait for the Port to timeout.
+    int port_timeout = 30000;
+
+    loopback_network_manager.ApplyFirewallRules(fss_.get());
+    LOG(LS_INFO) << "Firewall Rules applied again";
+    // NOTE(review): the comment above mentions the Failed state, but the wait
+    // below only checks for Disconnected — confirm which is intended.
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout + port_timeout);
+  }
+
+  // Runs the loopback call with the default (IPv4-only) configuration.
+  void TestLoopbackCall() {
+    LoopbackNetworkConfiguration config;
+    TestLoopbackCall(config);
+  }
+
+  // Verifies that rtc::PacketOptions (here, packet_id) set when the media
+  // channel sends RTP are propagated down to the transport and reported in
+  // the fake call's last-sent-packet info.
+  void TestPacketOptions() {
+    media_controller_.reset(
+        new cricket::FakeMediaController(channel_manager_.get(), &fake_call_));
+    LoopbackNetworkConfiguration config;
+    LoopbackNetworkManager loopback_network_manager(this, config);
+
+    SetupLoopbackCall();
+
+    uint8_t test_packet[15] = {0};
+    rtc::PacketOptions options;
+    options.packet_id = 10;
+    media_engine_->GetVideoChannel(0)
+        ->SendRtp(test_packet, sizeof(test_packet), options);
+
+    const int kPacketTimeout = 2000;
+    EXPECT_EQ_WAIT(fake_call_.last_sent_packet().packet_id, 10, kPacketTimeout);
+    EXPECT_GT(fake_call_.last_sent_packet().send_time_ms, -1);
+  }
+
+  // Adds CN codecs to FakeMediaEngine and MediaDescriptionFactory.
+  void AddCNCodecs() {
+    const cricket::AudioCodec kCNCodec1(102, "CN", 8000, 0, 1, 0);
+    const cricket::AudioCodec kCNCodec2(103, "CN", 16000, 0, 1, 0);
+
+    // Add kCNCodec for dtmf test.
+    std::vector<cricket::AudioCodec> codecs = media_engine_->audio_codecs();
+    codecs.push_back(kCNCodec1);
+    codecs.push_back(kCNCodec2);
+    media_engine_->SetAudioCodecs(codecs);
+    desc_factory_->set_audio_codecs(codecs);
+  }
+
+  // Returns true iff |content|'s audio description contains no "CN"
+  // (comfort noise) codec.
+  bool VerifyNoCNCodecs(const cricket::ContentInfo* content) {
+    const cricket::ContentDescription* description = content->description;
+    ASSERT(description != NULL);
+    const cricket::AudioContentDescription* audio_content_desc =
+        static_cast<const cricket::AudioContentDescription*>(description);
+    ASSERT(audio_content_desc != NULL);
+    for (size_t i = 0; i < audio_content_desc->codecs().size(); ++i) {
+      if (audio_content_desc->codecs()[i].name == "CN")
+        return false;
+    }
+    return true;
+  }
+
+  // Creates |data_channel_| using the session's configured data channel type;
+  // the channel is marked reliable only for SCTP.
+  void CreateDataChannel() {
+    webrtc::InternalDataChannelInit dci;
+    dci.reliable = session_->data_channel_type() == cricket::DCT_SCTP;
+    data_channel_ = DataChannel::Create(
+        session_.get(), session_->data_channel_type(), "datachannel", dci);
+  }
+
+  // Creates a data channel and applies a local offer that includes it.
+  void SetLocalDescriptionWithDataChannel() {
+    CreateDataChannel();
+    SessionDescriptionInterface* offer = CreateOffer();
+    SetLocalDescriptionWithoutError(offer);
+  }
+
+  // Runs the multiple-async-CreateOffer/CreateAnswer check after a normal
+  // DTLS init, where certificate generation is expected to succeed.
+  void VerifyMultipleAsyncCreateDescription(
+      RTCCertificateGenerationMethod cert_gen_method,
+      CreateSessionDescriptionRequest::Type type) {
+    InitWithDtls(cert_gen_method);
+    VerifyMultipleAsyncCreateDescriptionAfterInit(true, type);
+  }
+
+  // Same check, but with identity generation set up to fail.
+  void VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+      CreateSessionDescriptionRequest::Type type) {
+    InitWithDtlsIdentityGenFail();
+    VerifyMultipleAsyncCreateDescriptionAfterInit(false, type);
+  }
+
+  // Issues several concurrent CreateOffer/CreateAnswer requests and verifies
+  // that every observer reaches the expected terminal state, with a
+  // description present iff |success| is true.
+  void VerifyMultipleAsyncCreateDescriptionAfterInit(
+      bool success, CreateSessionDescriptionRequest::Type type) {
+    RTC_CHECK(session_);
+    SetFactoryDtlsSrtp();
+    if (type == CreateSessionDescriptionRequest::kAnswer) {
+      // Creating an answer requires a remote offer to be in place first.
+      cricket::MediaSessionOptions options;
+      scoped_ptr<JsepSessionDescription> offer(
+          CreateRemoteOffer(options, cricket::SEC_DISABLED));
+      ASSERT_TRUE(offer.get() != NULL);
+      SetRemoteDescriptionWithoutError(offer.release());
+    }
+
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    cricket::MediaSessionOptions session_options;
+    const int kNumber = 3;
+    rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
+        observers[kNumber];
+    for (int i = 0; i < kNumber; ++i) {
+      observers[i] = new WebRtcSessionCreateSDPObserverForTest();
+      if (type == CreateSessionDescriptionRequest::kOffer) {
+        session_->CreateOffer(observers[i], options, session_options);
+      } else {
+        session_->CreateAnswer(observers[i], nullptr, session_options);
+      }
+    }
+
+    WebRtcSessionCreateSDPObserverForTest::State expected_state =
+        success ? WebRtcSessionCreateSDPObserverForTest::kSucceeded :
+                  WebRtcSessionCreateSDPObserverForTest::kFailed;
+
+    for (int i = 0; i < kNumber; ++i) {
+      EXPECT_EQ_WAIT(expected_state, observers[i]->state(), 1000);
+      if (success) {
+        EXPECT_TRUE(observers[i]->description() != NULL);
+      } else {
+        EXPECT_TRUE(observers[i]->description() == NULL);
+      }
+    }
+  }
+
+  // Adds a UDP TURN relay to the port allocator and disables TCP candidate
+  // gathering, so only UDP-based ports are collected.
+  void ConfigureAllocatorWithTurn() {
+    cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
+    cricket::RelayCredentials credentials(kTurnUsername, kTurnPassword);
+    relay_server.credentials = credentials;
+    relay_server.ports.push_back(cricket::ProtocolAddress(
+        kTurnUdpIntAddr, cricket::PROTO_UDP, false));
+    allocator_->AddRelay(relay_server);
+    allocator_->set_step_delay(cricket::kMinimumStepDelay);
+    allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP);
+  }
+
+  // Fake media/data engines and the channel manager built on top of them.
+  cricket::FakeMediaEngine* media_engine_;
+  cricket::FakeDataEngine* data_engine_;
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  cricket::FakeCall fake_call_;
+  rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
+  rtc::scoped_ptr<cricket::TransportDescriptionFactory> tdesc_factory_;
+  rtc::scoped_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
+  // Virtual network stack: physical/virtual/firewall socket servers plus
+  // fake STUN/TURN servers and the network manager used for gathering.
+  rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
+  rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
+  rtc::scoped_ptr<rtc::FirewallSocketServer> fss_;
+  rtc::SocketServerScope ss_scope_;
+  rtc::SocketAddress stun_socket_addr_;
+  rtc::scoped_ptr<cricket::TestStunServer> stun_server_;
+  cricket::TestTurnServer turn_server_;
+  rtc::FakeNetworkManager network_manager_;
+  rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
+  PeerConnectionFactoryInterface::Options options_;
+  rtc::scoped_ptr<FakeConstraints> constraints_;
+  // The session under test, its observers and the fake media channels.
+  rtc::scoped_ptr<WebRtcSessionForTest> session_;
+  MockIceObserver observer_;
+  cricket::FakeVideoMediaChannel* video_channel_;
+  cricket::FakeVoiceMediaChannel* voice_channel_;
+  rtc::scoped_refptr<FakeMetricsObserver> metrics_observer_;
+  // The following flags affect options created for CreateOffer/CreateAnswer.
+  bool send_stream_1_ = false;
+  bool send_stream_2_ = false;
+  bool send_audio_ = false;
+  bool send_video_ = false;
+  rtc::scoped_refptr<DataChannel> data_channel_;
+  // Last values received from data channel creation signal.
+  std::string last_data_channel_label_;
+  InternalDataChannelInit last_data_channel_config_;
+};
+
+// Parameterized over the certificate generation method (GetParam()).
+TEST_P(WebRtcSessionTest, TestInitializeWithDtls) {
+  InitWithDtls(GetParam());
+  // SDES is disabled when DTLS is on.
+  EXPECT_EQ(cricket::SEC_DISABLED, session_->SdesPolicy());
+}
+
+TEST_F(WebRtcSessionTest, TestInitializeWithoutDtls) {
+  Init();
+  // SDES is required if DTLS is off.
+  EXPECT_EQ(cricket::SEC_REQUIRED, session_->SdesPolicy());
+}
+
+// Neither BUNDLE nor rtcp-mux enabled.
+TEST_F(WebRtcSessionTest, TestSessionCandidates) {
+  TestSessionCandidatesWithBundleRtcpMux(false, false);
+}
+
+// Below test cases (TestSessionCandidatesWith*) verify the candidates gathered
+// with rtcp-mux and/or bundle.
+TEST_F(WebRtcSessionTest, TestSessionCandidatesWithRtcpMux) {
+  TestSessionCandidatesWithBundleRtcpMux(false, true);
+}
+
+TEST_F(WebRtcSessionTest, TestSessionCandidatesWithBundleRtcpMux) {
+  TestSessionCandidatesWithBundleRtcpMux(true, true);
+}
+
+// With two interfaces and no rtcp-mux/BUNDLE, each m-line gathers candidates
+// on both interfaces for both RTP and RTCP.
+TEST_F(WebRtcSessionTest, TestMultihomeCandidates) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  AddInterface(rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(8u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(8u, observer_.mline_1_candidates_.size());
+}
+
+// Candidate gathering must still complete on the unblocked interface when
+// UDP (and hence STUN) traffic from the other interface is dropped.
+TEST_F(WebRtcSessionTest, TestStunError) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  AddInterface(rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+  // Drop all UDP from kClientAddrHost1 so its STUN requests fail.
+  fss_->AddRule(false,
+                rtc::FP_UDP,
+                rtc::FD_ANY,
+                rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  InitiateCall();
+  // Since kClientAddrHost1 is blocked, not expecting stun candidates for it.
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(6u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(6u, observer_.mline_1_candidates_.size());
+}
+
+// Test session delivers no candidates gathered when constraint set to "none".
+TEST_F(WebRtcSessionTest, TestIceTransportsNone) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  InitWithIceTransport(PeerConnectionInterface::kNone);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  // No candidates at all should have been signaled.
+  EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+}
+
+// Test session delivers only relay candidates gathered when constraint set to
+// "relay".
+TEST_F(WebRtcSessionTest, TestIceTransportsRelay) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  ConfigureAllocatorWithTurn();
+  InitWithIceTransport(PeerConnectionInterface::kRelay);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(2u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(2u, observer_.mline_1_candidates_.size());
+  // Every gathered candidate must be of the relay type.
+  for (size_t i = 0; i < observer_.mline_0_candidates_.size(); ++i) {
+    EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+              observer_.mline_0_candidates_[i].type());
+  }
+  for (size_t i = 0; i < observer_.mline_1_candidates_.size(); ++i) {
+    EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+              observer_.mline_1_candidates_[i].type());
+  }
+}
+
+// Test session delivers all candidates gathered when constraint set to "all".
+TEST_F(WebRtcSessionTest, TestIceTransportsAll) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  InitWithIceTransport(PeerConnectionInterface::kAll);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  // Host + STUN. By default allocator is disabled to gather relay candidates.
+  EXPECT_EQ(4u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(4u, observer_.mline_1_candidates_.size());
+}
+
+// A NULL description must be rejected with kInvalidSdp on both the local and
+// the remote side.
+TEST_F(WebRtcSessionTest, SetSdpFailedOnInvalidSdp) {
+  Init();
+  SessionDescriptionInterface* offer = NULL;
+  // Since |offer| is NULL, there's no way to tell if it's an offer or answer.
+  std::string unknown_action;
+  SetLocalDescriptionExpectError(unknown_action, kInvalidSdp, offer);
+  SetRemoteDescriptionExpectError(unknown_action, kInvalidSdp, offer);
+}
+
+// Test creating offers and receive answers and make sure the
+// media engine creates the expected send and receive streams.
+TEST_F(WebRtcSessionTest, TestCreateSdesOfferReceiveSdesAnswer) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+  const std::string session_id_orig = offer->session_id();
+  const std::string session_version_orig = offer->session_version();
+  SetLocalDescriptionWithoutError(offer);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  // Receive streams come from the remote answer (stream 2), send streams
+  // from the local offer (stream 1).
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->send_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
+
+  // Create new offer without send streams.
+  SendNothing();
+  offer = CreateOffer();
+
+  // Verify the session id is the same and the session version is
+  // increased.
+  EXPECT_EQ(session_id_orig, offer->session_id());
+  EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+            rtc::FromString<uint64_t>(offer->session_version()));
+
+  SetLocalDescriptionWithoutError(offer);
+  EXPECT_EQ(0u, video_channel_->send_streams().size());
+  EXPECT_EQ(0u, voice_channel_->send_streams().size());
+
+  SendAudioVideoStream2();
+  answer = CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  // Make sure the receive streams have not changed.
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+}
+
+// Test receiving offers and creating answers and make sure the
+// media engine creates the expected send and receive streams.
+TEST_F(WebRtcSessionTest, TestReceiveSdesOfferCreateSdesAnswer) {
+  Init();
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* offer = CreateOffer();
+  VerifyCryptoParams(offer->description());
+  SetRemoteDescriptionWithoutError(offer);
+
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  VerifyCryptoParams(answer->description());
+  SetLocalDescriptionWithoutError(answer);
+
+  const std::string session_id_orig = answer->session_id();
+  const std::string session_version_orig = answer->session_version();
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  // Receive streams come from the remote offer (stream 2), send streams
+  // from the local answer (stream 1).
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->send_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
+
+  SendAudioVideoStream1And2();
+  offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Answer by turning off all send streams.
+  SendNothing();
+  answer = CreateAnswer(NULL);
+
+  // Verify the session id is the same and the session version is
+  // increased.
+  EXPECT_EQ(session_id_orig, answer->session_id());
+  EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+            rtc::FromString<uint64_t>(answer->session_version()));
+  SetLocalDescriptionWithoutError(answer);
+
+  ASSERT_EQ(2u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->recv_streams()[0].id);
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[1].id);
+  ASSERT_EQ(2u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->recv_streams()[0].id);
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[1].id);
+
+  // Make sure we have no send streams.
+  EXPECT_EQ(0u, video_channel_->send_streams().size());
+  EXPECT_EQ(0u, voice_channel_->send_streams().size());
+}
+
+// When the media engine fails to create a channel, both SetRemoteDescription
+// and SetLocalDescription must fail with kCreateChannelFailed.
+TEST_F(WebRtcSessionTest, SetLocalSdpFailedOnCreateChannel) {
+  Init();
+  media_engine_->set_fail_create_channel(true);
+
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer.
+  SetRemoteDescriptionOfferExpectError(kCreateChannelFailed, offer);
+  offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  SetLocalDescriptionOfferExpectError(kCreateChannelFailed, offer);
+}
+
+//
+// Tests for creating/setting SDP under different SDES/DTLS policies:
+//
+// --DTLS off and SDES on
+// TestCreateSdesOfferReceiveSdesAnswer/TestReceiveSdesOfferCreateSdesAnswer:
+// set local/remote offer/answer with crypto --> success
+// TestSetNonSdesOfferWhenSdesOn: set local/remote offer without crypto --->
+// failure
+// TestSetLocalNonSdesAnswerWhenSdesOn: set local answer without crypto -->
+// failure
+// TestSetRemoteNonSdesAnswerWhenSdesOn: set remote answer without crypto -->
+// failure
+//
+// --DTLS on and SDES off
+// TestCreateDtlsOfferReceiveDtlsAnswer/TestReceiveDtlsOfferCreateDtlsAnswer:
+// set local/remote offer/answer with DTLS fingerprint --> success
+// TestReceiveNonDtlsOfferWhenDtlsOn: set local/remote offer without DTLS
+// fingerprint --> failure
+// TestSetLocalNonDtlsAnswerWhenDtlsOn: set local answer without fingerprint
+// --> failure
+// TestSetRemoteNonDtlsAnswerWhenDtlsOn: set remote answer without fingerprint
+// --> failure
+//
+// --Encryption disabled: DTLS off and SDES off
+// TestCreateOfferReceiveAnswerWithoutEncryption: set local offer and remote
+// answer without SDES or DTLS --> success
+// TestCreateAnswerReceiveOfferWithoutEncryption: set remote offer and local
+// answer without SDES or DTLS --> success
+//
+
+// Test that we return a failure when applying a remote/local offer that doesn't
+// have cryptos enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetNonSdesOfferWhenSdesOn) {
+  Init();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer = CreateRemoteOffer(
+      options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  // Sanity-check that the generated offer really lacks a=crypto lines.
+  VerifyNoCryptoParams(offer->description(), false);
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer.
+  SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto, offer);
+  offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  SetLocalDescriptionOfferExpectError(kSdpWithoutSdesCrypto, offer);
+}
+
+// Test that we return a failure when applying a local answer that doesn't have
+// cryptos enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetLocalNonSdesAnswerWhenSdesOn) {
+ Init();
+ SessionDescriptionInterface* offer = NULL;
+ SessionDescriptionInterface* answer = NULL;
+ CreateCryptoOfferAndNonCryptoAnswer(&offer, &answer);
+ // SetRemoteDescription and SetLocalDescription will take the ownership of
+ // the offer.
+ SetRemoteDescriptionWithoutError(offer);
+ SetLocalDescriptionAnswerExpectError(kSdpWithoutSdesCrypto, answer);
+}
+
+// Test that we return a failure when applying a remote answer that doesn't
+// have cryptos enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetRemoteNonSdesAnswerWhenSdesOn) {
+  Init();
+  SessionDescriptionInterface* offer = NULL;
+  SessionDescriptionInterface* answer = NULL;
+  // |offer| carries a=crypto lines; |answer| deliberately does not.
+  CreateCryptoOfferAndNonCryptoAnswer(&offer, &answer);
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionAnswerExpectError(kSdpWithoutSdesCrypto, answer);
+}
+
+// Test that we accept an offer with a DTLS fingerprint when DTLS is on
+// and that we return an answer with a DTLS fingerprint.
+TEST_P(WebRtcSessionTest, TestReceiveDtlsOfferCreateDtlsAnswer) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SendAudioVideoStream1();
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer =
+      CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  // The remote offer should advertise a fingerprint and no a=crypto lines.
+  VerifyFingerprintStatus(offer->description(), true);
+  VerifyNoCryptoParams(offer->description(), true);
+
+  // SetRemoteDescription will take the ownership of the offer.
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Verify that we get a crypto fingerprint in the answer.
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), true);
+  // Check that we don't have an a=crypto line in the answer.
+  VerifyNoCryptoParams(answer->description(), true);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that we set a local offer with a DTLS fingerprint when DTLS is on
+// and then we accept a remote answer with a DTLS fingerprint successfully.
+TEST_P(WebRtcSessionTest, TestCreateDtlsOfferReceiveDtlsAnswer) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SendAudioVideoStream1();
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  // Verify that we get a crypto fingerprint in the offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), true);
+  // Check that we don't have an a=crypto line in the offer.
+  VerifyNoCryptoParams(offer->description(), true);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(offer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(answer != NULL);
+  // The remote answer should also carry a fingerprint and no a=crypto lines.
+  VerifyFingerprintStatus(answer->description(), true);
+  VerifyNoCryptoParams(answer->description(), true);
+
+  // SetRemoteDescription will take the ownership of the answer.
+  SetRemoteDescriptionWithoutError(answer);
+}
+
+// Test that if we support DTLS and the other side didn't offer a fingerprint,
+// we will fail to set the remote description.
+TEST_P(WebRtcSessionTest, TestReceiveNonDtlsOfferWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  options.bundle_enabled = true;
+  JsepSessionDescription* offer = CreateRemoteOffer(
+      options, cricket::SEC_REQUIRED);
+  ASSERT_TRUE(offer != NULL);
+  // The offer carries SDES cryptos but no DTLS fingerprint.
+  VerifyFingerprintStatus(offer->description(), false);
+  VerifyCryptoParams(offer->description());
+
+  // SetRemoteDescription will take the ownership of the offer.
+  SetRemoteDescriptionOfferExpectError(
+      kSdpWithoutDtlsFingerprint, offer);
+
+  offer = CreateRemoteOffer(options, cricket::SEC_REQUIRED);
+  // SetLocalDescription will take the ownership of the offer.
+  SetLocalDescriptionOfferExpectError(
+      kSdpWithoutDtlsFingerprint, offer);
+}
+
+// Test that we return a failure when applying a local answer that doesn't have
+// a DTLS fingerprint when DTLS is required.
+TEST_P(WebRtcSessionTest, TestSetLocalNonDtlsAnswerWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SessionDescriptionInterface* offer = NULL;
+  SessionDescriptionInterface* answer = NULL;
+  // |offer| carries a DTLS fingerprint; |answer| deliberately does not.
+  CreateDtlsOfferAndNonDtlsAnswer(&offer, &answer);
+
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer and answer.
+  SetRemoteDescriptionWithoutError(offer);
+  SetLocalDescriptionAnswerExpectError(
+      kSdpWithoutDtlsFingerprint, answer);
+}
+
+// Test that we return a failure when applying a remote answer that doesn't have
+// a DTLS fingerprint when DTLS is required.
+TEST_P(WebRtcSessionTest, TestSetRemoteNonDtlsAnswerWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SessionDescriptionInterface* offer = CreateOffer();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  // Build a remote answer from a throwaway SDES-only remote offer, so the
+  // answer has cryptos but no DTLS fingerprint.
+  rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+      CreateRemoteOffer(options, cricket::SEC_ENABLED));
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(temp_offer.get(), options, cricket::SEC_ENABLED);
+
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer and answer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionAnswerExpectError(
+      kSdpWithoutDtlsFingerprint, answer);
+}
+
+// Test that we create a local offer without SDES or DTLS and accept a remote
+// answer without SDES or DTLS when encryption is disabled.
+TEST_P(WebRtcSessionTest, TestCreateOfferReceiveAnswerWithoutEncryption) {
+  SendAudioVideoStream1();
+  options_.disable_encryption = true;
+  InitWithDtls(GetParam());
+
+  // Verify that the offer has neither a DTLS fingerprint nor a=crypto lines.
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), false);
+  // Check that we don't have an a=crypto line in the offer.
+  VerifyNoCryptoParams(offer->description(), false);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(offer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(answer != NULL);
+  // The unencrypted remote answer should be accepted as-is.
+  VerifyFingerprintStatus(answer->description(), false);
+  VerifyNoCryptoParams(answer->description(), false);
+
+  // SetRemoteDescription will take the ownership of the answer.
+  SetRemoteDescriptionWithoutError(answer);
+}
+
+// Test that we create a local answer without SDES or DTLS and accept a remote
+// offer without SDES or DTLS when encryption is disabled.
+TEST_P(WebRtcSessionTest, TestCreateAnswerReceiveOfferWithoutEncryption) {
+  options_.disable_encryption = true;
+  InitWithDtls(GetParam());
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer =
+      CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  // The unencrypted remote offer should be accepted as-is.
+  VerifyFingerprintStatus(offer->description(), false);
+  VerifyNoCryptoParams(offer->description(), false);
+
+  // SetRemoteDescription will take the ownership of the offer.
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Verify that the answer has neither a DTLS fingerprint nor a=crypto lines.
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), false);
+  // Check that we don't have an a=crypto line in the answer.
+  VerifyNoCryptoParams(answer->description(), false);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that setting a second local offer on top of a pending one succeeds.
+TEST_F(WebRtcSessionTest, TestSetLocalOfferTwice) {
+  Init();
+  SendNothing();
+  // SetLocalDescription takes ownership of offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  // SetLocalDescription takes ownership of offer.
+  SessionDescriptionInterface* offer2 = CreateOffer();
+  SetLocalDescriptionWithoutError(offer2);
+}
+
+// Test that setting a second remote offer on top of a pending one succeeds.
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferTwice) {
+  Init();
+  SendNothing();
+  // SetRemoteDescription takes ownership of offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* offer2 = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer2);
+}
+
+// Test that a remote offer is rejected while a local offer is pending.
+TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteOffer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  offer = CreateOffer();
+  SetRemoteDescriptionOfferExpectError("Called in wrong state: STATE_SENTOFFER",
+                                       offer);
+}
+
+// Test that a local offer is rejected while a remote offer is pending.
+TEST_F(WebRtcSessionTest, TestSetRemoteAndLocalOffer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+  offer = CreateOffer();
+  SetLocalDescriptionOfferExpectError(
+      "Called in wrong state: STATE_RECEIVEDOFFER", offer);
+}
+
+// Test that local provisional answers keep the session in STATE_SENTPRANSWER
+// and that a subsequent final answer moves it to STATE_INPROGRESS.
+TEST_F(WebRtcSessionTest, TestSetLocalPrAnswer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionExpectState(offer, WebRtcSession::STATE_RECEIVEDOFFER);
+
+  JsepSessionDescription* pranswer = static_cast<JsepSessionDescription*>(
+      CreateAnswer(NULL));
+  pranswer->set_type(SessionDescriptionInterface::kPrAnswer);
+  SetLocalDescriptionExpectState(pranswer, WebRtcSession::STATE_SENTPRANSWER);
+
+  // A second provisional answer (now with streams) keeps the same state.
+  SendAudioVideoStream1();
+  JsepSessionDescription* pranswer2 = static_cast<JsepSessionDescription*>(
+      CreateAnswer(NULL));
+  pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetLocalDescriptionExpectState(pranswer2, WebRtcSession::STATE_SENTPRANSWER);
+
+  // The final (non-provisional) answer completes the exchange.
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionExpectState(answer, WebRtcSession::STATE_INPROGRESS);
+}
+
+// Test that remote provisional answers keep the session in
+// STATE_RECEIVEDPRANSWER and that a final answer moves it to STATE_INPROGRESS.
+TEST_F(WebRtcSessionTest, TestSetRemotePrAnswer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionExpectState(offer, WebRtcSession::STATE_SENTOFFER);
+
+  JsepSessionDescription* pranswer =
+      CreateRemoteAnswer(session_->local_description());
+  pranswer->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetRemoteDescriptionExpectState(pranswer,
+                                  WebRtcSession::STATE_RECEIVEDPRANSWER);
+
+  // A second provisional answer (now with streams) keeps the same state.
+  SendAudioVideoStream1();
+  JsepSessionDescription* pranswer2 =
+      CreateRemoteAnswer(session_->local_description());
+  pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetRemoteDescriptionExpectState(pranswer2,
+                                  WebRtcSession::STATE_RECEIVEDPRANSWER);
+
+  // The final (non-provisional) answer completes the exchange.
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionExpectState(answer, WebRtcSession::STATE_INPROGRESS);
+}
+
+// Test that setting a local answer fails when no offer has been applied.
+TEST_F(WebRtcSessionTest, TestSetLocalAnswerWithoutOffer) {
+  Init();
+  SendNothing();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // The offer is only used to build an answer; it is never applied.
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(offer.get());
+  SetLocalDescriptionAnswerExpectError("Called in wrong state: STATE_INIT",
+                                       answer);
+}
+
+// Test that setting a remote answer fails when no offer has been applied.
+TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithoutOffer) {
+  Init();
+  SendNothing();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // The offer is only used to build an answer; it is never applied.
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(offer.get());
+  SetRemoteDescriptionAnswerExpectError(
+      "Called in wrong state: STATE_INIT", answer);
+}
+
+// Test that remote ICE candidates are rejected before a remote description is
+// set, accepted afterwards, deduplicated, and rejected for unknown contents.
+TEST_F(WebRtcSessionTest, TestAddRemoteCandidate) {
+  Init();
+  SendAudioVideoStream1();
+
+  cricket::Candidate candidate;
+  candidate.set_component(1);
+  JsepIceCandidate ice_candidate1(kMediaContentName0, 0, candidate);
+
+  // Fail since we have not set a remote description.
+  EXPECT_FALSE(session_->ProcessIceMessage(&ice_candidate1));
+
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  // Fail since we have not set a remote description.
+  EXPECT_FALSE(session_->ProcessIceMessage(&ice_candidate1));
+
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(
+      session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  // With a remote description in place, candidates are accepted.
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+  candidate.set_component(2);
+  JsepIceCandidate ice_candidate2(kMediaContentName0, 0, candidate);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+
+  // Verifying the candidates are copied properly from internal vector.
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(2u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+  EXPECT_EQ(kMediaContentName0, candidates->at(0)->sdp_mid());
+  EXPECT_EQ(1, candidates->at(0)->candidate().component());
+  EXPECT_EQ(2, candidates->at(1)->candidate().component());
+
+  // |ice_candidate3| is identical to |ice_candidate2|. It can be added
+  // successfully, but the total count of candidates will not increase.
+  candidate.set_component(2);
+  JsepIceCandidate ice_candidate3(kMediaContentName0, 0, candidate);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate3));
+  ASSERT_EQ(2u, candidates->count());
+
+  // A candidate for an unknown content name must be rejected.
+  JsepIceCandidate bad_ice_candidate("bad content name", 99, candidate);
+  EXPECT_FALSE(session_->ProcessIceMessage(&bad_ice_candidate));
+}
+
+// Test that a remote candidate is added to the remote session description and
+// that it is retained if the remote session description is changed.
+TEST_F(WebRtcSessionTest, TestRemoteCandidatesAddedToSessionDescription) {
+  Init();
+  cricket::Candidate candidate1;
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(1u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+
+  // Update the RemoteSessionDescription with a new session description and
+  // a candidate and check that the new remote session description contains both
+  // candidates.
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  cricket::Candidate candidate2;
+  JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+                                  candidate2);
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate2));
+  SetRemoteDescriptionWithoutError(offer);
+
+  remote_desc = session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  candidates = remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(2u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+  // Username and password have been updated with the TransportInfo of the
+  // SessionDescription, won't be equal to the original one.
+  candidate2.set_username(candidates->at(0)->candidate().username());
+  candidate2.set_password(candidates->at(0)->candidate().password());
+  EXPECT_TRUE(candidate2.IsEquivalent(candidates->at(0)->candidate()));
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(1)->sdp_mline_index());
+  // No need to verify the username and password.
+  candidate1.set_username(candidates->at(1)->candidate().username());
+  candidate1.set_password(candidates->at(1)->candidate().password());
+  EXPECT_TRUE(candidate1.IsEquivalent(candidates->at(1)->candidate()));
+
+  // Adding the same candidate again is accepted but does not duplicate it.
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+}
+
+// Test that local candidates are added to the local session description and
+// that they are retained if the local session description is changed.
+TEST_F(WebRtcSessionTest, TestLocalCandidatesAddedToSessionDescription) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  // Immediately after applying the answer no candidates have been gathered.
+  const SessionDescriptionInterface* local_desc = session_->local_description();
+  const IceCandidateCollection* candidates =
+      local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+
+  // After gathering, only the first media section holds candidates.
+  local_desc = session_->local_description();
+  candidates = local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_LT(0u, candidates->count());
+  candidates = local_desc->candidates(1);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+
+  // Update the session descriptions.
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  // The gathered candidates must survive the description update.
+  local_desc = session_->local_description();
+  candidates = local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_LT(0u, candidates->count());
+  candidates = local_desc->candidates(1);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+}
+
+// Test that we can set a remote session description with remote candidates.
+TEST_F(WebRtcSessionTest, TestSetRemoteSessionDescriptionWithCandidates) {
+  Init();
+
+  cricket::Candidate candidate1;
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate(kMediaContentName0, kMediaContentIndex0,
+                                 candidate1);
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  // Attach the candidate to the offer before applying it as the remote
+  // description.
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+  SetRemoteDescriptionWithoutError(offer);
+
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(1u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that offers and answers contains ice candidates when Ice candidates have
+// been gathered.
+TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteDescriptionWithCandidates) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  // Ice is started but candidates are not provided until SetLocalDescription
+  // is called.
+  EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  // Wait until at least one local candidate has been collected.
+  EXPECT_TRUE_WAIT(0u < observer_.mline_0_candidates_.size(),
+                   kIceCandidatesTimeout);
+
+  // A new local offer must include the gathered candidates.
+  rtc::scoped_ptr<SessionDescriptionInterface> local_offer(CreateOffer());
+
+  ASSERT_TRUE(local_offer->candidates(kMediaContentIndex0) != NULL);
+  EXPECT_LT(0u, local_offer->candidates(kMediaContentIndex0)->count());
+
+  // A new answer must include them too.
+  SessionDescriptionInterface* remote_offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(remote_offer);
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer->candidates(kMediaContentIndex0) != NULL);
+  EXPECT_LT(0u, answer->candidates(kMediaContentIndex0)->count());
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Verifies TransportProxy and media channels are created with content names
+// present in the SessionDescription.
+TEST_F(WebRtcSessionTest, TestChannelCreationsWithContentNames) {
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // CreateOffer creates a session description with the content names "audio"
+  // and "video". The goal is to modify these content names and verify that
+  // the transport channels in the WebRtcSession are created with the content
+  // names present in the SDP.
+  std::string sdp;
+  EXPECT_TRUE(offer->ToString(&sdp));
+  const std::string kAudioMid = "a=mid:audio";
+  const std::string kAudioMidReplaceStr = "a=mid:audio_content_name";
+  const std::string kVideoMid = "a=mid:video";
+  const std::string kVideoMidReplaceStr = "a=mid:video_content_name";
+
+  // Replacing |audio| with |audio_content_name|.
+  rtc::replace_substrs(kAudioMid.c_str(), kAudioMid.length(),
+                       kAudioMidReplaceStr.c_str(),
+                       kAudioMidReplaceStr.length(),
+                       &sdp);
+  // Replacing |video| with |video_content_name|.
+  rtc::replace_substrs(kVideoMid.c_str(), kVideoMid.length(),
+                       kVideoMidReplaceStr.c_str(),
+                       kVideoMidReplaceStr.length(),
+                       &sdp);
+
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+
+  SetRemoteDescriptionWithoutError(modified_offer);
+
+  SessionDescriptionInterface* answer =
+      CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+
+  // The transport channels must carry the renamed content names.
+  cricket::TransportChannel* voice_transport_channel =
+      session_->voice_rtp_transport_channel();
+  EXPECT_TRUE(voice_transport_channel != NULL);
+  EXPECT_EQ(voice_transport_channel->transport_name(), "audio_content_name");
+  cricket::TransportChannel* video_transport_channel =
+      session_->video_rtp_transport_channel();
+  EXPECT_TRUE(video_transport_channel != NULL);
+  EXPECT_EQ(video_transport_channel->transport_name(), "video_content_name");
+  EXPECT_TRUE((video_channel_ = media_engine_->GetVideoChannel(0)) != NULL);
+  EXPECT_TRUE((voice_channel_ = media_engine_->GetVoiceChannel(0)) != NULL);
+}
+
+// Test that an offer contains the correct media content descriptions based on
+// the send streams when no constraints have been set.
+TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraintsOrStreams) {
+  Init();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  ASSERT_TRUE(offer != NULL);
+  // With no streams and no constraints, the offer has audio but no video.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains the correct media content descriptions based on
+// the send streams when no constraints have been set.
+TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraints) {
+  Init();
+  // Test Audio only offer.
+  SendAudioOnlyStream2();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+
+  // Test Audio / Video offer.
+  SendAudioVideoStream1();
+  offer.reset(CreateOffer());
+  content = cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+}
+
+// Test that an offer contains no media content descriptions if
+// kOfferToReceiveVideo and kOfferToReceiveAudio constraints are set to false.
+TEST_F(WebRtcSessionTest, CreateOfferWithConstraintsWithoutStreams) {
+  Init();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  ASSERT_TRUE(offer != NULL);
+  // Both m-lines must be absent from the offer.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains only audio media content descriptions if the
+// kOfferToReceiveAudio constraint is set to true.
+TEST_F(WebRtcSessionTest, CreateAudioOnlyOfferWithConstraints) {
+  Init();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  // Audio content present, video content absent.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains audio and video media content descriptions if
+// kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set to true.
+TEST_F(WebRtcSessionTest, CreateOfferWithConstraints) {
+  Init();
+  // Test Audio / Video offer.
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+  options.offer_to_receive_video =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+
+  // Sets constraints to false and verifies that audio/video contents are
+  // removed.
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+  offer.reset(CreateOffer(options));
+
+  content = cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an answer can not be created if the last remote description is not
+// an offer.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutAnOffer) {
+  Init();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+  SetRemoteDescriptionWithoutError(answer);
+  // The last remote description is an answer, so CreateAnswer must fail.
+  EXPECT_TRUE(CreateAnswer(NULL) == NULL);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraintsOrStreams) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  // Both contents must be present and accepted in the answer.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set and the offer only contain audio.
+TEST_F(WebRtcSessionTest, CreateAudioAnswerWithoutConstraintsOrStreams) {
+  Init();
+  // Create a remote offer with audio only.
+  cricket::MediaSessionOptions options;
+
+  rtc::scoped_ptr<JsepSessionDescription> offer(
+      CreateRemoteOffer(options));
+  ASSERT_TRUE(cricket::GetFirstVideoContent(offer->description()) == NULL);
+  ASSERT_TRUE(cricket::GetFirstAudioContent(offer->description()) != NULL);
+
+  SetRemoteDescriptionWithoutError(offer.release());
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  // The answer must mirror the offer: audio accepted, no video content.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  EXPECT_TRUE(cricket::GetFirstVideoContent(answer->description()) == NULL);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set but a stream with tracks is being sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraints) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+  // Test with a stream with tracks.
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when
+// constraints have been set but no stream is sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithConstraintsWithoutStreams) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  webrtc::FakeConstraints constraints_no_receive;
+  constraints_no_receive.SetMandatoryReceiveAudio(false);
+  constraints_no_receive.SetMandatoryReceiveVideo(false);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(&constraints_no_receive));
+  // Nothing to send and nothing to receive: both contents are rejected.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when
+// constraints have been set and streams are sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithConstraints) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  webrtc::FakeConstraints constraints_no_receive;
+  constraints_no_receive.SetMandatoryReceiveAudio(false);
+  constraints_no_receive.SetMandatoryReceiveVideo(false);
+
+  // Test with a stream with tracks.
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(&constraints_no_receive));
+
+  // Because we are sending, the contents stay accepted despite the
+  // "no receive" constraints.
+  // TODO(perkj): Should the direction be set to SEND_ONLY?
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  // TODO(perkj): Should the direction be set to SEND_ONLY?
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+// Test that disabling voice activity detection removes comfort-noise (CN)
+// codecs from a locally created offer.
+TEST_F(WebRtcSessionTest, CreateOfferWithoutCNCodecs) {
+ AddCNCodecs();
+ Init();
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+ // Disabling VAD should strip the CN codecs from the offer.
+ options.voice_activity_detection = false;
+
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(
+ CreateOffer(options));
+
+ const cricket::ContentInfo* content =
+ cricket::GetFirstAudioContent(offer->description());
+ // ASSERT (not EXPECT): VerifyNoCNCodecs below dereferences |content|, so
+ // continuing with a NULL content would crash the test binary.
+ ASSERT_TRUE(content != NULL);
+ EXPECT_TRUE(VerifyNoCNCodecs(content));
+}
+
+// Test that disabling voice activity detection via constraints removes
+// comfort-noise (CN) codecs from a locally created answer.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutCNCodecs) {
+ AddCNCodecs();
+ Init();
+ // Create a remote offer with audio and video content.
+ rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ SetRemoteDescriptionWithoutError(offer.release());
+
+ webrtc::FakeConstraints constraints;
+ constraints.SetOptionalVAD(false);
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateAnswer(&constraints));
+ const cricket::ContentInfo* content =
+ cricket::GetFirstAudioContent(answer->description());
+ ASSERT_TRUE(content != NULL);
+ EXPECT_TRUE(VerifyNoCNCodecs(content));
+}
+
+// This test verifies the call setup when remote answer with audio only and
+// later updates with video.
+TEST_F(WebRtcSessionTest, TestAVOfferWithAudioOnlyAnswer) {
+ Init();
+ EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
+ EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
+
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ // Default-constructed options produce an audio-only remote answer.
+ cricket::MediaSessionOptions options;
+ SessionDescriptionInterface* answer = CreateRemoteAnswer(offer, options);
+
+ // SetLocalDescription and SetRemoteDescriptions takes ownership of offer
+ // and answer.
+ SetLocalDescriptionWithoutError(offer);
+ SetRemoteDescriptionWithoutError(answer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ // No video channel exists after the audio-only answer is applied.
+ ASSERT_TRUE(video_channel_ == NULL);
+
+ ASSERT_EQ(0u, voice_channel_->recv_streams().size());
+ ASSERT_EQ(1u, voice_channel_->send_streams().size());
+ EXPECT_EQ(kAudioTrack1, voice_channel_->send_streams()[0].id);
+
+ // Let the remote end update the session descriptions, with Audio and Video.
+ SendAudioVideoStream2();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ ASSERT_TRUE(video_channel_ != NULL);
+ ASSERT_TRUE(voice_channel_ != NULL);
+
+ ASSERT_EQ(1u, video_channel_->recv_streams().size());
+ ASSERT_EQ(1u, video_channel_->send_streams().size());
+ EXPECT_EQ(kVideoTrack2, video_channel_->recv_streams()[0].id);
+ EXPECT_EQ(kVideoTrack2, video_channel_->send_streams()[0].id);
+ ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+ ASSERT_EQ(1u, voice_channel_->send_streams().size());
+ EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+ EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+
+ // Change session back to audio only.
+ SendAudioOnlyStream2();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+
+ EXPECT_EQ(0u, video_channel_->recv_streams().size());
+ ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+ EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+ ASSERT_EQ(1u, voice_channel_->send_streams().size());
+ EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+}
+
+// This test verifies the call setup when remote answer with video only and
+// later updates with audio.
+TEST_F(WebRtcSessionTest, TestAVOfferWithVideoOnlyAnswer) {
+ Init();
+ EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
+ EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ // Remote answers with video only.
+ cricket::MediaSessionOptions options;
+ options.recv_audio = false;
+ options.recv_video = true;
+ SessionDescriptionInterface* answer = CreateRemoteAnswer(
+ offer, options, cricket::SEC_ENABLED);
+
+ // SetLocalDescription and SetRemoteDescriptions takes ownership of offer
+ // and answer.
+ SetLocalDescriptionWithoutError(offer);
+ SetRemoteDescriptionWithoutError(answer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ // No voice channel exists after the video-only answer is applied.
+ ASSERT_TRUE(voice_channel_ == NULL);
+ ASSERT_TRUE(video_channel_ != NULL);
+
+ EXPECT_EQ(0u, video_channel_->recv_streams().size());
+ ASSERT_EQ(1u, video_channel_->send_streams().size());
+ EXPECT_EQ(kVideoTrack1, video_channel_->send_streams()[0].id);
+
+ // Update the session descriptions, with Audio and Video.
+ SendAudioVideoStream2();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+ ASSERT_TRUE(voice_channel_ != NULL);
+
+ ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+ ASSERT_EQ(1u, voice_channel_->send_streams().size());
+ EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+ EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+
+ // Change session back to video only.
+ SendVideoOnlyStream2();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ ASSERT_EQ(1u, video_channel_->recv_streams().size());
+ EXPECT_EQ(kVideoTrack2, video_channel_->recv_streams()[0].id);
+ ASSERT_EQ(1u, video_channel_->send_streams().size());
+ EXPECT_EQ(kVideoTrack2, video_channel_->send_streams()[0].id);
+}
+
+// Test that crypto parameters are present in both the locally created offer
+// and the answer by default (encryption enabled).
+TEST_F(WebRtcSessionTest, VerifyCryptoParamsInSDP) {
+ Init();
+ SendAudioVideoStream1();
+ scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ VerifyCryptoParams(offer->description());
+ SetRemoteDescriptionWithoutError(offer.release());
+ scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+ VerifyCryptoParams(answer->description());
+}
+
+// Test that no crypto parameters appear in the offer when encryption has
+// been explicitly disabled in the session options.
+TEST_F(WebRtcSessionTest, VerifyNoCryptoParamsInSDP) {
+ options_.disable_encryption = true;
+ Init();
+ SendAudioVideoStream1();
+ scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ VerifyNoCryptoParams(offer->description(), false);
+}
+
+// Test answering a remote offer that carries no crypto parameters; the
+// verification logic lives in the fixture helper.
+TEST_F(WebRtcSessionTest, VerifyAnswerFromNonCryptoOffer) {
+ Init();
+ VerifyAnswerFromNonCryptoOffer();
+}
+
+// Test answering a remote offer that carries crypto parameters; the
+// verification logic lives in the fixture helper.
+TEST_F(WebRtcSessionTest, VerifyAnswerFromCryptoOffer) {
+ Init();
+ VerifyAnswerFromCryptoOffer();
+}
+
+// This test verifies that setLocalDescription fails if
+// no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
+TEST_F(WebRtcSessionTest, TestSetLocalDescriptionWithoutIce) {
+ Init();
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+ // Serialize the offer with the ICE credential lines stripped out.
+ std::string sdp;
+ RemoveIceUfragPwdLines(offer.get(), &sdp);
+ SessionDescriptionInterface* modified_offer =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ SetLocalDescriptionOfferExpectError(kSdpWithoutIceUfragPwd, modified_offer);
+}
+
+// This test verifies that setRemoteDescription fails if
+// no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
+TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithoutIce) {
+ Init();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ // Serialize the offer with the ICE credential lines stripped out.
+ std::string sdp;
+ RemoveIceUfragPwdLines(offer.get(), &sdp);
+ SessionDescriptionInterface* modified_offer =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ SetRemoteDescriptionOfferExpectError(kSdpWithoutIceUfragPwd, modified_offer);
+}
+
+// This test verifies that setLocalDescription fails if local offer has
+// too short ice ufrag and pwd strings.
+TEST_F(WebRtcSessionTest, TestSetLocalDescriptionInvalidIceCredentials) {
+ Init();
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+ std::string sdp;
+ // Modifying ice ufrag and pwd in local offer with strings smaller than the
+ // recommended values of 4 and 22 bytes respectively.
+ ModifyIceUfragPwdLines(offer.get(), "ice", "icepwd", &sdp);
+ SessionDescriptionInterface* modified_offer =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ std::string error;
+ EXPECT_FALSE(session_->SetLocalDescription(modified_offer, &error));
+
+ // Test with string greater than 256 characters (the upper length limit).
+ sdp.clear();
+ ModifyIceUfragPwdLines(offer.get(), kTooLongIceUfragPwd, kTooLongIceUfragPwd,
+ &sdp);
+ modified_offer = CreateSessionDescription(JsepSessionDescription::kOffer, sdp,
+ NULL);
+ EXPECT_FALSE(session_->SetLocalDescription(modified_offer, &error));
+}
+
+// This test verifies that setRemoteDescription fails if remote offer has
+// too short ice ufrag and pwd strings.
+TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionInvalidIceCredentials) {
+ Init();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ std::string sdp;
+ // Modifying ice ufrag and pwd in remote offer with strings smaller than the
+ // recommended values of 4 and 22 bytes respectively.
+ ModifyIceUfragPwdLines(offer.get(), "ice", "icepwd", &sdp);
+ SessionDescriptionInterface* modified_offer =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ std::string error;
+ EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
+
+ // Test with strings above the 256-character upper length limit.
+ sdp.clear();
+ ModifyIceUfragPwdLines(offer.get(), kTooLongIceUfragPwd, kTooLongIceUfragPwd,
+ &sdp);
+ modified_offer = CreateSessionDescription(JsepSessionDescription::kOffer, sdp,
+ NULL);
+ EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
+}
+
+// Test that if the remote description indicates the peer requested ICE restart
+// (via a new ufrag or pwd), the old ICE candidates are not copied,
+// and vice versa.
+TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithIceRestart) {
+ Init();
+ scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+
+ // Create the first offer.
+ std::string sdp;
+ ModifyIceUfragPwdLines(offer.get(), "0123456789012345",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* offer1 =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
+ 0, "", "", "relay", 0, "");
+ JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(offer1->AddCandidate(&ice_candidate1));
+ SetRemoteDescriptionWithoutError(offer1);
+ EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+ // The second offer has the same ufrag and pwd but different address.
+ // Same credentials means no restart, so candidates accumulate.
+ sdp.clear();
+ ModifyIceUfragPwdLines(offer.get(), "0123456789012345",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* offer2 =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+ JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(offer2->AddCandidate(&ice_candidate2));
+ SetRemoteDescriptionWithoutError(offer2);
+ EXPECT_EQ(2, session_->remote_description()->candidates(0)->count());
+
+ // The third offer has a different ufrag and different address.
+ // A new ufrag signals ICE restart: old candidates are dropped and only the
+ // candidate attached to this offer remains.
+ sdp.clear();
+ ModifyIceUfragPwdLines(offer.get(), "0123456789012333",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* offer3 =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 7000));
+ JsepIceCandidate ice_candidate3(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(offer3->AddCandidate(&ice_candidate3));
+ SetRemoteDescriptionWithoutError(offer3);
+ EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+ // The fourth offer has no candidate but a different ufrag/pwd.
+ sdp.clear();
+ ModifyIceUfragPwdLines(offer.get(), "0123456789012444",
+ "abcdefghijklmnopqrstuvyz", &sdp);
+ SessionDescriptionInterface* offer4 =
+ CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+ SetRemoteDescriptionWithoutError(offer4);
+ EXPECT_EQ(0, session_->remote_description()->candidates(0)->count());
+}
+
+// Test that candidates sent to the "video" transport do not get pushed down to
+// the "audio" transport channel when bundling.
+TEST_F(WebRtcSessionTest, TestIgnoreCandidatesForUnusedTransportWhenBundling) {
+ AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+ SendAudioVideoStream1();
+
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+
+ // Audio and video share one RTP transport channel once bundled.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ cricket::BaseChannel* voice_channel = session_->voice_channel();
+ ASSERT_TRUE(voice_channel != NULL);
+
+ // Checks if one of the transport channels contains a connection using a given
+ // port.
+ auto connection_with_remote_port = [this, voice_channel](int port) {
+ SessionStats stats;
+ session_->GetChannelTransportStats(voice_channel, &stats);
+ for (auto& kv : stats.transport_stats) {
+ for (auto& chan_stat : kv.second.channel_stats) {
+ for (auto& conn_info : chan_stat.connection_infos) {
+ if (conn_info.remote_candidate.address().port() == port) {
+ return true;
+ }
+ }
+ }
+ }
+ return false;
+ };
+
+ EXPECT_FALSE(connection_with_remote_port(5000));
+ EXPECT_FALSE(connection_with_remote_port(5001));
+ EXPECT_FALSE(connection_with_remote_port(6000));
+
+ // The way the *_WAIT checks work is they only wait if the condition fails,
+ // which does not help in the case where state is not changing. This is
+ // problematic in this test since we want to verify that adding a video
+ // candidate does _not_ change state. So we interleave candidates and assume
+ // that messages are executed in the order they were posted.
+
+ // First audio candidate.
+ cricket::Candidate candidate0;
+ candidate0.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+ candidate0.set_component(1);
+ candidate0.set_protocol("udp");
+ JsepIceCandidate ice_candidate0(kMediaContentName0, kMediaContentIndex0,
+ candidate0);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate0));
+
+ // Video candidate.
+ cricket::Candidate candidate1;
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+ candidate1.set_component(1);
+ candidate1.set_protocol("udp");
+ JsepIceCandidate ice_candidate1(kMediaContentName1, kMediaContentIndex1,
+ candidate1);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+
+ // Second audio candidate.
+ cricket::Candidate candidate2;
+ candidate2.set_address(rtc::SocketAddress("1.1.1.1", 5001));
+ candidate2.set_component(1);
+ candidate2.set_protocol("udp");
+ JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+ candidate2);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+
+ EXPECT_TRUE_WAIT(connection_with_remote_port(5000), 1000);
+ EXPECT_TRUE_WAIT(connection_with_remote_port(5001), 1000);
+
+ // No need here for a _WAIT check since we are checking that state hasn't
+ // changed: if this is false we would be doing waits for nothing and if this
+ // is true then there will be no messages processed anyways.
+ EXPECT_FALSE(connection_with_remote_port(6000));
+}
+
+// kBundlePolicyBalanced BUNDLE policy and answer contains BUNDLE.
+TEST_F(WebRtcSessionTest, TestBalancedBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ // Balanced policy: separate transport channels until the answer accepts
+ // BUNDLE.
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyBalanced BUNDLE policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestBalancedNoBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+
+ // Remove BUNDLE from the answer.
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateRemoteAnswer(session_->local_description()));
+ cricket::SessionDescription* answer_copy = answer->description()->Copy();
+ answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ JsepSessionDescription* modified_answer =
+ new JsepSessionDescription(JsepSessionDescription::kAnswer);
+ modified_answer->Initialize(answer_copy, "1", "1");
+ SetRemoteDescriptionWithoutError(modified_answer);
+
+ // Without BUNDLE in the answer, the channels stay separate.
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ // max-bundle shares a single transport channel as soon as the local offer
+ // is applied, before any answer arrives.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the answer, but no
+// audio content in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleRejectAudio) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ cricket::MediaSessionOptions recv_options;
+ recv_options.recv_audio = false;
+ recv_options.recv_video = true;
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description(), recv_options);
+ SetRemoteDescriptionWithoutError(answer);
+
+ // Audio was rejected, so its channel is gone but the bundled video
+ // transport survives.
+ EXPECT_TRUE(nullptr == session_->voice_channel());
+ EXPECT_TRUE(nullptr != session_->video_rtp_transport_channel());
+
+ // Closing the session must tear down all remaining transport channels.
+ session_->Close();
+ EXPECT_TRUE(nullptr == session_->voice_rtp_transport_channel());
+ EXPECT_TRUE(nullptr == session_->voice_rtcp_transport_channel());
+ EXPECT_TRUE(nullptr == session_->video_rtp_transport_channel());
+ EXPECT_TRUE(nullptr == session_->video_rtcp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+
+ // Remove BUNDLE from the answer.
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateRemoteAnswer(session_->local_description()));
+ cricket::SessionDescription* answer_copy = answer->description()->Copy();
+ answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ JsepSessionDescription* modified_answer =
+ new JsepSessionDescription(JsepSessionDescription::kAnswer);
+ modified_answer->Initialize(answer_copy, "1", "1");
+ SetRemoteDescriptionWithoutError(modified_answer);
+
+ // With max-bundle the single shared transport channel is kept even though
+ // the answer omitted the BUNDLE group.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the remote offer.
+TEST_F(WebRtcSessionTest, TestMaxBundleBundleInRemoteOffer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+
+ // The shared transport channel exists as soon as the remote offer with
+ // BUNDLE is applied.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer = CreateAnswer(nullptr);
+ SetLocalDescriptionWithoutError(answer);
+
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy but no BUNDLE in the remote offer.
+TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInRemoteOffer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ // Remove BUNDLE from the offer.
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ cricket::SessionDescription* offer_copy = offer->description()->Copy();
+ offer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ JsepSessionDescription* modified_offer =
+ new JsepSessionDescription(JsepSessionDescription::kOffer);
+ modified_offer->Initialize(offer_copy, "1", "1");
+
+ // Expect an error when applying the remote description: max-bundle cannot
+ // accept an offer without a BUNDLE group.
+ SetRemoteDescriptionExpectError(JsepSessionDescription::kOffer,
+ kCreateChannelFailed, modified_offer);
+}
+
+// kBundlePolicyMaxCompat bundle policy and answer contains BUNDLE.
+TEST_F(WebRtcSessionTest, TestMaxCompatBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ // Like balanced, max-compat keeps separate channels until the answer.
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ // This should lead to an audio-only call but isn't implemented
+ // correctly yet.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxCompat BUNDLE policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxCompatNoBundleInAnswer) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
+ SendAudioVideoStream1();
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+
+ // Remove BUNDLE from the answer.
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateRemoteAnswer(session_->local_description()));
+ cricket::SessionDescription* answer_copy = answer->description()->Copy();
+ answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ JsepSessionDescription* modified_answer =
+ new JsepSessionDescription(JsepSessionDescription::kAnswer);
+ modified_answer->Initialize(answer_copy, "1", "1");
+ SetRemoteDescriptionWithoutError(modified_answer);
+
+ // Channels stay separate since the answer did not accept BUNDLE.
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle and then we call SetRemoteDescription first.
+TEST_F(WebRtcSessionTest, TestMaxBundleWithSetRemoteDescriptionFirst) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetRemoteDescriptionWithoutError(offer);
+
+ // max-bundle bundles immediately even when negotiation starts with the
+ // remote description.
+ EXPECT_EQ(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+}
+
+// rtcp-mux 'require' policy: no separate RTCP transport channels should ever
+// be created, before or after the answer is applied.
+TEST_F(WebRtcSessionTest, TestRequireRtcpMux) {
+ InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyRequire);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+ EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+ EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+}
+
+// rtcp-mux 'negotiate' policy: RTCP transport channels exist after the local
+// offer and are torn down once the answer negotiates rtcp-mux.
+TEST_F(WebRtcSessionTest, TestNegotiateRtcpMux) {
+ InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyNegotiate);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_TRUE(session_->voice_rtcp_transport_channel() != NULL);
+ EXPECT_TRUE(session_->video_rtcp_transport_channel() != NULL);
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+ EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+}
+
+// This test verifies that SetLocalDescription and SetRemoteDescription fails
+// if BUNDLE is enabled but rtcp-mux is disabled in m-lines.
+TEST_F(WebRtcSessionTest, TestDisabledRtcpMuxWithBundleEnabled) {
+ Init();
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ std::string offer_str;
+ offer->ToString(&offer_str);
+ // Disable rtcp-mux by corrupting the attribute name in the serialized SDP
+ // ("rtcp-mux" -> "xrtcp-mux"), which the parser then ignores.
+ const std::string rtcp_mux = "rtcp-mux";
+ const std::string xrtcp_mux = "xrtcp-mux";
+ rtc::replace_substrs(rtcp_mux.c_str(), rtcp_mux.length(),
+ xrtcp_mux.c_str(), xrtcp_mux.length(),
+ &offer_str);
+ JsepSessionDescription* local_offer =
+ new JsepSessionDescription(JsepSessionDescription::kOffer);
+ EXPECT_TRUE((local_offer)->Initialize(offer_str, NULL));
+ SetLocalDescriptionOfferExpectError(kBundleWithoutRtcpMux, local_offer);
+ JsepSessionDescription* remote_offer =
+ new JsepSessionDescription(JsepSessionDescription::kOffer);
+ EXPECT_TRUE((remote_offer)->Initialize(offer_str, NULL));
+ SetRemoteDescriptionOfferExpectError(kBundleWithoutRtcpMux, remote_offer);
+ // Trying unmodified SDP.
+ SetLocalDescriptionWithoutError(offer);
+}
+
+// Test that SetAudioPlayout toggles the output volume of a receive stream
+// between 0 (disabled) and 1 (enabled).
+TEST_F(WebRtcSessionTest, SetAudioPlayout) {
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+ ASSERT_TRUE(channel != NULL);
+ ASSERT_EQ(1u, channel->recv_streams().size());
+ uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
+ double volume;
+ EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+ EXPECT_EQ(1, volume);
+ session_->SetAudioPlayout(receive_ssrc, false);
+ EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+ EXPECT_EQ(0, volume);
+ session_->SetAudioPlayout(receive_ssrc, true);
+ EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+ EXPECT_EQ(1, volume);
+}
+
+// Test that SetAudioSend mutes/unmutes the send stream, applies/clears the
+// audio options, and attaches/detaches the renderer's sink.
+TEST_F(WebRtcSessionTest, SetAudioSend) {
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+ ASSERT_TRUE(channel != NULL);
+ ASSERT_EQ(1u, channel->send_streams().size());
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+ EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+
+ cricket::AudioOptions options;
+ options.echo_cancellation.Set(true);
+
+ rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
+ // Disabling send mutes the stream; the options are not applied while muted.
+ session_->SetAudioSend(send_ssrc, false, options, renderer.get());
+ EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
+ EXPECT_FALSE(channel->options().echo_cancellation.IsSet());
+ EXPECT_TRUE(renderer->sink() != NULL);
+
+ // This will trigger SetSink(NULL) to the |renderer|.
+ session_->SetAudioSend(send_ssrc, true, options, NULL);
+ EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+ bool value;
+ EXPECT_TRUE(channel->options().echo_cancellation.Get(&value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(renderer->sink() == NULL);
+}
+
+// Test that destroying the audio renderer before the session does not leave
+// a dangling sink pointer (regression guard for the OnClose() path).
+TEST_F(WebRtcSessionTest, AudioRendererForLocalStream) {
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+ ASSERT_TRUE(channel != NULL);
+ ASSERT_EQ(1u, channel->send_streams().size());
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+
+ rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
+ cricket::AudioOptions options;
+ session_->SetAudioSend(send_ssrc, true, options, renderer.get());
+ EXPECT_TRUE(renderer->sink() != NULL);
+
+ // Delete the |renderer| and it will trigger OnClose() to the sink, and this
+ // will invalidate the |renderer_| pointer in the sink and prevent getting a
+ // SetSink(NULL) callback afterwards.
+ renderer.reset();
+
+ // This will trigger SetSink(NULL) if no OnClose() callback.
+ session_->SetAudioSend(send_ssrc, true, options, NULL);
+}
+
+// Test that SetVideoPlayout attaches and detaches a renderer for the video
+// receive stream.
+TEST_F(WebRtcSessionTest, SetVideoPlayout) {
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
+ ASSERT_TRUE(channel != NULL);
+ ASSERT_LT(0u, channel->renderers().size());
+ EXPECT_TRUE(channel->renderers().begin()->second == NULL);
+ ASSERT_EQ(1u, channel->recv_streams().size());
+ uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
+ cricket::FakeVideoRenderer renderer;
+ session_->SetVideoPlayout(receive_ssrc, true, &renderer);
+ EXPECT_TRUE(channel->renderers().begin()->second == &renderer);
+ session_->SetVideoPlayout(receive_ssrc, false, &renderer);
+ EXPECT_TRUE(channel->renderers().begin()->second == NULL);
+}
+
+// Test that SetVideoSend mutes and unmutes the video send stream.
+TEST_F(WebRtcSessionTest, SetVideoSend) {
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
+ ASSERT_TRUE(channel != NULL);
+ ASSERT_EQ(1u, channel->send_streams().size());
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+ EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+ cricket::VideoOptions* options = NULL;
+ session_->SetVideoSend(send_ssrc, false, options);
+ EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
+ session_->SetVideoSend(send_ssrc, true, options);
+ EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+}
+
+TEST_F(WebRtcSessionTest, CanNotInsertDtmf) {
+ TestCanInsertDtmf(false);
+}
+
+TEST_F(WebRtcSessionTest, CanInsertDtmf) {
+ TestCanInsertDtmf(true);
+}
+
+TEST_F(WebRtcSessionTest, InsertDtmf) {
+ // Setup
+ Init();
+ SendAudioVideoStream1();
+ CreateAndSetRemoteOfferAndLocalAnswer();
+ FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+ EXPECT_EQ(0U, channel->dtmf_info_queue().size());
+
+ // Insert DTMF
+ const int expected_flags = DF_SEND;
+ const int expected_duration = 90;
+ session_->InsertDtmf(kAudioTrack1, 0, expected_duration);
+ session_->InsertDtmf(kAudioTrack1, 1, expected_duration);
+ session_->InsertDtmf(kAudioTrack1, 2, expected_duration);
+
+ // Verify
+ ASSERT_EQ(3U, channel->dtmf_info_queue().size());
+ const uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+ EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[0], send_ssrc, 0,
+ expected_duration, expected_flags));
+ EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[1], send_ssrc, 1,
+ expected_duration, expected_flags));
+ EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[2], send_ssrc, 2,
+ expected_duration, expected_flags));
+}
+
+// This test verifies the |initial_offerer| flag when session initiates the
+// call.
+TEST_F(WebRtcSessionTest, TestInitiatorFlagAsOriginator) {
+ Init();
+ EXPECT_FALSE(session_->initial_offerer());
+ SessionDescriptionInterface* offer = CreateOffer();
+ SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+ SetLocalDescriptionWithoutError(offer);
+ EXPECT_TRUE(session_->initial_offerer());
+ SetRemoteDescriptionWithoutError(answer);
+ EXPECT_TRUE(session_->initial_offerer());
+}
+
+// This test verifies the |initial_offerer| flag when session receives the call.
+TEST_F(WebRtcSessionTest, TestInitiatorFlagAsReceiver) {
+ Init();
+ EXPECT_FALSE(session_->initial_offerer());
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+
+ EXPECT_FALSE(session_->initial_offerer());
+ SetLocalDescriptionWithoutError(answer);
+ EXPECT_FALSE(session_->initial_offerer());
+}
+
+// Verifing local offer and remote answer have matching m-lines as per RFC 3264.
+TEST_F(WebRtcSessionTest, TestIncorrectMLinesInRemoteAnswer) {
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateRemoteAnswer(session_->local_description()));
+
+ cricket::SessionDescription* answer_copy = answer->description()->Copy();
+ answer_copy->RemoveContentByName("video");
+ JsepSessionDescription* modified_answer =
+ new JsepSessionDescription(JsepSessionDescription::kAnswer);
+
+ EXPECT_TRUE(modified_answer->Initialize(answer_copy,
+ answer->session_id(),
+ answer->session_version()));
+ SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer);
+
+ // Different content names.
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ const std::string kAudioMid = "a=mid:audio";
+ const std::string kAudioMidReplaceStr = "a=mid:audio_content_name";
+ rtc::replace_substrs(kAudioMid.c_str(), kAudioMid.length(),
+ kAudioMidReplaceStr.c_str(),
+ kAudioMidReplaceStr.length(),
+ &sdp);
+ SessionDescriptionInterface* modified_answer1 =
+ CreateSessionDescription(JsepSessionDescription::kAnswer, sdp, NULL);
+ SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer1);
+
+ // Different media types.
+ EXPECT_TRUE(answer->ToString(&sdp));
+ const std::string kAudioMline = "m=audio";
+ const std::string kAudioMlineReplaceStr = "m=video";
+ rtc::replace_substrs(kAudioMline.c_str(), kAudioMline.length(),
+ kAudioMlineReplaceStr.c_str(),
+ kAudioMlineReplaceStr.length(),
+ &sdp);
+ SessionDescriptionInterface* modified_answer2 =
+ CreateSessionDescription(JsepSessionDescription::kAnswer, sdp, NULL);
+ SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer2);
+
+ SetRemoteDescriptionWithoutError(answer.release());
+}
+
+// Verifying remote offer and local answer have matching m-lines as per
+// RFC 3264.
+TEST_F(WebRtcSessionTest, TestIncorrectMLinesInLocalAnswer) {
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+
+ cricket::SessionDescription* answer_copy = answer->description()->Copy();
+ answer_copy->RemoveContentByName("video");
+ JsepSessionDescription* modified_answer =
+ new JsepSessionDescription(JsepSessionDescription::kAnswer);
+
+ EXPECT_TRUE(modified_answer->Initialize(answer_copy,
+ answer->session_id(),
+ answer->session_version()));
+ SetLocalDescriptionAnswerExpectError(kMlineMismatch, modified_answer);
+ SetLocalDescriptionWithoutError(answer);
+}
+
+// This test verifies that WebRtcSession does not start candidate allocation
+// before SetLocalDescription is called.
+TEST_F(WebRtcSessionTest, TestIceStartAfterSetLocalDescriptionOnly) {
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ cricket::Candidate candidate;
+ candidate.set_component(1);
+ JsepIceCandidate ice_candidate(kMediaContentName0, kMediaContentIndex0,
+ candidate);
+ EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+ cricket::Candidate candidate1;
+ candidate1.set_component(1);
+ JsepIceCandidate ice_candidate1(kMediaContentName1, kMediaContentIndex1,
+ candidate1);
+ EXPECT_TRUE(offer->AddCandidate(&ice_candidate1));
+ SetRemoteDescriptionWithoutError(offer);
+ ASSERT_TRUE(session_->voice_rtp_transport_channel() != NULL);
+ ASSERT_TRUE(session_->video_rtp_transport_channel() != NULL);
+
+ // Pump for 1 second and verify that no candidates are generated.
+ rtc::Thread::Current()->ProcessMessages(1000);
+ EXPECT_TRUE(observer_.mline_0_candidates_.empty());
+ EXPECT_TRUE(observer_.mline_1_candidates_.empty());
+
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+ EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+}
+
+// This test verifies that crypto parameter is updated in local session
+// description as per security policy set in MediaSessionDescriptionFactory.
+TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescription) {
+ Init();
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+ // Making sure SetLocalDescription correctly sets crypto value in
+ // SessionDescription object after de-serialization of sdp string. The value
+ // will be set as per MediaSessionDescriptionFactory.
+ std::string offer_str;
+ offer->ToString(&offer_str);
+ SessionDescriptionInterface* jsep_offer_str =
+ CreateSessionDescription(JsepSessionDescription::kOffer, offer_str, NULL);
+ SetLocalDescriptionWithoutError(jsep_offer_str);
+ EXPECT_TRUE(session_->voice_channel()->secure_required());
+ EXPECT_TRUE(session_->video_channel()->secure_required());
+}
+
+// This test verifies the crypto parameter when security is disabled.
+TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescriptionWithDisabled) {
+ options_.disable_encryption = true;
+ Init();
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+ // Making sure SetLocalDescription correctly sets crypto value in
+ // SessionDescription object after de-serialization of sdp string. The value
+ // will be set as per MediaSessionDescriptionFactory.
+ std::string offer_str;
+ offer->ToString(&offer_str);
+ SessionDescriptionInterface* jsep_offer_str =
+ CreateSessionDescription(JsepSessionDescription::kOffer, offer_str, NULL);
+ SetLocalDescriptionWithoutError(jsep_offer_str);
+ EXPECT_FALSE(session_->voice_channel()->secure_required());
+ EXPECT_FALSE(session_->video_channel()->secure_required());
+}
+
+// This test verifies that an answer contains new ufrag and password if an offer
+// with new ufrag and password is received.
+TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
+ Init();
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ rtc::scoped_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options));
+ SetRemoteDescriptionWithoutError(offer.release());
+
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateAnswer(NULL));
+ SetLocalDescriptionWithoutError(answer.release());
+
+ // Receive an offer with new ufrag and password.
+ options.transport_options.ice_restart = true;
+ rtc::scoped_ptr<JsepSessionDescription> updated_offer1(
+ CreateRemoteOffer(options, session_->remote_description()));
+ SetRemoteDescriptionWithoutError(updated_offer1.release());
+
+ rtc::scoped_ptr<SessionDescriptionInterface> updated_answer1(
+ CreateAnswer(NULL));
+
+ CompareIceUfragAndPassword(updated_answer1->description(),
+ session_->local_description()->description(),
+ false);
+
+ SetLocalDescriptionWithoutError(updated_answer1.release());
+}
+
+// This test verifies that an answer contains old ufrag and password if an offer
+// with old ufrag and password is received.
+TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
+ Init();
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ rtc::scoped_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options));
+ SetRemoteDescriptionWithoutError(offer.release());
+
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ CreateAnswer(NULL));
+ SetLocalDescriptionWithoutError(answer.release());
+
+ // Receive an offer without changed ufrag or password.
+ options.transport_options.ice_restart = false;
+ rtc::scoped_ptr<JsepSessionDescription> updated_offer2(
+ CreateRemoteOffer(options, session_->remote_description()));
+ SetRemoteDescriptionWithoutError(updated_offer2.release());
+
+ rtc::scoped_ptr<SessionDescriptionInterface> updated_answer2(
+ CreateAnswer(NULL));
+
+ CompareIceUfragAndPassword(updated_answer2->description(),
+ session_->local_description()->description(),
+ true);
+
+ SetLocalDescriptionWithoutError(updated_answer2.release());
+}
+
+TEST_F(WebRtcSessionTest, TestSessionContentError) {
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+ const std::string session_id_orig = offer->session_id();
+ const std::string session_version_orig = offer->session_version();
+ SetLocalDescriptionWithoutError(offer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ video_channel_->set_fail_set_send_codecs(true);
+
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionAnswerExpectError("ERROR_CONTENT", answer);
+
+ // Test that after a content error, setting any description will
+ // result in an error.
+ video_channel_->set_fail_set_send_codecs(false);
+ answer = CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionExpectError("", "ERROR_CONTENT", answer);
+ offer = CreateRemoteOffer();
+ SetLocalDescriptionExpectError("", "ERROR_CONTENT", offer);
+}
+
+// Runs the loopback call test with BUNDLE and STUN disabled.
+TEST_F(WebRtcSessionTest, TestIceStatesBasic) {
+ // Lets try with only UDP ports.
+ allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+ TestLoopbackCall();
+}
+
+TEST_F(WebRtcSessionTest, TestIceStatesBasicIPv6) {
+ allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_ENABLE_IPV6 |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+
+ // best connection is IPv6 since it has higher network preference.
+ LoopbackNetworkConfiguration config;
+ config.test_ipv6_network_ = true;
+ config.best_connection_after_initial_ice_converged_ =
+ LoopbackNetworkConfiguration::ExpectedBestConnection(0, 1);
+
+ TestLoopbackCall(config);
+}
+
+// Runs the loopback call test with BUNDLE and STUN enabled.
+TEST_F(WebRtcSessionTest, TestIceStatesBundle) {
+ allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+ TestLoopbackCall();
+}
+
+TEST_F(WebRtcSessionTest, TestRtpDataChannel) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kEnableRtpDataChannels, true);
+ Init();
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestRtpDataChannelConstraintTakesPrecedence) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kEnableRtpDataChannels, true);
+ options_.disable_sctp_data_channels = false;
+
+ InitWithDtls(GetParam());
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestCreateOfferWithSctpEnabledWithoutStreams) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ InitWithDtls(GetParam());
+
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ EXPECT_TRUE(offer->description()->GetContentByName("data") == NULL);
+ EXPECT_TRUE(offer->description()->GetTransportInfoByName("data") == NULL);
+}
+
+TEST_P(WebRtcSessionTest, TestCreateAnswerWithSctpInOfferAndNoStreams) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ SetFactoryDtlsSrtp();
+ InitWithDtls(GetParam());
+
+ // Create remote offer with SCTP.
+ cricket::MediaSessionOptions options;
+ options.data_channel_type = cricket::DCT_SCTP;
+ JsepSessionDescription* offer =
+ CreateRemoteOffer(options, cricket::SEC_DISABLED);
+ SetRemoteDescriptionWithoutError(offer);
+
+ // Verifies the answer contains SCTP.
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+ EXPECT_TRUE(answer != NULL);
+ EXPECT_TRUE(answer->description()->GetContentByName("data") != NULL);
+ EXPECT_TRUE(answer->description()->GetTransportInfoByName("data") != NULL);
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelWithoutDtls) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false);
+ InitWithDtls(GetParam());
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_NONE, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelWithDtls) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ InitWithDtls(GetParam());
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestDisableSctpDataChannels) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ options_.disable_sctp_data_channels = true;
+ InitWithDtls(GetParam());
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_NONE, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelSendPortParsing) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ const int new_send_port = 9998;
+ const int new_recv_port = 7775;
+
+ InitWithDtls(GetParam());
+ SetFactoryDtlsSrtp();
+
+ // By default, don't actually add the codecs to desc_factory_; they don't
+ // actually get serialized for SCTP in BuildMediaDescription(). Instead,
+ // let the session description get parsed. That'll get the proper codecs
+ // into the stream.
+ cricket::MediaSessionOptions options;
+ JsepSessionDescription* offer = CreateRemoteOfferWithSctpPort(
+ "stream1", new_send_port, options);
+
+ // SetRemoteDescription will take the ownership of the offer.
+ SetRemoteDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = ChangeSDPSctpPort(
+ new_recv_port, CreateAnswer(NULL));
+ ASSERT_TRUE(answer != NULL);
+
+ // Now set the local description, which'll take ownership of the answer.
+ SetLocalDescriptionWithoutError(answer);
+
+ // TEST PLAN: Set the port number to something new, set it in the SDP,
+ // and pass it all the way down.
+ EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+ CreateDataChannel();
+
+ cricket::FakeDataMediaChannel* ch = data_engine_->GetChannel(0);
+ int portnum = -1;
+ ASSERT_TRUE(ch != NULL);
+ ASSERT_EQ(1UL, ch->send_codecs().size());
+ EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->send_codecs()[0].id);
+ EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
+ ch->send_codecs()[0].name.c_str()));
+ EXPECT_TRUE(ch->send_codecs()[0].GetParam(cricket::kCodecParamPort,
+ &portnum));
+ EXPECT_EQ(new_send_port, portnum);
+
+ ASSERT_EQ(1UL, ch->recv_codecs().size());
+ EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->recv_codecs()[0].id);
+ EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
+ ch->recv_codecs()[0].name.c_str()));
+ EXPECT_TRUE(ch->recv_codecs()[0].GetParam(cricket::kCodecParamPort,
+ &portnum));
+ EXPECT_EQ(new_recv_port, portnum);
+}
+
+// Verifies that when a session's DataChannel receives an OPEN message,
+// WebRtcSession signals the DataChannel creation request with the expected
+// config.
+TEST_P(WebRtcSessionTest, TestSctpDataChannelOpenMessage) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ InitWithDtls(GetParam());
+
+ SetLocalDescriptionWithDataChannel();
+ EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+
+ webrtc::DataChannelInit config;
+ config.id = 1;
+ rtc::Buffer payload;
+ webrtc::WriteDataChannelOpenMessage("a", config, &payload);
+ cricket::ReceiveDataParams params;
+ params.ssrc = config.id;
+ params.type = cricket::DMT_CONTROL;
+
+ cricket::DataChannel* data_channel = session_->data_channel();
+ data_channel->SignalDataReceived(data_channel, params, payload);
+
+ EXPECT_EQ("a", last_data_channel_label_);
+ EXPECT_EQ(config.id, last_data_channel_config_.id);
+ EXPECT_FALSE(last_data_channel_config_.negotiated);
+ EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker,
+ last_data_channel_config_.open_handshake_role);
+}
+
+TEST_P(WebRtcSessionTest, TestUsesProvidedCertificate) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ FakeDtlsIdentityStore::GenerateCertificate();
+
+ PeerConnectionInterface::RTCConfiguration configuration;
+ configuration.certificates.push_back(certificate);
+ Init(nullptr, configuration);
+ EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+ EXPECT_EQ(session_->certificate_for_testing(), certificate);
+}
+
+// Verifies that CreateOffer succeeds when CreateOffer is called before async
+// identity generation is finished (even if a certificate is provided this is
+// an async op).
+TEST_P(WebRtcSessionTest, TestCreateOfferBeforeIdentityRequestReturnSuccess) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtls(GetParam());
+
+ EXPECT_TRUE(session_->waiting_for_certificate_for_testing());
+ SendAudioVideoStream1();
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+ EXPECT_TRUE(offer != NULL);
+ VerifyNoCryptoParams(offer->description(), true);
+ VerifyFingerprintStatus(offer->description(), true);
+}
+
+// Verifies that CreateAnswer succeeds when CreateOffer is called before async
+// identity generation is finished (even if a certificate is provided this is
+// an async op).
+TEST_P(WebRtcSessionTest, TestCreateAnswerBeforeIdentityRequestReturnSuccess) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtls(GetParam());
+ SetFactoryDtlsSrtp();
+
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ scoped_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options, cricket::SEC_DISABLED));
+ ASSERT_TRUE(offer.get() != NULL);
+ SetRemoteDescriptionWithoutError(offer.release());
+
+ rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+ EXPECT_TRUE(answer != NULL);
+ VerifyNoCryptoParams(answer->description(), true);
+ VerifyFingerprintStatus(answer->description(), true);
+}
+
+// Verifies that CreateOffer succeeds when CreateOffer is called after async
+// identity generation is finished (even if a certificate is provided this is
+// an async op).
+TEST_P(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnSuccess) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtls(GetParam());
+
+ EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ EXPECT_TRUE(offer != NULL);
+}
+
+// Verifies that CreateOffer fails when CreateOffer is called after async
+// identity generation fails.
+TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnFailure) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtlsIdentityGenFail();
+
+ EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+ rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ EXPECT_TRUE(offer == NULL);
+}
+
+// Verifies that CreateOffer succeeds when Multiple CreateOffer calls are made
+// before async identity generation is finished.
+TEST_P(WebRtcSessionTest,
+ TestMultipleCreateOfferBeforeIdentityRequestReturnSuccess) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ VerifyMultipleAsyncCreateDescription(GetParam(),
+ CreateSessionDescriptionRequest::kOffer);
+}
+
+// Verifies that CreateOffer fails when Multiple CreateOffer calls are made
+// before async identity generation fails.
+TEST_F(WebRtcSessionTest,
+ TestMultipleCreateOfferBeforeIdentityRequestReturnFailure) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+ CreateSessionDescriptionRequest::kOffer);
+}
+
+// Verifies that CreateAnswer succeeds when Multiple CreateAnswer calls are made
+// before async identity generation is finished.
+TEST_P(WebRtcSessionTest,
+ TestMultipleCreateAnswerBeforeIdentityRequestReturnSuccess) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ VerifyMultipleAsyncCreateDescription(
+ GetParam(), CreateSessionDescriptionRequest::kAnswer);
+}
+
+// Verifies that CreateAnswer fails when Multiple CreateAnswer calls are made
+// before async identity generation fails.
+TEST_F(WebRtcSessionTest,
+ TestMultipleCreateAnswerBeforeIdentityRequestReturnFailure) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+ CreateSessionDescriptionRequest::kAnswer);
+}
+
+// Verifies that setRemoteDescription fails when DTLS is disabled and the remote
+// offer has no SDES crypto but only DTLS fingerprint.
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferFailIfDtlsDisabledAndNoCrypto) {
+ // Init without DTLS.
+ Init();
+ // Create a remote offer with secured transport disabled.
+ cricket::MediaSessionOptions options;
+ JsepSessionDescription* offer(CreateRemoteOffer(
+ options, cricket::SEC_DISABLED));
+ // Adds a DTLS fingerprint to the remote offer.
+ cricket::SessionDescription* sdp = offer->description();
+ TransportInfo* audio = sdp->GetTransportInfoByName("audio");
+ ASSERT_TRUE(audio != NULL);
+ ASSERT_TRUE(audio->description.identity_fingerprint.get() == NULL);
+ audio->description.identity_fingerprint.reset(
+ rtc::SSLFingerprint::CreateFromRfc4572(
+ rtc::DIGEST_SHA_256, kFakeDtlsFingerprint));
+ SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto,
+ offer);
+}
+
+// This test verifies DSCP is properly applied on the media channels.
+TEST_F(WebRtcSessionTest, TestDscpConstraint) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kEnableDscp, true);
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ SetLocalDescriptionWithoutError(offer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ ASSERT_TRUE(video_channel_ != NULL);
+ ASSERT_TRUE(voice_channel_ != NULL);
+ const cricket::AudioOptions& audio_options = voice_channel_->options();
+ const cricket::VideoOptions& video_options = video_channel_->options();
+ EXPECT_TRUE(audio_options.dscp.IsSet());
+ EXPECT_TRUE(audio_options.dscp.GetWithDefaultIfUnset(false));
+ EXPECT_TRUE(video_options.dscp.IsSet());
+ EXPECT_TRUE(video_options.dscp.GetWithDefaultIfUnset(false));
+}
+
+TEST_F(WebRtcSessionTest, TestSuspendBelowMinBitrateConstraint) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
+ true);
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ SetLocalDescriptionWithoutError(offer);
+
+ video_channel_ = media_engine_->GetVideoChannel(0);
+
+ ASSERT_TRUE(video_channel_ != NULL);
+ const cricket::VideoOptions& video_options = video_channel_->options();
+ EXPECT_TRUE(
+ video_options.suspend_below_min_bitrate.GetWithDefaultIfUnset(false));
+}
+
+TEST_F(WebRtcSessionTest, TestNumUnsignalledRecvStreamsConstraint) {
+ // Number of unsignalled receiving streams should be between 0 and
+ // kMaxUnsignalledRecvStreams.
+ SetAndVerifyNumUnsignalledRecvStreams(10, 10);
+ SetAndVerifyNumUnsignalledRecvStreams(kMaxUnsignalledRecvStreams + 1,
+ kMaxUnsignalledRecvStreams);
+ SetAndVerifyNumUnsignalledRecvStreams(-1, 0);
+}
+
+TEST_F(WebRtcSessionTest, TestCombinedAudioVideoBweConstraint) {
+ constraints_.reset(new FakeConstraints());
+ constraints_->AddOptional(
+ webrtc::MediaConstraintsInterface::kCombinedAudioVideoBwe,
+ true);
+ Init();
+ SendAudioVideoStream1();
+ SessionDescriptionInterface* offer = CreateOffer();
+
+ SetLocalDescriptionWithoutError(offer);
+
+ voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+ ASSERT_TRUE(voice_channel_ != NULL);
+ const cricket::AudioOptions& audio_options = voice_channel_->options();
+ EXPECT_TRUE(
+ audio_options.combined_audio_video_bwe.GetWithDefaultIfUnset(false));
+}
+
+// Tests that we can renegotiate new media content with ICE candidates in the
+// new remote SDP.
+TEST_P(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesInSdp) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtls(GetParam());
+ SetFactoryDtlsSrtp();
+
+ SendAudioOnlyStream2();
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+ SetRemoteDescriptionWithoutError(answer);
+
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+
+ cricket::Candidate candidate1;
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+ candidate1.set_component(1);
+ JsepIceCandidate ice_candidate(kMediaContentName1, kMediaContentIndex1,
+ candidate1);
+ EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+ SetRemoteDescriptionWithoutError(offer);
+
+ answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+}
+
+// Tests that we can renegotiate new media content with ICE candidates separated
+// from the remote SDP.
+TEST_P(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesSeparated) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ InitWithDtls(GetParam());
+ SetFactoryDtlsSrtp();
+
+ SendAudioOnlyStream2();
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+ SetRemoteDescriptionWithoutError(answer);
+
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+ offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+ SetRemoteDescriptionWithoutError(offer);
+
+ cricket::Candidate candidate1;
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+ candidate1.set_component(1);
+ JsepIceCandidate ice_candidate(kMediaContentName1, kMediaContentIndex1,
+ candidate1);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate));
+
+ answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+}
+// Tests that RTX codec is removed from the answer when it isn't supported
+// by local side.
+TEST_F(WebRtcSessionTest, TestRtxRemovedByCreateAnswer) {
+ Init();
+ SendAudioVideoStream1();
+ std::string offer_sdp(kSdpWithRtx);
+
+ SessionDescriptionInterface* offer =
+ CreateSessionDescription(JsepSessionDescription::kOffer, offer_sdp, NULL);
+ EXPECT_TRUE(offer->ToString(&offer_sdp));
+
+ // Offer SDP contains the RTX codec.
+ EXPECT_TRUE(offer_sdp.find("rtx") != std::string::npos);
+ SetRemoteDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ std::string answer_sdp;
+ answer->ToString(&answer_sdp);
+ // Answer SDP removes the unsupported RTX codec.
+ EXPECT_TRUE(answer_sdp.find("rtx") == std::string::npos);
+ SetLocalDescriptionWithoutError(answer);
+}
+
+// This verifies that the voice channel after bundle has both options from video
+// and voice channels.
+TEST_F(WebRtcSessionTest, TestSetSocketOptionBeforeBundle) {
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+ SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ session_->video_channel()->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_SNDBUF, 4000);
+
+ session_->voice_channel()->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_RCVBUF, 8000);
+
+ int option_val;
+ EXPECT_TRUE(session_->video_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_SNDBUF, &option_val));
+ EXPECT_EQ(4000, option_val);
+ EXPECT_FALSE(session_->voice_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_SNDBUF, &option_val));
+
+ EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_RCVBUF, &option_val));
+ EXPECT_EQ(8000, option_val);
+ EXPECT_FALSE(session_->video_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_RCVBUF, &option_val));
+
+ EXPECT_NE(session_->voice_rtp_transport_channel(),
+ session_->video_rtp_transport_channel());
+
+ SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_SNDBUF, &option_val));
+ EXPECT_EQ(4000, option_val);
+
+ EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+ rtc::Socket::Option::OPT_RCVBUF, &option_val));
+ EXPECT_EQ(8000, option_val);
+}
+
+// Test creating a session, request multiple offers, destroy the session
+// and make sure we got success/failure callbacks for all of the requests.
+// Background: crbug.com/507307
+TEST_F(WebRtcSessionTest, CreateOffersAndShutdown) {
+ Init();
+
+ rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observers[100];
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+ cricket::MediaSessionOptions session_options;
+ session_options.recv_audio = true;
+
+ for (auto& o : observers) {
+ o = new WebRtcSessionCreateSDPObserverForTest();
+ session_->CreateOffer(o, options, session_options);
+ }
+
+ session_.reset();
+
+ for (auto& o : observers) {
+ // We expect to have received a notification now even if the session was
+ // terminated. The offer creation may or may not have succeeded, but we
+ // must have received a notification which, so the only invalid state
+ // is kInit.
+ EXPECT_NE(WebRtcSessionCreateSDPObserverForTest::kInit, o->state());
+ }
+}
+
+TEST_F(WebRtcSessionTest, TestPacketOptionsAndOnPacketSent) {
+ TestPacketOptions();
+}
+
+// TODO(bemasc): Add a TestIceStatesBundle with BUNDLE enabled. That test
+// currently fails because upon disconnection and reconnection OnIceComplete is
+// called more than once without returning to IceGatheringGathering.
+
+INSTANTIATE_TEST_CASE_P(WebRtcSessionTests,
+ WebRtcSessionTest,
+ testing::Values(ALREADY_GENERATED,
+ DTLS_IDENTITY_STORE));
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
new file mode 100644
index 0000000000..25965af79d
--- /dev/null
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
@@ -0,0 +1,508 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/jsep.h"
+#include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/app/webrtc/webrtcsession.h"
+#include "webrtc/base/sslidentity.h"
+
+using cricket::MediaSessionOptions;
+
+namespace webrtc {
+namespace {
+static const char kFailedDueToIdentityFailed[] =
+ " failed because DTLS identity request failed";
+static const char kFailedDueToSessionShutdown[] =
+ " failed because the session was shut down";
+
+static const uint64_t kInitSessionVersion = 2;
+
+static bool CompareStream(const MediaSessionOptions::Stream& stream1,
+ const MediaSessionOptions::Stream& stream2) {
+ return stream1.id < stream2.id;
+}
+
+static bool SameId(const MediaSessionOptions::Stream& stream1,
+ const MediaSessionOptions::Stream& stream2) {
+ return stream1.id == stream2.id;
+}
+
+// Checks if each Stream within the |streams| has unique id.
+static bool ValidStreams(const MediaSessionOptions::Streams& streams) {
+ MediaSessionOptions::Streams sorted_streams = streams;
+ std::sort(sorted_streams.begin(), sorted_streams.end(), CompareStream);
+ MediaSessionOptions::Streams::iterator it =
+ std::adjacent_find(sorted_streams.begin(), sorted_streams.end(),
+ SameId);
+ return it == sorted_streams.end();
+}
+
+enum {
+ MSG_CREATE_SESSIONDESCRIPTION_SUCCESS,
+ MSG_CREATE_SESSIONDESCRIPTION_FAILED,
+ MSG_USE_CONSTRUCTOR_CERTIFICATE
+};
+
+struct CreateSessionDescriptionMsg : public rtc::MessageData {
+ explicit CreateSessionDescriptionMsg(
+ webrtc::CreateSessionDescriptionObserver* observer)
+ : observer(observer) {
+ }
+
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
+ std::string error;
+ rtc::scoped_ptr<webrtc::SessionDescriptionInterface> description;
+};
+} // namespace
+
+void WebRtcIdentityRequestObserver::OnFailure(int error) {
+ SignalRequestFailed(error);
+}
+
+void WebRtcIdentityRequestObserver::OnSuccess(
+ const std::string& der_cert, const std::string& der_private_key) {
+ std::string pem_cert = rtc::SSLIdentity::DerToPem(
+ rtc::kPemTypeCertificate,
+ reinterpret_cast<const unsigned char*>(der_cert.data()),
+ der_cert.length());
+ std::string pem_key = rtc::SSLIdentity::DerToPem(
+ rtc::kPemTypeRsaPrivateKey,
+ reinterpret_cast<const unsigned char*>(der_private_key.data()),
+ der_private_key.length());
+ rtc::scoped_ptr<rtc::SSLIdentity> identity(
+ rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
+ SignalCertificateReady(rtc::RTCCertificate::Create(identity.Pass()));
+}
+
+void WebRtcIdentityRequestObserver::OnSuccess(
+ rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+ SignalCertificateReady(rtc::RTCCertificate::Create(identity.Pass()));
+}
+
+// static
+void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
+ const SessionDescriptionInterface* source_desc,
+ SessionDescriptionInterface* dest_desc) {
+ if (!source_desc)
+ return;
+ for (size_t m = 0; m < source_desc->number_of_mediasections() &&
+ m < dest_desc->number_of_mediasections(); ++m) {
+ const IceCandidateCollection* source_candidates =
+ source_desc->candidates(m);
+ const IceCandidateCollection* dest_candidates = dest_desc->candidates(m);
+ for (size_t n = 0; n < source_candidates->count(); ++n) {
+ const IceCandidateInterface* new_candidate = source_candidates->at(n);
+ if (!dest_candidates->HasCandidate(new_candidate))
+ dest_desc->AddCandidate(source_candidates->at(n));
+ }
+ }
+}
+
+// Private constructor called by other constructors.
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
+ identity_request_observer,
+ WebRtcSession* session,
+ const std::string& session_id,
+ bool dtls_enabled)
+ : signaling_thread_(signaling_thread),
+ session_desc_factory_(channel_manager, &transport_desc_factory_),
+ // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp
+ // as the session id and session version. To simplify, it should be fine
+ // to just use a random number as session id and start version from
+ // |kInitSessionVersion|.
+ session_version_(kInitSessionVersion),
+ dtls_identity_store_(dtls_identity_store.Pass()),
+ identity_request_observer_(identity_request_observer),
+ session_(session),
+ session_id_(session_id),
+ certificate_request_state_(CERTIFICATE_NOT_NEEDED) {
+ session_desc_factory_.set_add_legacy_streams(false);
+ // SRTP-SDES is disabled if DTLS is on.
+ SetSdesPolicy(dtls_enabled ? cricket::SEC_DISABLED : cricket::SEC_REQUIRED);
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ WebRtcSession* session,
+ const std::string& session_id)
+ : WebRtcSessionDescriptionFactory(signaling_thread,
+ channel_manager,
+ nullptr,
+ nullptr,
+ session,
+ session_id,
+ false) {
+ LOG(LS_VERBOSE) << "DTLS-SRTP disabled.";
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ WebRtcSession* session,
+ const std::string& session_id)
+ : WebRtcSessionDescriptionFactory(
+ signaling_thread,
+ channel_manager,
+ dtls_identity_store.Pass(),
+ new rtc::RefCountedObject<WebRtcIdentityRequestObserver>(),
+ session,
+ session_id,
+ true) {
+ RTC_DCHECK(dtls_identity_store_);
+
+ certificate_request_state_ = CERTIFICATE_WAITING;
+
+ identity_request_observer_->SignalRequestFailed.connect(
+ this, &WebRtcSessionDescriptionFactory::OnIdentityRequestFailed);
+ identity_request_observer_->SignalCertificateReady.connect(
+ this, &WebRtcSessionDescriptionFactory::SetCertificate);
+
+ rtc::KeyType key_type = rtc::KT_DEFAULT;
+ LOG(LS_VERBOSE) << "DTLS-SRTP enabled; sending DTLS identity request (key "
+ << "type: " << key_type << ").";
+
+ // Request identity. This happens asynchronously, so the caller will have a
+ // chance to connect to SignalIdentityReady.
+ dtls_identity_store_->RequestIdentity(key_type, identity_request_observer_);
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
+ WebRtcSession* session,
+ const std::string& session_id)
+ : WebRtcSessionDescriptionFactory(signaling_thread,
+ channel_manager,
+ nullptr,
+ nullptr,
+ session,
+ session_id,
+ true) {
+ RTC_DCHECK(certificate);
+
+ certificate_request_state_ = CERTIFICATE_WAITING;
+
+ LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter.";
+ // We already have a certificate but we wait to do SetIdentity; if we do
+ // it in the constructor then the caller has not had a chance to connect to
+ // SignalIdentityReady.
+ signaling_thread_->Post(
+ this, MSG_USE_CONSTRUCTOR_CERTIFICATE,
+ new rtc::ScopedRefMessageData<rtc::RTCCertificate>(certificate));
+}
+
+WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() {
+ ASSERT(signaling_thread_->IsCurrent());
+
+ // Fail any requests that were asked for before identity generation completed.
+ FailPendingRequests(kFailedDueToSessionShutdown);
+
+ // Process all pending notifications in the message queue. If we don't do
+ // this, requests will linger and not know they succeeded or failed.
+ rtc::MessageList list;
+ signaling_thread_->Clear(this, rtc::MQID_ANY, &list);
+ for (auto& msg : list) {
+ if (msg.message_id != MSG_USE_CONSTRUCTOR_CERTIFICATE) {
+ OnMessage(&msg);
+ } else {
+ // Skip MSG_USE_CONSTRUCTOR_CERTIFICATE because we don't want to trigger
+ // SetIdentity-related callbacks in the destructor. This can be a problem
+ // when WebRtcSession listens to the callback but it was the WebRtcSession
+ // destructor that caused WebRtcSessionDescriptionFactory's destruction.
+ // The callback is then ignored, leaking memory allocated by OnMessage for
+ // MSG_USE_CONSTRUCTOR_CERTIFICATE.
+ delete msg.pdata;
+ }
+ }
+}
+
+void WebRtcSessionDescriptionFactory::CreateOffer(
+ CreateSessionDescriptionObserver* observer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ const cricket::MediaSessionOptions& session_options) {
+ std::string error = "CreateOffer";
+ if (certificate_request_state_ == CERTIFICATE_FAILED) {
+ error += kFailedDueToIdentityFailed;
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+
+ if (!ValidStreams(session_options.streams)) {
+ error += " called with invalid media streams.";
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+
+ CreateSessionDescriptionRequest request(
+ CreateSessionDescriptionRequest::kOffer, observer, session_options);
+ if (certificate_request_state_ == CERTIFICATE_WAITING) {
+ create_session_description_requests_.push(request);
+ } else {
+ ASSERT(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+ certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+ InternalCreateOffer(request);
+ }
+}
+
+void WebRtcSessionDescriptionFactory::CreateAnswer(
+ CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints,
+ const cricket::MediaSessionOptions& session_options) {
+ std::string error = "CreateAnswer";
+ if (certificate_request_state_ == CERTIFICATE_FAILED) {
+ error += kFailedDueToIdentityFailed;
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+ if (!session_->remote_description()) {
+ error += " can't be called before SetRemoteDescription.";
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+ if (session_->remote_description()->type() !=
+ JsepSessionDescription::kOffer) {
+ error += " failed because remote_description is not an offer.";
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+
+ if (!ValidStreams(session_options.streams)) {
+ error += " called with invalid media streams.";
+ LOG(LS_ERROR) << error;
+ PostCreateSessionDescriptionFailed(observer, error);
+ return;
+ }
+
+ CreateSessionDescriptionRequest request(
+ CreateSessionDescriptionRequest::kAnswer, observer, session_options);
+ if (certificate_request_state_ == CERTIFICATE_WAITING) {
+ create_session_description_requests_.push(request);
+ } else {
+ ASSERT(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+ certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+ InternalCreateAnswer(request);
+ }
+}
+
+void WebRtcSessionDescriptionFactory::SetSdesPolicy(
+ cricket::SecurePolicy secure_policy) {
+ session_desc_factory_.set_secure(secure_policy);
+}
+
+cricket::SecurePolicy WebRtcSessionDescriptionFactory::SdesPolicy() const {
+ return session_desc_factory_.secure();
+}
+
+void WebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case MSG_CREATE_SESSIONDESCRIPTION_SUCCESS: {
+ CreateSessionDescriptionMsg* param =
+ static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+ param->observer->OnSuccess(param->description.release());
+ delete param;
+ break;
+ }
+ case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
+ CreateSessionDescriptionMsg* param =
+ static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+ param->observer->OnFailure(param->error);
+ delete param;
+ break;
+ }
+ case MSG_USE_CONSTRUCTOR_CERTIFICATE: {
+ rtc::ScopedRefMessageData<rtc::RTCCertificate>* param =
+ static_cast<rtc::ScopedRefMessageData<rtc::RTCCertificate>*>(
+ msg->pdata);
+ LOG(LS_INFO) << "Using certificate supplied to the constructor.";
+ SetCertificate(param->data());
+ delete param;
+ break;
+ }
+ default:
+ ASSERT(false);
+ break;
+ }
+}
+
+void WebRtcSessionDescriptionFactory::InternalCreateOffer(
+ CreateSessionDescriptionRequest request) {
+ cricket::SessionDescription* desc(session_desc_factory_.CreateOffer(
+ request.options, session_->local_description()
+ ? session_->local_description()->description()
+ : nullptr));
+ // RFC 3264
+ // When issuing an offer that modifies the session,
+ // the "o=" line of the new SDP MUST be identical to that in the
+ // previous SDP, except that the version in the origin field MUST
+ // increment by one from the previous SDP.
+
+ // Just increase the version number by one each time when a new offer
+ // is created regardless if it's identical to the previous one or not.
+ // The |session_version_| is a uint64_t, the wrap around should not happen.
+ ASSERT(session_version_ + 1 > session_version_);
+ JsepSessionDescription* offer(new JsepSessionDescription(
+ JsepSessionDescription::kOffer));
+ if (!offer->Initialize(desc, session_id_,
+ rtc::ToString(session_version_++))) {
+ delete offer;
+ PostCreateSessionDescriptionFailed(request.observer,
+ "Failed to initialize the offer.");
+ return;
+ }
+ if (session_->local_description() &&
+ !request.options.transport_options.ice_restart) {
+ // Include all local ice candidates in the SessionDescription unless
+ // the an ice restart has been requested.
+ CopyCandidatesFromSessionDescription(session_->local_description(), offer);
+ }
+ PostCreateSessionDescriptionSucceeded(request.observer, offer);
+}
+
+void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
+ CreateSessionDescriptionRequest request) {
+ // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1
+ // an answer should also contain new ice ufrag and password if an offer has
+ // been received with new ufrag and password.
+ request.options.transport_options.ice_restart = session_->IceRestartPending();
+ // We should pass current ssl role to the transport description factory, if
+ // there is already an existing ongoing session.
+ rtc::SSLRole ssl_role;
+ if (session_->GetSslRole(&ssl_role)) {
+ request.options.transport_options.prefer_passive_role =
+ (rtc::SSL_SERVER == ssl_role);
+ }
+
+ cricket::SessionDescription* desc(session_desc_factory_.CreateAnswer(
+ session_->remote_description()
+ ? session_->remote_description()->description()
+ : nullptr,
+ request.options, session_->local_description()
+ ? session_->local_description()->description()
+ : nullptr));
+ // RFC 3264
+ // If the answer is different from the offer in any way (different IP
+ // addresses, ports, etc.), the origin line MUST be different in the answer.
+ // In that case, the version number in the "o=" line of the answer is
+ // unrelated to the version number in the o line of the offer.
+ // Get a new version number by increasing the |session_version_answer_|.
+ // The |session_version_| is a uint64_t, the wrap around should not happen.
+ ASSERT(session_version_ + 1 > session_version_);
+ JsepSessionDescription* answer(new JsepSessionDescription(
+ JsepSessionDescription::kAnswer));
+ if (!answer->Initialize(desc, session_id_,
+ rtc::ToString(session_version_++))) {
+ delete answer;
+ PostCreateSessionDescriptionFailed(request.observer,
+ "Failed to initialize the answer.");
+ return;
+ }
+ if (session_->local_description() &&
+ !request.options.transport_options.ice_restart) {
+ // Include all local ice candidates in the SessionDescription unless
+ // the remote peer has requested an ice restart.
+ CopyCandidatesFromSessionDescription(session_->local_description(), answer);
+ }
+ session_->ResetIceRestartLatch();
+ PostCreateSessionDescriptionSucceeded(request.observer, answer);
+}
+
+void WebRtcSessionDescriptionFactory::FailPendingRequests(
+ const std::string& reason) {
+ ASSERT(signaling_thread_->IsCurrent());
+ while (!create_session_description_requests_.empty()) {
+ const CreateSessionDescriptionRequest& request =
+ create_session_description_requests_.front();
+ PostCreateSessionDescriptionFailed(request.observer,
+ ((request.type == CreateSessionDescriptionRequest::kOffer) ?
+ "CreateOffer" : "CreateAnswer") + reason);
+ create_session_description_requests_.pop();
+ }
+}
+
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
+ CreateSessionDescriptionObserver* observer, const std::string& error) {
+ CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+ msg->error = error;
+ signaling_thread_->Post(this, MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
+ LOG(LS_ERROR) << "Create SDP failed: " << error;
+}
+
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded(
+ CreateSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* description) {
+ CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+ msg->description.reset(description);
+ signaling_thread_->Post(this, MSG_CREATE_SESSIONDESCRIPTION_SUCCESS, msg);
+}
+
+void WebRtcSessionDescriptionFactory::OnIdentityRequestFailed(int error) {
+ ASSERT(signaling_thread_->IsCurrent());
+
+ LOG(LS_ERROR) << "Async identity request failed: error = " << error;
+ certificate_request_state_ = CERTIFICATE_FAILED;
+
+ FailPendingRequests(kFailedDueToIdentityFailed);
+}
+
+void WebRtcSessionDescriptionFactory::SetCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+ RTC_DCHECK(certificate);
+ LOG(LS_VERBOSE) << "Setting new certificate";
+
+ certificate_request_state_ = CERTIFICATE_SUCCEEDED;
+ SignalCertificateReady(certificate);
+
+ transport_desc_factory_.set_certificate(certificate);
+ transport_desc_factory_.set_secure(cricket::SEC_ENABLED);
+
+ while (!create_session_description_requests_.empty()) {
+ if (create_session_description_requests_.front().type ==
+ CreateSessionDescriptionRequest::kOffer) {
+ InternalCreateOffer(create_session_description_requests_.front());
+ } else {
+ InternalCreateAnswer(create_session_description_requests_.front());
+ }
+ create_session_description_requests_.pop();
+ }
+}
+} // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.h b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
new file mode 100644
index 0000000000..91adc662c2
--- /dev/null
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
@@ -0,0 +1,193 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
+#define TALK_APP_WEBRTC_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
+
+#include "talk/app/webrtc/dtlsidentitystore.h"
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/p2p/base/transportdescriptionfactory.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/rtccertificate.h"
+
+namespace cricket {
+class ChannelManager;
+class TransportDescriptionFactory;
+} // namespace cricket
+
+namespace webrtc {
+class CreateSessionDescriptionObserver;
+class MediaConstraintsInterface;
+class SessionDescriptionInterface;
+class WebRtcSession;
+
+// DTLS identity request callback class.
+class WebRtcIdentityRequestObserver : public DtlsIdentityRequestObserver,
+ public sigslot::has_slots<> {
+ public:
+ // DtlsIdentityRequestObserver overrides.
+ void OnFailure(int error) override;
+ void OnSuccess(const std::string& der_cert,
+ const std::string& der_private_key) override;
+ void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override;
+
+ sigslot::signal1<int> SignalRequestFailed;
+ sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
+ SignalCertificateReady;
+};
+
+struct CreateSessionDescriptionRequest {
+ enum Type {
+ kOffer,
+ kAnswer,
+ };
+
+ CreateSessionDescriptionRequest(
+ Type type,
+ CreateSessionDescriptionObserver* observer,
+ const cricket::MediaSessionOptions& options)
+ : type(type),
+ observer(observer),
+ options(options) {}
+
+ Type type;
+ rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
+ cricket::MediaSessionOptions options;
+};
+
+// This class is used to create offer/answer session description with regards to
+// the async DTLS identity generation for WebRtcSession.
+// It queues the create offer/answer request until the DTLS identity
+// request has completed, i.e. when OnIdentityRequestFailed or OnIdentityReady
+// is called.
+class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
+ public sigslot::has_slots<> {
+ public:
+ // Construct with DTLS disabled.
+ WebRtcSessionDescriptionFactory(rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ WebRtcSession* session,
+ const std::string& session_id);
+
+ // Construct with DTLS enabled using the specified |dtls_identity_store| to
+ // generate a certificate.
+ WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ WebRtcSession* session,
+ const std::string& session_id);
+
+ // Construct with DTLS enabled using the specified (already generated)
+ // |certificate|.
+ WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
+ WebRtcSession* session,
+ const std::string& session_id);
+ virtual ~WebRtcSessionDescriptionFactory();
+
+ static void CopyCandidatesFromSessionDescription(
+ const SessionDescriptionInterface* source_desc,
+ SessionDescriptionInterface* dest_desc);
+
+ void CreateOffer(
+ CreateSessionDescriptionObserver* observer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ const cricket::MediaSessionOptions& session_options);
+ void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const MediaConstraintsInterface* constraints,
+ const cricket::MediaSessionOptions& session_options);
+
+ void SetSdesPolicy(cricket::SecurePolicy secure_policy);
+ cricket::SecurePolicy SdesPolicy() const;
+
+ sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
+ SignalCertificateReady;
+
+ // For testing.
+ bool waiting_for_certificate_for_testing() const {
+ return certificate_request_state_ == CERTIFICATE_WAITING;
+ }
+
+ private:
+ enum CertificateRequestState {
+ CERTIFICATE_NOT_NEEDED,
+ CERTIFICATE_WAITING,
+ CERTIFICATE_SUCCEEDED,
+ CERTIFICATE_FAILED,
+ };
+
+ WebRtcSessionDescriptionFactory(
+ rtc::Thread* signaling_thread,
+ cricket::ChannelManager* channel_manager,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
+ identity_request_observer,
+ WebRtcSession* session,
+ const std::string& session_id,
+ bool dtls_enabled);
+
+ // MessageHandler implementation.
+ virtual void OnMessage(rtc::Message* msg);
+
+ void InternalCreateOffer(CreateSessionDescriptionRequest request);
+ void InternalCreateAnswer(CreateSessionDescriptionRequest request);
+ // Posts failure notifications for all pending session description requests.
+ void FailPendingRequests(const std::string& reason);
+ void PostCreateSessionDescriptionFailed(
+ CreateSessionDescriptionObserver* observer,
+ const std::string& error);
+ void PostCreateSessionDescriptionSucceeded(
+ CreateSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* description);
+
+ void OnIdentityRequestFailed(int error);
+ void SetCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+
+ std::queue<CreateSessionDescriptionRequest>
+ create_session_description_requests_;
+ rtc::Thread* const signaling_thread_;
+ cricket::TransportDescriptionFactory transport_desc_factory_;
+ cricket::MediaSessionDescriptionFactory session_desc_factory_;
+ uint64_t session_version_;
+ const rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store_;
+ const rtc::scoped_refptr<WebRtcIdentityRequestObserver>
+ identity_request_observer_;
+ // TODO(jiayl): remove the dependency on session once bug 2264 is fixed.
+ WebRtcSession* const session_;
+ const std::string session_id_;
+ CertificateRequestState certificate_request_state_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory);
+};
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_WEBRTCSESSIONDESCRIPTIONFACTORY_H_