aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTreeHugger Robot <treehugger-gerrit@google.com>2020-07-23 23:07:18 +0000
committerAndroid (Google) Code Review <android-gerrit@google.com>2020-07-23 23:07:18 +0000
commit9e9b79187bff6d54f8e15db1978c157f8dda9335 (patch)
tree9ba6a46a7e4cd59e1018b94136f46578efe31f2e
parent37f9b0ea9ea6a8c490bdb0dc2f44a586b01c8ab2 (diff)
parent206ccd0b36df69a0d0d0d26ddf7c4ead20202f91 (diff)
downloadwebrtc-9e9b79187bff6d54f8e15db1978c157f8dda9335.tar.gz
Merge changes Ida3bfe62,I2d596942
* changes: Merge remote tracking branch 'upstream-master' Generate new Android.bp file and correct build errors
-rw-r--r--.gitignore2
-rw-r--r--AUTHORS6
-rw-r--r--Android.bp5492
-rw-r--r--Android.mk0
-rw-r--r--BUILD.gn14
-rw-r--r--DEPS716
-rwxr-xr-xPRESUBMIT.py132
-rw-r--r--README.chromium27
-rw-r--r--abseil-in-webrtc.md13
-rw-r--r--api/BUILD.gn142
-rw-r--r--api/DEPS10
-rw-r--r--api/adaptation/BUILD.gn23
-rw-r--r--api/adaptation/DEPS7
-rw-r--r--api/adaptation/resource.cc30
-rw-r--r--api/adaptation/resource.h67
-rw-r--r--api/array_view_unittest.cc4
-rw-r--r--api/audio/BUILD.gn16
-rw-r--r--api/audio/audio_frame.cc24
-rw-r--r--api/audio/audio_frame.h4
-rw-r--r--api/audio/echo_detector_creator.cc21
-rw-r--r--api/audio/echo_detector_creator.h26
-rw-r--r--api/audio/test/audio_frame_unittest.cc50
-rw-r--r--api/audio_codecs/BUILD.gn2
-rw-r--r--api/audio_codecs/L16/BUILD.gn4
-rw-r--r--api/audio_codecs/g711/BUILD.gn4
-rw-r--r--api/audio_codecs/g722/BUILD.gn4
-rw-r--r--api/audio_codecs/ilbc/BUILD.gn4
-rw-r--r--api/audio_codecs/isac/BUILD.gn8
-rw-r--r--api/audio_codecs/opus/BUILD.gn10
-rw-r--r--api/audio_options.h2
-rw-r--r--api/data_channel_interface.h5
-rw-r--r--api/frame_transformer_interface.h3
-rw-r--r--api/neteq/BUILD.gn4
-rw-r--r--api/neteq/neteq.cc3
-rw-r--r--api/neteq/neteq.h4
-rw-r--r--api/peer_connection_interface.cc21
-rw-r--r--api/peer_connection_interface.h58
-rw-r--r--api/peer_connection_proxy.h1
-rw-r--r--api/priority.h (renamed from test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc)21
-rw-r--r--api/proxy.h11
-rw-r--r--api/rtc_event_log_output_file_unittest.cc6
-rw-r--r--api/rtp_headers.cc4
-rw-r--r--api/rtp_headers.h6
-rw-r--r--api/rtp_parameters.cc16
-rw-r--r--api/rtp_parameters.h25
-rw-r--r--api/rtp_transceiver_interface.cc6
-rw-r--r--api/rtp_transceiver_interface.h7
-rw-r--r--api/stats/rtc_stats.h8
-rw-r--r--api/stats/rtcstats_objects.h21
-rw-r--r--api/task_queue/BUILD.gn6
-rw-r--r--api/task_queue/task_queue_test.cc11
-rw-r--r--api/test/DEPS1
-rw-r--r--api/test/audio_quality_analyzer_interface.h4
-rw-r--r--api/test/audioproc_float.cc6
-rw-r--r--api/test/audioproc_float.h16
-rw-r--r--api/test/compile_all_headers.cc5
-rw-r--r--api/test/create_network_emulation_manager.h2
-rw-r--r--api/test/create_peerconnection_quality_test_fixture.cc6
-rw-r--r--api/test/create_peerconnection_quality_test_fixture.h6
-rw-r--r--api/test/create_time_controller.cc9
-rw-r--r--api/test/dummy_peer_connection.h4
-rw-r--r--api/test/fake_datagram_transport.h121
-rw-r--r--api/test/fake_media_transport.h74
-rw-r--r--api/test/loopback_media_transport.cc373
-rw-r--r--api/test/loopback_media_transport.h269
-rw-r--r--api/test/loopback_media_transport_unittest.cc201
-rw-r--r--api/test/mock_audio_mixer.h9
-rw-r--r--api/test/mock_peer_connection_factory_interface.h75
-rw-r--r--api/test/mock_peerconnectioninterface.h1
-rw-r--r--api/test/mock_transformable_video_frame.h38
-rw-r--r--api/test/mock_video_bitrate_allocator_factory.h2
-rw-r--r--api/test/mock_video_decoder_factory.h4
-rw-r--r--api/test/mock_video_encoder.h54
-rw-r--r--api/test/mock_video_encoder_factory.h6
-rw-r--r--api/test/network_emulation/BUILD.gn2
-rw-r--r--api/test/network_emulation/network_emulation_interfaces.h121
-rw-r--r--api/test/peerconnection_quality_test_fixture.h57
-rw-r--r--api/test/simulated_network.h1
-rw-r--r--api/test/stats_observer_interface.h10
-rw-r--r--api/test/test_dependency_factory.cc16
-rw-r--r--api/test/time_controller.h3
-rw-r--r--api/test/track_id_stream_info_map.h42
-rw-r--r--api/test/track_id_stream_label_map.h36
-rw-r--r--api/test/video_quality_analyzer_interface.h49
-rw-r--r--api/test/video_quality_test_fixture.h81
-rw-r--r--api/transport/BUILD.gn28
-rw-r--r--api/transport/congestion_control_interface.h75
-rw-r--r--api/transport/data_channel_transport_interface.h4
-rw-r--r--api/transport/datagram_transport_interface.h151
-rw-r--r--api/transport/media/BUILD.gn52
-rw-r--r--api/transport/media/audio_transport.cc54
-rw-r--r--api/transport/media/audio_transport.h120
-rw-r--r--api/transport/media/media_transport_config.cc29
-rw-r--r--api/transport/media/media_transport_config.h38
-rw-r--r--api/transport/media/media_transport_interface.cc108
-rw-r--r--api/transport/media/media_transport_interface.h320
-rw-r--r--api/transport/media/video_transport.cc56
-rw-r--r--api/transport/media/video_transport.h101
-rw-r--r--api/transport/rtp/BUILD.gn11
-rw-r--r--api/transport/rtp/dependency_descriptor.cc54
-rw-r--r--api/transport/rtp/dependency_descriptor.h48
-rw-r--r--api/transport/test/mock_network_control.h15
-rw-r--r--api/units/data_rate_unittest.cc2
-rw-r--r--api/video/BUILD.gn44
-rw-r--r--api/video/OWNERS.webrtc1
-rw-r--r--api/video/encoded_image.h1
-rw-r--r--api/video/i010_buffer.cc12
-rw-r--r--api/video/test/BUILD.gn2
-rw-r--r--api/video/test/mock_recordable_encoded_frame.h19
-rw-r--r--api/video/video_adaptation_counters.cc9
-rw-r--r--api/video/video_adaptation_counters.h4
-rw-r--r--api/video/video_frame_marking.h29
-rw-r--r--api/video/video_frame_metadata.cc28
-rw-r--r--api/video/video_frame_metadata.h59
-rw-r--r--api/video/video_frame_metadata_unittest.cc120
-rw-r--r--api/video/video_stream_encoder_interface.h11
-rw-r--r--api/video_codecs/BUILD.gn6
-rw-r--r--api/video_codecs/test/BUILD.gn1
-rw-r--r--api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc62
-rw-r--r--api/video_codecs/video_codec.h2
-rw-r--r--api/video_codecs/video_decoder_software_fallback_wrapper.cc22
-rw-r--r--api/video_codecs/video_encoder_factory.h4
-rw-r--r--api/voip/BUILD.gn2
-rw-r--r--audio/BUILD.gn5
-rw-r--r--audio/audio_level.cc12
-rw-r--r--audio/audio_level.h14
-rw-r--r--audio/audio_send_stream.cc64
-rw-r--r--audio/audio_send_stream.h12
-rw-r--r--audio/audio_send_stream_unittest.cc64
-rw-r--r--audio/audio_state.h1
-rw-r--r--audio/audio_state_unittest.cc6
-rw-r--r--audio/audio_transport_impl.cc10
-rw-r--r--audio/audio_transport_impl.h4
-rw-r--r--audio/channel_receive.cc101
-rw-r--r--audio/channel_send.cc190
-rw-r--r--audio/channel_send.h8
-rw-r--r--audio/channel_send_frame_transformer_delegate.cc6
-rw-r--r--audio/channel_send_frame_transformer_delegate.h4
-rw-r--r--audio/mock_voe_channel_proxy.h224
-rw-r--r--audio/test/low_bandwidth_audio_test.cc1
-rw-r--r--audio/test/pc_low_bandwidth_audio_test.cc9
-rw-r--r--audio/utility/audio_frame_operations_unittest.cc6
-rw-r--r--audio/voip/BUILD.gn5
-rw-r--r--audio/voip/audio_channel.cc6
-rw-r--r--audio/voip/audio_channel.h5
-rw-r--r--audio/voip/audio_egress.cc2
-rw-r--r--audio/voip/audio_egress.h13
-rw-r--r--audio/voip/audio_ingress.cc11
-rw-r--r--audio/voip/audio_ingress.h12
-rw-r--r--audio/voip/test/BUILD.gn3
-rw-r--r--audio/voip/test/audio_egress_unittest.cc13
-rw-r--r--audio/voip/test/audio_ingress_unittest.cc7
-rw-r--r--audio/voip/voip_core.cc11
-rw-r--r--audio/voip/voip_core.h4
-rw-r--r--base/third_party/libevent/event.h1
-rw-r--r--build_overrides/build.gni23
-rw-r--r--call/BUILD.gn48
-rw-r--r--call/adaptation/BUILD.gn42
-rw-r--r--call/adaptation/adaptation_constraint.cc17
-rw-r--r--call/adaptation/adaptation_constraint.h43
-rw-r--r--call/adaptation/adaptation_listener.cc17
-rw-r--r--call/adaptation/adaptation_listener.h41
-rw-r--r--call/adaptation/broadcast_resource_listener.cc120
-rw-r--r--call/adaptation/broadcast_resource_listener.h75
-rw-r--r--call/adaptation/broadcast_resource_listener_unittest.cc121
-rw-r--r--call/adaptation/degradation_preference_provider.cc14
-rw-r--r--call/adaptation/degradation_preference_provider.h28
-rw-r--r--call/adaptation/resource.cc60
-rw-r--r--call/adaptation/resource.h79
-rw-r--r--call/adaptation/resource_adaptation_processor.cc500
-rw-r--r--call/adaptation/resource_adaptation_processor.h180
-rw-r--r--call/adaptation/resource_adaptation_processor_interface.cc5
-rw-r--r--call/adaptation/resource_adaptation_processor_interface.h71
-rw-r--r--call/adaptation/resource_adaptation_processor_unittest.cc723
-rw-r--r--call/adaptation/resource_unittest.cc40
-rw-r--r--call/adaptation/test/fake_adaptation_constraint.cc39
-rw-r--r--call/adaptation/test/fake_adaptation_constraint.h42
-rw-r--r--call/adaptation/test/fake_adaptation_listener.cc32
-rw-r--r--call/adaptation/test/fake_adaptation_listener.h38
-rw-r--r--call/adaptation/test/fake_frame_rate_provider.h58
-rw-r--r--call/adaptation/test/fake_resource.cc43
-rw-r--r--call/adaptation/test/fake_resource.h25
-rw-r--r--call/adaptation/test/mock_resource_listener.h31
-rw-r--r--call/adaptation/video_source_restrictions.cc14
-rw-r--r--call/adaptation/video_source_restrictions.h3
-rw-r--r--call/adaptation/video_stream_adapter.cc874
-rw-r--r--call/adaptation/video_stream_adapter.h239
-rw-r--r--call/adaptation/video_stream_adapter_unittest.cc876
-rw-r--r--call/adaptation/video_stream_input_state_provider.cc10
-rw-r--r--call/adaptation/video_stream_input_state_provider.h9
-rw-r--r--call/audio_send_stream.cc2
-rw-r--r--call/audio_send_stream.h1
-rw-r--r--call/bitrate_allocator_unittest.cc5
-rw-r--r--call/bitrate_estimator_tests.cc11
-rw-r--r--call/call.cc660
-rw-r--r--call/call.h42
-rw-r--r--call/call_factory.cc15
-rw-r--r--call/call_factory.h9
-rw-r--r--call/call_perf_tests.cc9
-rw-r--r--call/call_unittest.cc204
-rw-r--r--call/degraded_call.cc5
-rw-r--r--call/degraded_call.h2
-rw-r--r--call/fake_network_pipe.cc32
-rw-r--r--call/fake_network_pipe.h6
-rw-r--r--call/fake_network_pipe_unittest.cc6
-rw-r--r--call/flexfec_receive_stream_impl.cc7
-rw-r--r--call/flexfec_receive_stream_impl.h3
-rw-r--r--call/rampup_tests.cc8
-rw-r--r--call/rtcp_demuxer.cc107
-rw-r--r--call/rtcp_demuxer.h84
-rw-r--r--call/rtcp_demuxer_unittest.cc505
-rw-r--r--call/rtcp_packet_sink_interface.h29
-rw-r--r--call/rtp_demuxer.cc152
-rw-r--r--call/rtp_demuxer.h25
-rw-r--r--call/rtp_demuxer_unittest.cc248
-rw-r--r--call/rtp_payload_params.cc35
-rw-r--r--call/rtp_payload_params.h3
-rw-r--r--call/rtp_payload_params_unittest.cc67
-rw-r--r--call/rtp_rtcp_demuxer_helper.cc55
-rw-r--r--call/rtp_rtcp_demuxer_helper.h96
-rw-r--r--call/rtp_rtcp_demuxer_helper_unittest.cc120
-rw-r--r--call/rtp_stream_receiver_controller.h4
-rw-r--r--call/rtp_transport_controller_send.cc17
-rw-r--r--call/rtp_video_sender.cc86
-rw-r--r--call/rtp_video_sender.h90
-rw-r--r--call/rtp_video_sender_unittest.cc24
-rw-r--r--call/simulated_network.cc8
-rw-r--r--call/simulated_network.h4
-rw-r--r--call/ssrc_binding_observer.h40
-rw-r--r--call/test/mock_audio_send_stream.h31
-rw-r--r--call/test/mock_bitrate_allocator.h13
-rw-r--r--call/test/mock_rtp_packet_sink_interface.h2
-rw-r--r--call/test/mock_rtp_transport_controller_send.h106
-rw-r--r--call/video_send_stream.h11
-rw-r--r--common_audio/BUILD.gn2
-rw-r--r--common_audio/OWNERS.webrtc1
-rw-r--r--common_audio/channel_buffer_unittest.cc4
-rw-r--r--common_audio/mocks/mock_smoothing_filter.h6
-rw-r--r--common_audio/resampler/include/resampler.h4
-rw-r--r--common_audio/resampler/push_resampler_unittest.cc6
-rw-r--r--common_audio/resampler/resampler.cc38
-rw-r--r--common_audio/resampler/sinc_resampler_unittest.cc2
-rw-r--r--common_audio/third_party/ooura/fft_size_128/ooura_fft.cc8
-rw-r--r--common_audio/third_party/ooura/fft_size_128/ooura_fft.h4
-rw-r--r--common_audio/vad/mock/mock_vad.h14
-rw-r--r--common_video/BUILD.gn4
-rw-r--r--common_video/bitrate_adjuster.cc12
-rw-r--r--common_video/generic_frame_descriptor/BUILD.gn2
-rw-r--r--common_video/generic_frame_descriptor/generic_frame_info.cc37
-rw-r--r--common_video/generic_frame_descriptor/generic_frame_info.h9
-rw-r--r--common_video/include/bitrate_adjuster.h24
-rw-r--r--common_video/video_frame_buffer.cc4
-rw-r--r--docs/faq.md4
-rw-r--r--docs/native-code/android/index.md50
-rw-r--r--examples/BUILD.gn40
-rw-r--r--examples/aarproject/app/build.gradle2
-rw-r--r--examples/androidapp/AndroidManifest.xml2
-rw-r--r--examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java4
-rw-r--r--examples/androidnativeapi/AndroidManifest.xml2
-rw-r--r--examples/androidnativeapi/BUILD.gn4
-rw-r--r--examples/androidnativeapi/jni/android_call_client.cc12
-rw-r--r--examples/androidnativeapi/jni/android_call_client.h4
-rw-r--r--examples/androidtests/AndroidManifest.xml2
-rw-r--r--examples/androidvoip/AndroidManifest.xml38
-rw-r--r--examples/androidvoip/BUILD.gn88
-rw-r--r--examples/androidvoip/DEPS3
-rw-r--r--examples/androidvoip/OWNERS2
-rw-r--r--examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java339
-rw-r--r--examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java26
-rw-r--r--examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java188
-rw-r--r--examples/androidvoip/jni/android_voip_client.cc405
-rw-r--r--examples/androidvoip/jni/android_voip_client.h156
-rw-r--r--examples/androidvoip/jni/onload.cc28
-rw-r--r--examples/androidvoip/res/layout/activity_main.xml303
-rw-r--r--examples/androidvoip/res/values/colors.xml5
-rw-r--r--examples/androidvoip/res/values/strings.xml19
-rw-r--r--examples/objc/AppRTCMobile/ios/ARDMainViewController.m1
-rw-r--r--examples/objcnativeapi/objc/objc_call_client.h6
-rw-r--r--examples/objcnativeapi/objc/objc_call_client.mm10
-rw-r--r--logging/BUILD.gn22
-rw-r--r--logging/rtc_event_log/encoder/blob_encoding.h2
-rw-r--r--logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc11
-rw-r--r--logging/rtc_event_log/mock/mock_rtc_event_log.h18
-rw-r--r--logging/rtc_event_log/rtc_event_log_parser.cc33
-rw-r--r--logging/rtc_event_log/rtc_event_log_parser.h22
-rw-r--r--logging/rtc_event_log/rtc_event_log_unittest.cc5
-rw-r--r--media/BUILD.gn42
-rw-r--r--media/base/adapted_video_track_source.cc4
-rw-r--r--media/base/adapted_video_track_source.h6
-rw-r--r--media/base/codec.h2
-rw-r--r--media/base/fake_network_interface.h57
-rw-r--r--media/base/fake_video_renderer.cc2
-rw-r--r--media/base/fake_video_renderer.h22
-rw-r--r--media/base/media_channel.cc7
-rw-r--r--media/base/media_channel.h87
-rw-r--r--media/base/media_constants.cc5
-rw-r--r--media/base/media_constants.h8
-rw-r--r--media/base/media_engine.h12
-rw-r--r--media/base/media_engine_unittest.cc6
-rw-r--r--media/base/rtp_data_engine_unittest.cc2
-rw-r--r--media/base/rtp_utils.cc57
-rw-r--r--media/base/rtp_utils_unittest.cc107
-rw-r--r--media/base/video_adapter.cc8
-rw-r--r--media/base/video_adapter.h57
-rw-r--r--media/base/video_broadcaster.cc10
-rw-r--r--media/base/video_broadcaster.h4
-rw-r--r--media/base/vp9_profile.cc4
-rw-r--r--media/base/vp9_profile.h1
-rw-r--r--media/engine/fake_webrtc_call.cc11
-rw-r--r--media/engine/fake_webrtc_call.h7
-rw-r--r--media/engine/fake_webrtc_video_engine.cc16
-rw-r--r--media/engine/fake_webrtc_video_engine.h14
-rw-r--r--media/engine/internal_decoder_factory.cc2
-rw-r--r--media/engine/internal_decoder_factory_unittest.cc21
-rw-r--r--media/engine/payload_type_mapper.cc1
-rw-r--r--media/engine/payload_type_mapper_unittest.cc1
-rw-r--r--media/engine/simulcast_encoder_adapter_unittest.cc24
-rw-r--r--media/engine/webrtc_video_engine.cc32
-rw-r--r--media/engine/webrtc_video_engine.h8
-rw-r--r--media/engine/webrtc_video_engine_unittest.cc53
-rw-r--r--media/engine/webrtc_voice_engine.cc113
-rw-r--r--media/engine/webrtc_voice_engine_unittest.cc108
-rw-r--r--media/sctp/sctp_transport.cc333
-rw-r--r--media/sctp/sctp_transport.h27
-rw-r--r--media/sctp/sctp_transport_reliability_unittest.cc9
-rw-r--r--media/sctp/sctp_transport_unittest.cc77
-rw-r--r--modules/BUILD.gn6
-rw-r--r--modules/audio_coding/BUILD.gn63
-rw-r--r--modules/audio_coding/acm2/acm_receiver.cc14
-rw-r--r--modules/audio_coding/acm2/acm_receiver.h14
-rw-r--r--modules/audio_coding/acm2/audio_coding_module.cc62
-rw-r--r--modules/audio_coding/acm2/audio_coding_module_unittest.cc59
-rw-r--r--modules/audio_coding/audio_coding.gni3
-rw-r--r--modules/audio_coding/audio_network_adaptor/config.proto19
-rw-r--r--modules/audio_coding/audio_network_adaptor/controller_manager.cc15
-rw-r--r--modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc16
-rw-r--r--modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc73
-rw-r--r--modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h44
-rw-r--r--modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc121
-rw-r--r--modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h35
-rw-r--r--modules/audio_coding/audio_network_adaptor/mock/mock_controller.h15
-rw-r--r--modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h13
-rw-r--r--modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h28
-rw-r--r--modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc2
-rw-r--r--modules/audio_coding/codecs/cng/cng_unittest.cc6
-rw-r--r--modules/audio_coding/codecs/isac/audio_encoder_isac_t.h25
-rw-r--r--modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h61
-rw-r--r--modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc28
-rw-r--r--modules/audio_coding/codecs/opus/audio_encoder_opus.cc116
-rw-r--r--modules/audio_coding/codecs/opus/audio_encoder_opus.h2
-rw-r--r--modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc147
-rw-r--r--modules/audio_coding/codecs/opus/opus_interface.cc103
-rw-r--r--modules/audio_coding/codecs/opus/opus_interface.h16
-rw-r--r--modules/audio_coding/codecs/opus/opus_unittest.cc43
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red.cc115
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red.h3
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc89
-rw-r--r--modules/audio_coding/neteq/audio_decoder_unittest.cc18
-rw-r--r--modules/audio_coding/neteq/merge.cc2
-rw-r--r--modules/audio_coding/neteq/mock/mock_decoder_database.h38
-rw-r--r--modules/audio_coding/neteq/mock/mock_dtmf_buffer.h17
-rw-r--r--modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h15
-rw-r--r--modules/audio_coding/neteq/mock/mock_expand.h30
-rw-r--r--modules/audio_coding/neteq/mock/mock_histogram.h4
-rw-r--r--modules/audio_coding/neteq/mock/mock_neteq_controller.h79
-rw-r--r--modules/audio_coding/neteq/mock/mock_packet_buffer.h74
-rw-r--r--modules/audio_coding/neteq/mock/mock_red_payload_splitter.h10
-rw-r--r--modules/audio_coding/neteq/mock/mock_statistics_calculator.h9
-rw-r--r--modules/audio_coding/neteq/neteq_impl.cc135
-rw-r--r--modules/audio_coding/neteq/neteq_impl.h174
-rw-r--r--modules/audio_coding/neteq/neteq_network_stats_unittest.cc25
-rw-r--r--modules/audio_coding/neteq/neteq_unittest.cc181
-rw-r--r--modules/audio_coding/neteq/packet_buffer_unittest.cc11
-rw-r--r--modules/audio_coding/neteq/time_stretch.cc4
-rw-r--r--modules/audio_coding/neteq/time_stretch.h4
-rw-r--r--modules/audio_coding/neteq/tools/neteq_stats_plotter.cc3
-rw-r--r--modules/audio_coding/neteq/tools/neteq_stats_plotter.h2
-rw-r--r--modules/audio_coding/neteq/tools/neteq_test.cc3
-rw-r--r--modules/audio_coding/neteq/tools/neteq_test.h2
-rw-r--r--modules/audio_coding/neteq/tools/neteq_test_factory.h2
-rw-r--r--modules/audio_coding/test/Channel.cc16
-rw-r--r--modules/audio_coding/test/Channel.h4
-rw-r--r--modules/audio_coding/test/TestVADDTX.cc40
-rw-r--r--modules/audio_device/BUILD.gn9
-rw-r--r--modules/audio_device/android/audio_device_unittest.cc8
-rw-r--r--modules/audio_device/android/audio_track_jni.cc32
-rw-r--r--modules/audio_device/android/audio_track_jni.h1
-rw-r--r--modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java19
-rw-r--r--modules/audio_device/audio_device_buffer.cc10
-rw-r--r--modules/audio_device/audio_device_buffer.h4
-rw-r--r--modules/audio_device/audio_device_unittest.cc29
-rw-r--r--modules/audio_device/dummy/file_audio_device.cc30
-rw-r--r--modules/audio_device/dummy/file_audio_device.h4
-rw-r--r--modules/audio_device/include/mock_audio_device.h185
-rw-r--r--modules/audio_device/include/mock_audio_transport.h66
-rw-r--r--modules/audio_device/include/test_audio_device.cc26
-rw-r--r--modules/audio_device/linux/audio_device_alsa_linux.cc34
-rw-r--r--modules/audio_device/linux/audio_device_alsa_linux.h8
-rw-r--r--modules/audio_device/linux/audio_device_pulse_linux.cc30
-rw-r--r--modules/audio_device/linux/audio_device_pulse_linux.h12
-rw-r--r--modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc18
-rw-r--r--modules/audio_device/linux/audio_mixer_manager_alsa_linux.h4
-rw-r--r--modules/audio_device/mac/audio_device_mac.cc71
-rw-r--r--modules/audio_device/mac/audio_device_mac.h38
-rw-r--r--modules/audio_device/mac/audio_mixer_manager_mac.cc32
-rw-r--r--modules/audio_device/mac/audio_mixer_manager_mac.h24
-rw-r--r--modules/audio_device/mock_audio_device_buffer.h14
-rw-r--r--modules/audio_device/win/audio_device_core_win.cc82
-rw-r--r--modules/audio_device/win/audio_device_core_win.h6
-rw-r--r--modules/audio_mixer/BUILD.gn1
-rw-r--r--modules/audio_mixer/audio_mixer_impl.cc10
-rw-r--r--modules/audio_mixer/audio_mixer_impl.h10
-rw-r--r--modules/audio_mixer/audio_mixer_impl_unittest.cc12
-rw-r--r--modules/audio_mixer/frame_combiner_unittest.cc4
-rw-r--r--modules/audio_processing/BUILD.gn24
-rw-r--r--modules/audio_processing/aec3/BUILD.gn4
-rw-r--r--modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc4
-rw-r--r--modules/audio_processing/aec3/aec3_fft.cc11
-rw-r--r--modules/audio_processing/aec3/aec3_fft.h3
-rw-r--r--modules/audio_processing/aec3/aec3_fft_unittest.cc16
-rw-r--r--modules/audio_processing/aec3/alignment_mixer_unittest.cc4
-rw-r--r--modules/audio_processing/aec3/block_framer_unittest.cc28
-rw-r--r--modules/audio_processing/aec3/block_processor_unittest.cc10
-rw-r--r--modules/audio_processing/aec3/clockdrift_detector.h2
-rw-r--r--modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/decimator_unittest.cc8
-rw-r--r--modules/audio_processing/aec3/echo_canceller3_unittest.cc6
-rw-r--r--modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc6
-rw-r--r--modules/audio_processing/aec3/echo_remover_metrics_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/echo_remover_unittest.cc8
-rw-r--r--modules/audio_processing/aec3/fft_data_unittest.cc4
-rw-r--r--modules/audio_processing/aec3/frame_blocker_unittest.cc27
-rw-r--r--modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/matched_filter_unittest.cc8
-rw-r--r--modules/audio_processing/aec3/mock/mock_block_processor.h31
-rw-r--r--modules/audio_processing/aec3/mock/mock_echo_remover.h27
-rw-r--r--modules/audio_processing/aec3/mock/mock_render_delay_buffer.h35
-rw-r--r--modules/audio_processing/aec3/mock/mock_render_delay_controller.h17
-rw-r--r--modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/render_buffer_unittest.cc6
-rw-r--r--modules/audio_processing/aec3/render_delay_buffer_unittest.cc8
-rw-r--r--modules/audio_processing/aec3/render_delay_controller_unittest.cc4
-rw-r--r--modules/audio_processing/aec3/render_signal_analyzer_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/subtractor_unittest.cc2
-rw-r--r--modules/audio_processing/aec3/suppression_filter.h2
-rw-r--r--modules/audio_processing/aec3/suppression_filter_unittest.cc4
-rw-r--r--modules/audio_processing/aec3/suppression_gain_unittest.cc2
-rw-r--r--modules/audio_processing/aec_dump/mock_aec_dump.h70
-rw-r--r--modules/audio_processing/agc/BUILD.gn2
-rw-r--r--modules/audio_processing/agc/agc_manager_direct_unittest.cc33
-rw-r--r--modules/audio_processing/agc/mock_agc.h16
-rw-r--r--modules/audio_processing/agc2/BUILD.gn1
-rw-r--r--modules/audio_processing/agc2/rnn_vad/BUILD.gn2
-rw-r--r--modules/audio_processing/agc2/signal_classifier.cc12
-rw-r--r--modules/audio_processing/audio_buffer_unittest.cc2
-rw-r--r--modules/audio_processing/audio_processing_impl.cc82
-rw-r--r--modules/audio_processing/audio_processing_impl.h119
-rw-r--r--modules/audio_processing/audio_processing_impl_locking_unittest.cc30
-rw-r--r--modules/audio_processing/audio_processing_impl_unittest.cc6
-rw-r--r--modules/audio_processing/audio_processing_unittest.cc24
-rw-r--r--modules/audio_processing/echo_control_mobile_unittest.cc1
-rw-r--r--modules/audio_processing/include/mock_audio_processing.h214
-rw-r--r--modules/audio_processing/ns/BUILD.gn4
-rw-r--r--modules/audio_processing/test/aec_dump_based_simulator.cc10
-rw-r--r--modules/audio_processing/test/aec_dump_based_simulator.h1
-rw-r--r--modules/audio_processing/test/audio_processing_simulator.cc86
-rw-r--r--modules/audio_processing/test/audio_processing_simulator.h9
-rw-r--r--modules/audio_processing/test/audioproc_float_impl.cc68
-rw-r--r--modules/audio_processing/test/audioproc_float_impl.h15
-rw-r--r--modules/audio_processing/test/conversational_speech/BUILD.gn2
-rw-r--r--modules/audio_processing/test/conversational_speech/mock_wavreader.h10
-rw-r--r--modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h6
-rw-r--r--modules/audio_processing/test/echo_control_mock.h26
-rw-r--r--modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py5
-rw-r--r--modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py5
-rw-r--r--modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py5
-rw-r--r--modules/audio_processing/test/wav_based_simulator.cc22
-rw-r--r--modules/audio_processing/test/wav_based_simulator.h1
-rw-r--r--modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc2
-rw-r--r--modules/audio_processing/utility/pffft_wrapper_unittest.cc11
-rw-r--r--modules/congestion_controller/BUILD.gn1
-rw-r--r--modules/congestion_controller/goog_cc/BUILD.gn18
-rw-r--r--modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc10
-rw-r--r--modules/congestion_controller/include/receive_side_congestion_controller.h8
-rw-r--r--modules/congestion_controller/pcc/BUILD.gn4
-rw-r--r--modules/congestion_controller/pcc/bitrate_controller_unittest.cc6
-rw-r--r--modules/congestion_controller/receive_side_congestion_controller.cc14
-rw-r--r--modules/congestion_controller/receive_side_congestion_controller_unittest.cc6
-rw-r--r--modules/congestion_controller/rtp/BUILD.gn5
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_adapter.h1
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc6
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_demuxer.cc10
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_demuxer.h6
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc6
-rw-r--r--modules/desktop_capture/BUILD.gn15
-rw-r--r--modules/desktop_capture/linux/screen_capturer_x11.cc8
-rw-r--r--modules/desktop_capture/mac/desktop_configuration_monitor.cc6
-rw-r--r--modules/desktop_capture/mac/desktop_configuration_monitor.h4
-rw-r--r--modules/desktop_capture/mock_desktop_capturer_callback.h7
-rw-r--r--modules/desktop_capture/win/dxgi_duplicator_controller.h4
-rw-r--r--modules/desktop_capture/win/dxgi_output_duplicator.h1
-rw-r--r--modules/desktop_capture/win/window_capture_utils.cc91
-rw-r--r--modules/desktop_capture/win/window_capture_utils.h13
-rw-r--r--modules/desktop_capture/win/window_capturer_win_gdi.cc367
-rw-r--r--modules/desktop_capture/win/window_capturer_win_gdi.h76
-rw-r--r--modules/desktop_capture/win/window_capturer_win_wgc.cc54
-rw-r--r--modules/desktop_capture/win/window_capturer_win_wgc.h52
-rw-r--r--modules/desktop_capture/window_capturer_win.cc470
-rw-r--r--modules/include/module_common_types.cc41
-rw-r--r--modules/pacing/BUILD.gn4
-rw-r--r--modules/pacing/bitrate_prober.cc69
-rw-r--r--modules/pacing/bitrate_prober.h26
-rw-r--r--modules/pacing/bitrate_prober_unittest.cc128
-rw-r--r--modules/pacing/paced_sender.cc33
-rw-r--r--modules/pacing/paced_sender.h16
-rw-r--r--modules/pacing/paced_sender_unittest.cc15
-rw-r--r--modules/pacing/pacing_controller.cc59
-rw-r--r--modules/pacing/pacing_controller.h11
-rw-r--r--modules/pacing/pacing_controller_unittest.cc283
-rw-r--r--modules/pacing/packet_router.cc92
-rw-r--r--modules/pacing/packet_router.h78
-rw-r--r--modules/pacing/packet_router_unittest.cc110
-rw-r--r--modules/pacing/task_queue_paced_sender.cc106
-rw-r--r--modules/pacing/task_queue_paced_sender.h43
-rw-r--r--modules/pacing/task_queue_paced_sender_unittest.cc446
-rw-r--r--modules/remote_bitrate_estimator/BUILD.gn5
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc10
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h14
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc14
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h23
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy.cc10
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy.h4
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc7
-rw-r--r--modules/remote_bitrate_estimator/test/bwe_test_logging.cc26
-rw-r--r--modules/remote_bitrate_estimator/test/bwe_test_logging.h4
-rw-r--r--modules/rtp_rtcp/BUILD.gn29
-rw-r--r--modules/rtp_rtcp/include/flexfec_sender.h5
-rw-r--r--modules/rtp_rtcp/include/rtp_header_extension_map.h4
-rw-r--r--modules/rtp_rtcp/include/rtp_rtcp.h448
-rw-r--r--modules/rtp_rtcp/include/rtp_rtcp_defines.cc6
-rw-r--r--modules/rtp_rtcp/include/rtp_rtcp_defines.h17
-rw-r--r--modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h2
-rw-r--r--modules/rtp_rtcp/mocks/mock_rtp_rtcp.h107
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_receiver.cc4
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_receiver.h22
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_sender.cc2
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_sender.h18
-rw-r--r--modules/rtp_rtcp/source/active_decode_targets_helper.cc124
-rw-r--r--modules/rtp_rtcp/source/active_decode_targets_helper.h63
-rw-r--r--modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc272
-rw-r--r--modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc472
-rw-r--r--modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h149
-rw-r--r--modules/rtp_rtcp/source/dtmf_queue.cc6
-rw-r--r--modules/rtp_rtcp/source/dtmf_queue.h4
-rw-r--r--modules/rtp_rtcp/source/flexfec_sender.cc4
-rw-r--r--modules/rtp_rtcp/source/nack_rtx_unittest.cc18
-rw-r--r--modules/rtp_rtcp/source/receive_statistics_impl.cc24
-rw-r--r--modules/rtp_rtcp/source/receive_statistics_impl.h6
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver.cc174
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver.h27
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver_unittest.cc14
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender.cc231
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender.h223
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender_unittest.cc61
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_config.h4
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc12
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc5
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc10
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h15
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc119
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc22
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc52
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h3
-rw-r--r--modules/rtp_rtcp/source/rtp_format.cc8
-rw-r--r--modules/rtp_rtcp/source/rtp_format.h4
-rw-r--r--modules/rtp_rtcp/source/rtp_format_h264.cc14
-rw-r--r--modules/rtp_rtcp/source/rtp_format_h264.h4
-rw-r--r--modules/rtp_rtcp/source/rtp_format_h264_unittest.cc299
-rw-r--r--modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h7
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extension_map.cc1
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extensions.cc80
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extensions.h18
-rw-r--r--modules/rtp_rtcp/source/rtp_packet.cc1
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_history.cc24
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_history.h4
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_received.cc2
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_to_send.h11
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl.cc62
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl.h27
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2.cc760
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2.h329
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc645
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc4
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_interface.h435
-rw-r--r--modules/rtp_rtcp/source/rtp_sender.cc84
-rw-r--r--modules/rtp_rtcp/source/rtp_sender.h174
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio.cc16
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio.h24
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc7
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_egress.cc324
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_egress.h77
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_unittest.cc789
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video.cc79
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video.h38
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc51
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h9
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video_unittest.cc278
-rw-r--r--modules/rtp_rtcp/source/rtp_utility.cc14
-rw-r--r--modules/rtp_rtcp/source/rtp_video_header.h7
-rw-r--r--modules/rtp_rtcp/source/source_tracker.cc4
-rw-r--r--modules/rtp_rtcp/source/source_tracker.h4
-rw-r--r--modules/rtp_rtcp/source/ulpfec_generator.cc12
-rw-r--r--modules/rtp_rtcp/source/ulpfec_generator.h8
-rw-r--r--modules/rtp_rtcp/source/ulpfec_receiver_impl.cc16
-rw-r--r--modules/rtp_rtcp/source/ulpfec_receiver_impl.h4
-rw-r--r--modules/utility/BUILD.gn2
-rw-r--r--modules/utility/source/process_thread_impl.h5
-rw-r--r--modules/utility/source/process_thread_impl_unittest.cc6
-rw-r--r--modules/video_capture/BUILD.gn7
-rw-r--r--modules/video_capture/linux/device_info_linux.cc21
-rw-r--r--modules/video_capture/linux/video_capture_linux.cc8
-rw-r--r--modules/video_capture/linux/video_capture_linux.h6
-rw-r--r--modules/video_capture/test/video_capture_unittest.cc18
-rw-r--r--modules/video_capture/video_capture_impl.cc8
-rw-r--r--modules/video_capture/video_capture_impl.h4
-rw-r--r--modules/video_capture/windows/video_capture_ds.cc4
-rw-r--r--modules/video_coding/BUILD.gn102
-rw-r--r--modules/video_coding/chain_diff_calculator.cc62
-rw-r--r--modules/video_coding/chain_diff_calculator.h46
-rw-r--r--modules/video_coding/chain_diff_calculator_unittest.cc126
-rw-r--r--modules/video_coding/codecs/av1/BUILD.gn99
-rw-r--r--modules/video_coding/codecs/av1/create_scalability_structure.cc73
-rw-r--r--modules/video_coding/codecs/av1/create_scalability_structure.h29
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_decoder.cc8
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder.cc376
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder.h3
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc59
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_unittest.cc320
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l1t2.cc123
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l1t2.h48
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l1t3.cc109
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l1t3.h53
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1.cc102
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1.h43
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc107
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h43
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc35
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t1h.h27
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2.cc128
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2.h53
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc130
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h53
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc130
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h53
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l3t1.cc112
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l3t1.h45
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l3t3.cc224
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_l3t3.h49
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_s2t1.cc93
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_s2t1.h41
-rw-r--r--modules/video_coding/codecs/av1/scalability_structure_unittest.cc295
-rw-r--r--modules/video_coding/codecs/av1/scalable_video_controller.h141
-rw-r--r--modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc67
-rw-r--r--modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h38
-rw-r--r--modules/video_coding/codecs/h264/h264_decoder_impl.cc1
-rw-r--r--modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h6
-rw-r--r--modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h1
-rw-r--r--modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc6
-rw-r--r--modules/video_coding/codecs/test/encoded_video_frame_producer.cc78
-rw-r--r--modules/video_coding/codecs/test/encoded_video_frame_producer.h83
-rw-r--r--modules/video_coding/codecs/test/video_codec_unittest.cc12
-rw-r--r--modules/video_coding/codecs/test/video_codec_unittest.h6
-rw-r--r--modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc3
-rw-r--r--modules/video_coding/codecs/test/videocodec_test_libaom.cc97
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers.cc70
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers.h2
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc131
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h5
-rw-r--r--modules/video_coding/codecs/vp8/screenshare_layers.cc24
-rw-r--r--modules/video_coding/codecs/vp8/screenshare_layers.h2
-rw-r--r--modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h148
-rw-r--r--modules/video_coding/codecs/vp9/include/vp9.h4
-rw-r--r--modules/video_coding/codecs/vp9/include/vp9_globals.h4
-rw-r--r--modules/video_coding/codecs/vp9/svc_config.cc30
-rw-r--r--modules/video_coding/codecs/vp9/svc_config_unittest.cc26
-rw-r--r--modules/video_coding/codecs/vp9/svc_rate_allocator.cc3
-rw-r--r--modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc287
-rw-r--r--modules/video_coding/codecs/vp9/vp9.cc16
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc8
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h4
-rw-r--r--modules/video_coding/codecs/vp9/vp9_impl.cc130
-rw-r--r--modules/video_coding/codecs/vp9/vp9_impl.h11
-rw-r--r--modules/video_coding/decoder_database.cc6
-rw-r--r--modules/video_coding/deprecated/BUILD.gn34
-rw-r--r--modules/video_coding/deprecated/nack_module.cc (renamed from modules/video_coding/nack_module.cc)73
-rw-r--r--modules/video_coding/deprecated/nack_module.h (renamed from modules/video_coding/nack_module.h)45
-rw-r--r--modules/video_coding/encoded_frame.cc14
-rw-r--r--modules/video_coding/fec_controller_default.cc9
-rw-r--r--modules/video_coding/fec_controller_default.h8
-rw-r--r--modules/video_coding/frame_buffer2.cc37
-rw-r--r--modules/video_coding/frame_buffer2.h62
-rw-r--r--modules/video_coding/frame_buffer2_unittest.cc35
-rw-r--r--modules/video_coding/frame_object.cc5
-rw-r--r--modules/video_coding/frame_object.h1
-rw-r--r--modules/video_coding/generic_decoder.cc20
-rw-r--r--modules/video_coding/generic_decoder.h6
-rw-r--r--modules/video_coding/generic_decoder_unittest.cc10
-rw-r--r--modules/video_coding/h264_sps_pps_tracker.cc5
-rw-r--r--modules/video_coding/jitter_buffer.cc49
-rw-r--r--modules/video_coding/jitter_buffer.h49
-rw-r--r--modules/video_coding/jitter_estimator.cc34
-rw-r--r--modules/video_coding/jitter_estimator.h1
-rw-r--r--modules/video_coding/jitter_estimator_tests.cc16
-rw-r--r--modules/video_coding/nack_module2.cc343
-rw-r--r--modules/video_coding/nack_module2.h140
-rw-r--r--modules/video_coding/nack_module2_unittest.cc411
-rw-r--r--modules/video_coding/nack_module_unittest.cc6
-rw-r--r--modules/video_coding/packet_buffer.cc51
-rw-r--r--modules/video_coding/packet_buffer.h50
-rw-r--r--modules/video_coding/receiver.h1
-rw-r--r--modules/video_coding/rtp_frame_reference_finder.cc126
-rw-r--r--modules/video_coding/rtp_frame_reference_finder.h1
-rw-r--r--modules/video_coding/rtp_frame_reference_finder_unittest.cc373
-rw-r--r--modules/video_coding/session_info.cc6
-rw-r--r--modules/video_coding/timing.cc34
-rw-r--r--modules/video_coding/timing.h32
-rw-r--r--modules/video_coding/utility/ivf_file_reader.h1
-rw-r--r--modules/video_coding/utility/ivf_file_writer.h1
-rw-r--r--modules/video_coding/utility/simulcast_rate_allocator_unittest.cc18
-rw-r--r--modules/video_coding/utility/vp9_uncompressed_header_parser.cc135
-rw-r--r--modules/video_coding/utility/vp9_uncompressed_header_parser.h60
-rw-r--r--modules/video_coding/video_codec_initializer.cc14
-rw-r--r--modules/video_coding/video_coding_impl.cc1
-rw-r--r--modules/video_coding/video_coding_impl.h7
-rw-r--r--modules/video_coding/video_receiver.cc11
-rw-r--r--modules/video_coding/video_receiver_unittest.cc17
-rw-r--r--p2p/BUILD.gn14
-rw-r--r--p2p/base/basic_async_resolver_factory_unittest.cc1
-rw-r--r--p2p/base/connection.cc32
-rw-r--r--p2p/base/connection_info.cc1
-rw-r--r--p2p/base/connection_info.h1
-rw-r--r--p2p/base/ice_controller_interface.h19
-rw-r--r--p2p/base/mock_async_resolver.h13
-rw-r--r--p2p/base/mock_ice_transport.h23
-rw-r--r--p2p/base/p2p_transport_channel.cc47
-rw-r--r--p2p/base/p2p_transport_channel.h12
-rw-r--r--p2p/base/p2p_transport_channel_ice_field_trials.h4
-rw-r--r--p2p/base/p2p_transport_channel_unittest.cc262
-rw-r--r--p2p/base/port_unittest.cc72
-rw-r--r--p2p/base/stun_port_unittest.cc41
-rw-r--r--p2p/base/transport_description.cc4
-rw-r--r--p2p/base/transport_description.h23
-rw-r--r--p2p/base/transport_description_factory.cc9
-rw-r--r--p2p/base/transport_description_factory.h3
-rw-r--r--p2p/base/transport_description_factory_unittest.cc68
-rw-r--r--p2p/base/turn_port.cc17
-rw-r--r--p2p/client/basic_port_allocator.cc11
-rw-r--r--p2p/client/basic_port_allocator.h1
-rw-r--r--pc/BUILD.gn38
-rw-r--r--pc/channel.cc234
-rw-r--r--pc/channel.h21
-rw-r--r--pc/channel_manager.cc21
-rw-r--r--pc/channel_manager.h3
-rw-r--r--pc/channel_manager_unittest.cc22
-rw-r--r--pc/channel_unittest.cc7
-rw-r--r--pc/composite_data_channel_transport.cc123
-rw-r--r--pc/composite_data_channel_transport.h63
-rw-r--r--pc/data_channel.h340
-rw-r--r--pc/data_channel_controller.cc341
-rw-r--r--pc/data_channel_controller.h90
-rw-r--r--pc/data_channel_unittest.cc56
-rw-r--r--pc/data_channel_utils.cc54
-rw-r--r--pc/data_channel_utils.h62
-rw-r--r--pc/datagram_rtp_transport.cc380
-rw-r--r--pc/datagram_rtp_transport.h173
-rw-r--r--pc/dtls_transport.cc6
-rw-r--r--pc/dtls_transport.h7
-rw-r--r--pc/jsep_transport.cc151
-rw-r--r--pc/jsep_transport.h131
-rw-r--r--pc/jsep_transport_controller.cc344
-rw-r--r--pc/jsep_transport_controller.h82
-rw-r--r--pc/jsep_transport_controller_unittest.cc526
-rw-r--r--pc/jsep_transport_unittest.cc4
-rw-r--r--pc/media_session.cc225
-rw-r--r--pc/media_session.h27
-rw-r--r--pc/media_session_unittest.cc551
-rw-r--r--pc/media_stream_unittest.cc2
-rw-r--r--pc/peer_connection.cc341
-rw-r--r--pc/peer_connection.h94
-rw-r--r--pc/peer_connection_adaptation_integrationtest.cc161
-rw-r--r--pc/peer_connection_crypto_unittest.cc18
-rw-r--r--pc/peer_connection_factory.cc7
-rw-r--r--pc/peer_connection_factory.h6
-rw-r--r--pc/peer_connection_header_extension_unittest.cc97
-rw-r--r--pc/peer_connection_integrationtest.cc1020
-rw-r--r--pc/peer_connection_interface_unittest.cc32
-rw-r--r--pc/peer_connection_internal.h14
-rw-r--r--pc/peer_connection_media_unittest.cc17
-rw-r--r--pc/proxy_unittest.cc16
-rw-r--r--pc/remote_audio_source.cc6
-rw-r--r--pc/remote_audio_source.h4
-rw-r--r--pc/rtc_stats_collector.cc310
-rw-r--r--pc/rtc_stats_collector.h12
-rw-r--r--pc/rtc_stats_collector_unittest.cc280
-rw-r--r--pc/rtc_stats_integrationtest.cc106
-rw-r--r--pc/rtp_data_channel.cc394
-rw-r--r--pc/rtp_data_channel.h199
-rw-r--r--pc/rtp_sender.cc6
-rw-r--r--pc/rtp_sender.h4
-rw-r--r--pc/rtp_sender_receiver_unittest.cc13
-rw-r--r--pc/rtp_transceiver.cc48
-rw-r--r--pc/rtp_transceiver.h8
-rw-r--r--pc/rtp_transceiver_unittest.cc111
-rw-r--r--pc/sctp_data_channel.cc (renamed from pc/data_channel.cc)542
-rw-r--r--pc/sctp_data_channel.h285
-rw-r--r--pc/sctp_transport.cc12
-rw-r--r--pc/sctp_transport.h7
-rw-r--r--pc/sctp_utils.cc40
-rw-r--r--pc/sctp_utils_unittest.cc28
-rw-r--r--pc/session_description.h16
-rw-r--r--pc/srtp_filter.h1
-rw-r--r--pc/srtp_session.cc9
-rw-r--r--pc/srtp_session.h4
-rw-r--r--pc/stats_collector.cc15
-rw-r--r--pc/stats_collector_unittest.cc2
-rw-r--r--pc/test/fake_audio_capture_module.cc92
-rw-r--r--pc/test/fake_audio_capture_module.h57
-rw-r--r--pc/test/fake_audio_capture_module_unittest.cc12
-rw-r--r--pc/test/fake_data_channel_provider.h27
-rw-r--r--pc/test/fake_peer_connection_base.h16
-rw-r--r--pc/test/fake_peer_connection_for_stats.h19
-rw-r--r--pc/test/fake_periodic_video_source.h12
-rw-r--r--pc/test/fake_periodic_video_track_source.h4
-rw-r--r--pc/test/mock_channel_interface.h53
-rw-r--r--pc/test/mock_data_channel.h55
-rw-r--r--pc/test/mock_delayable.h11
-rw-r--r--pc/test/mock_rtp_receiver_internal.h75
-rw-r--r--pc/test/mock_rtp_sender_internal.h86
-rw-r--r--pc/test/peer_connection_test_wrapper.cc16
-rw-r--r--pc/test/peer_connection_test_wrapper.h9
-rw-r--r--pc/track_media_info_map.cc15
-rw-r--r--pc/track_media_info_map_unittest.cc25
-rw-r--r--pc/video_rtp_receiver_unittest.cc16
-rw-r--r--pc/video_rtp_track_source.cc6
-rw-r--r--pc/video_rtp_track_source.h4
-rw-r--r--pc/video_rtp_track_source_unittest.cc6
-rw-r--r--pc/webrtc_sdp.cc115
-rw-r--r--pc/webrtc_sdp_unittest.cc177
-rw-r--r--rtc_base/BUILD.gn81
-rw-r--r--rtc_base/DEPS2
-rw-r--r--rtc_base/async_invoker.cc22
-rw-r--r--rtc_base/async_invoker.h91
-rw-r--r--rtc_base/async_invoker_inl.h1
-rw-r--r--rtc_base/bit_buffer.cc11
-rw-r--r--rtc_base/bit_buffer_unittest.cc24
-rw-r--r--rtc_base/buffer.h4
-rw-r--r--rtc_base/buffer_queue.cc10
-rw-r--r--rtc_base/buffer_queue.h8
-rw-r--r--rtc_base/buffer_unittest.cc2
-rw-r--r--rtc_base/checks.h2
-rw-r--r--rtc_base/checks_unittest.cc2
-rw-r--r--rtc_base/deprecated/recursive_critical_section.cc (renamed from rtc_base/critical_section.cc)52
-rw-r--r--rtc_base/deprecated/recursive_critical_section.h (renamed from rtc_base/critical_section.h)51
-rw-r--r--rtc_base/deprecated/recursive_critical_section_unittest.cc (renamed from rtc_base/critical_section_unittest.cc)37
-rw-r--r--rtc_base/deprecated/signal_thread.cc (renamed from rtc_base/signal_thread.cc)46
-rw-r--r--rtc_base/deprecated/signal_thread.h166
-rw-r--r--rtc_base/deprecated/signal_thread_unittest.cc (renamed from rtc_base/signal_thread_unittest.cc)18
-rw-r--r--rtc_base/event_tracer.cc12
-rw-r--r--rtc_base/event_tracer_unittest.cc12
-rw-r--r--rtc_base/experiments/BUILD.gn30
-rw-r--r--rtc_base/experiments/quality_rampup_experiment.cc4
-rw-r--r--rtc_base/experiments/quality_rampup_experiment.h2
-rw-r--r--rtc_base/fake_clock.cc6
-rw-r--r--rtc_base/fake_clock.h4
-rw-r--r--rtc_base/firewall_socket_server.cc14
-rw-r--r--rtc_base/firewall_socket_server.h6
-rw-r--r--rtc_base/logging.cc30
-rw-r--r--rtc_base/logging.h16
-rw-r--r--rtc_base/memory/BUILD.gn5
-rw-r--r--rtc_base/memory/fifo_buffer.cc26
-rw-r--r--rtc_base/memory/fifo_buffer.h17
-rw-r--r--rtc_base/nat_server.cc20
-rw-r--r--rtc_base/nat_server.h9
-rw-r--r--rtc_base/net_helpers.cc62
-rw-r--r--rtc_base/net_helpers.h36
-rw-r--r--rtc_base/network.cc24
-rw-r--r--rtc_base/network.h1
-rw-r--r--rtc_base/network/BUILD.gn6
-rw-r--r--rtc_base/one_time_event.h6
-rw-r--r--rtc_base/openssl_adapter_unittest.cc40
-rw-r--r--rtc_base/operations_chain_unittest.cc5
-rw-r--r--rtc_base/physical_socket_server.cc281
-rw-r--r--rtc_base/physical_socket_server.h58
-rw-r--r--rtc_base/physical_socket_server_unittest.cc133
-rw-r--r--rtc_base/platform_thread_types.cc28
-rw-r--r--rtc_base/rate_limiter.cc6
-rw-r--r--rtc_base/rate_limiter.h4
-rw-r--r--rtc_base/rate_statistics.cc109
-rw-r--r--rtc_base/rate_statistics.h26
-rw-r--r--rtc_base/signal_thread.h148
-rw-r--r--rtc_base/ssl_adapter_unittest.cc2
-rw-r--r--rtc_base/stream.h1
-rw-r--r--rtc_base/strings/string_builder_unittest.cc12
-rw-r--r--rtc_base/swap_queue_unittest.cc6
-rw-r--r--rtc_base/synchronization/BUILD.gn59
-rw-r--r--rtc_base/synchronization/DEPS11
-rw-r--r--rtc_base/synchronization/mutex.cc39
-rw-r--r--rtc_base/synchronization/mutex.h146
-rw-r--r--rtc_base/synchronization/mutex_abseil.h37
-rw-r--r--rtc_base/synchronization/mutex_benchmark.cc95
-rw-r--r--rtc_base/synchronization/mutex_critical_section.h54
-rw-r--r--rtc_base/synchronization/mutex_pthread.h53
-rw-r--r--rtc_base/synchronization/mutex_unittest.cc206
-rw-r--r--rtc_base/synchronization/sequence_checker.cc51
-rw-r--r--rtc_base/synchronization/sequence_checker.h24
-rw-r--r--rtc_base/synchronization/sequence_checker_unittest.cc9
-rw-r--r--rtc_base/synchronization/yield.cc36
-rw-r--r--rtc_base/synchronization/yield.h20
-rw-r--r--rtc_base/synchronization/yield_policy_unittest.cc2
-rw-r--r--rtc_base/system/BUILD.gn13
-rw-r--r--rtc_base/system/file_wrapper.h2
-rw-r--r--rtc_base/system/thread_registry.cc10
-rw-r--r--rtc_base/task_queue_libevent.cc8
-rw-r--r--rtc_base/task_queue_stdlib.cc12
-rw-r--r--rtc_base/task_queue_win.cc8
-rw-r--r--rtc_base/task_utils/BUILD.gn5
-rw-r--r--rtc_base/task_utils/pending_task_safety_flag.cc2
-rw-r--r--rtc_base/task_utils/pending_task_safety_flag.h28
-rw-r--r--rtc_base/task_utils/pending_task_safety_flag_unittest.cc32
-rw-r--r--rtc_base/task_utils/repeating_task.cc11
-rw-r--r--rtc_base/task_utils/repeating_task.h36
-rw-r--r--rtc_base/task_utils/repeating_task_unittest.cc52
-rw-r--r--rtc_base/task_utils/to_queued_task.h22
-rw-r--r--rtc_base/task_utils/to_queued_task_unittest.cc3
-rw-r--r--rtc_base/test_client.cc6
-rw-r--r--rtc_base/test_client.h4
-rw-r--r--rtc_base/thread.cc85
-rw-r--r--rtc_base/thread.h32
-rw-r--r--rtc_base/thread_unittest.cc185
-rw-r--r--rtc_base/time_utils.cc2
-rw-r--r--rtc_base/virtual_socket_server.cc1
-rw-r--r--rtc_base/virtual_socket_server.h5
-rw-r--r--rtc_base/win32_socket_server.cc6
-rw-r--r--rtc_base/win32_socket_server.h4
-rw-r--r--rtc_tools/BUILD.gn21
-rw-r--r--rtc_tools/converter/yuv_to_ivf_converter.cc6
-rw-r--r--rtc_tools/network_tester/BUILD.gn11
-rwxr-xr-xrtc_tools/network_tester/androidapp/AndroidManifest.xml2
-rw-r--r--rtc_tools/network_tester/test_controller.cc10
-rw-r--r--rtc_tools/network_tester/test_controller.h5
-rw-r--r--rtc_tools/rtc_event_log_visualizer/alerts.cc227
-rw-r--r--rtc_tools/rtc_event_log_visualizer/alerts.h86
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyze_audio.cc503
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyze_audio.h75
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyzer.cc938
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyzer.h201
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyzer_common.cc83
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyzer_common.h182
-rw-r--r--rtc_tools/rtc_event_log_visualizer/main.cc115
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_base.cc229
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_base.h30
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc55
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_protobuf.h5
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_python.cc163
-rw-r--r--rtc_tools/rtc_event_log_visualizer/plot_python.h3
-rw-r--r--rtc_tools/rtc_event_log_visualizer/triage_notifications.h158
-rw-r--r--sdk/BUILD.gn36
-rw-r--r--sdk/android/AndroidManifest.xml2
-rw-r--r--sdk/android/BUILD.gn50
-rw-r--r--sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java11
-rw-r--r--sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java22
-rw-r--r--sdk/android/api/org/webrtc/PeerConnection.java24
-rw-r--r--sdk/android/api/org/webrtc/PeerConnectionFactory.java13
-rw-r--r--sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java12
-rw-r--r--sdk/android/api/org/webrtc/SurfaceTextureHelper.java11
-rw-r--r--sdk/android/instrumentationtests/AndroidManifest.xml2
-rw-r--r--sdk/android/native_api/jni/java_types.cc10
-rw-r--r--sdk/android/native_api/jni/java_types.h2
-rw-r--r--sdk/android/native_api/stacktrace/stacktrace.cc6
-rw-r--r--sdk/android/native_unittests/audio_device/audio_device_unittest.cc8
-rw-r--r--sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc12
-rw-r--r--sdk/android/src/java/org/webrtc/MediaCodecUtils.java34
-rw-r--r--sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java4
-rw-r--r--sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java20
-rw-r--r--sdk/android/src/jni/android_network_monitor.cc19
-rw-r--r--sdk/android/src/jni/android_network_monitor.h1
-rw-r--r--sdk/android/src/jni/audio_device/audio_device_module.cc4
-rw-r--r--sdk/android/src/jni/audio_device/audio_track_jni.cc28
-rw-r--r--sdk/android/src/jni/pc/peer_connection.cc9
-rw-r--r--sdk/android/src/jni/pc/peer_connection_factory.cc7
-rw-r--r--sdk/android/src/jni/video_decoder_wrapper.cc6
-rw-r--r--sdk/android/src/jni/video_decoder_wrapper.h4
-rw-r--r--sdk/android/tests/resources/robolectric.properties2
-rw-r--r--sdk/objc/api/peerconnection/RTCConfiguration.h12
-rw-r--r--sdk/objc/api/peerconnection/RTCConfiguration.mm9
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm3
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnection.h4
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnection.mm5
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h23
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm48
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm5
-rw-r--r--sdk/objc/api/peerconnection/RTCRtpSender.h4
-rw-r--r--sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h4
-rw-r--r--sdk/objc/api/peerconnection/RTCStatisticsReport.h12
-rw-r--r--sdk/objc/api/peerconnection/RTCStatisticsReport.mm11
-rw-r--r--sdk/objc/components/audio/RTCAudioSession.mm4
-rw-r--r--sdk/objc/native/src/audio/audio_device_ios.mm1
-rw-r--r--sdk/objc/native/src/audio/audio_device_module_ios.h1
-rw-r--r--sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm7
-rw-r--r--stats/rtcstats_objects.cc64
-rw-r--r--style-guide.md4
-rw-r--r--system_wrappers/BUILD.gn6
-rw-r--r--system_wrappers/source/clock.cc6
-rw-r--r--system_wrappers/source/field_trial_unittest.cc4
-rw-r--r--system_wrappers/source/metrics.cc41
-rw-r--r--system_wrappers/source/metrics_unittest.cc3
-rw-r--r--test/BUILD.gn94
-rw-r--r--test/DEPS3
-rw-r--r--test/android/AndroidManifest.xml2
-rw-r--r--test/benchmark_main.cc17
-rw-r--r--test/direct_transport.cc5
-rw-r--r--test/direct_transport.h3
-rw-r--r--test/explicit_key_value_config.cc57
-rw-r--r--test/explicit_key_value_config.h35
-rw-r--r--test/fake_encoder.cc71
-rw-r--r--test/fake_encoder.h49
-rw-r--r--test/frame_forwarder.cc20
-rw-r--r--test/frame_forwarder.h26
-rw-r--r--test/frame_generator.cc4
-rw-r--r--test/frame_generator.h10
-rw-r--r--test/frame_generator_capturer.cc23
-rw-r--r--test/frame_generator_capturer.h4
-rw-r--r--test/frame_generator_capturer_unittest.cc4
-rw-r--r--test/fuzzers/BUILD.gn46
-rw-r--r--test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc (renamed from test/fuzzers/audio_encoder_isax_fixed_fuzzer.cc)0
-rw-r--r--test/fuzzers/audio_encoder_isac_float_fuzzer.cc (renamed from test/fuzzers/audio_encoder_isax_float_fuzzer.cc)0
-rw-r--r--test/fuzzers/residual_echo_detector_fuzzer.cc5
-rw-r--r--test/fuzzers/rtcp_receiver_fuzzer.cc4
-rw-r--r--test/fuzzers/rtp_frame_reference_finder_fuzzer.cc1
-rw-r--r--test/fuzzers/rtp_packet_fuzzer.cc4
-rw-r--r--test/logging/BUILD.gn2
-rw-r--r--test/mock_audio_decoder.h22
-rw-r--r--test/mock_audio_decoder_factory.h18
-rw-r--r--test/mock_audio_encoder.h69
-rw-r--r--test/mock_audio_encoder_factory.h24
-rw-r--r--test/mock_transport.h2
-rw-r--r--test/network/BUILD.gn8
-rw-r--r--test/network/cross_traffic.cc13
-rw-r--r--test/network/emulated_network_manager.h1
-rw-r--r--test/network/fake_network_socket_server.cc4
-rw-r--r--test/network/fake_network_socket_server.h4
-rw-r--r--test/network/network_emulation.cc65
-rw-r--r--test/network/network_emulation.h3
-rw-r--r--test/network/network_emulation_unittest.cc64
-rw-r--r--test/pc/e2e/BUILD.gn1103
-rw-r--r--test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc151
-rw-r--r--test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h35
-rw-r--r--test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h1
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc756
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer.h334
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc407
-rw-r--r--test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc50
-rw-r--r--test/pc/e2e/analyzer/video/example_video_quality_analyzer.h38
-rw-r--r--test/pc/e2e/analyzer/video/multi_head_queue.h99
-rw-r--r--test/pc/e2e/analyzer/video/multi_head_queue_test.cc103
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc37
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h15
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc42
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h13
-rw-r--r--test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc85
-rw-r--r--test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h11
-rw-r--r--test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc74
-rw-r--r--test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc61
-rw-r--r--test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h38
-rw-r--r--test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc118
-rw-r--r--test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h32
-rw-r--r--test/pc/e2e/analyzer_helper.cc28
-rw-r--r--test/pc/e2e/analyzer_helper.h32
-rw-r--r--test/pc/e2e/cross_media_metrics_reporter.cc129
-rw-r--r--test/pc/e2e/cross_media_metrics_reporter.h70
-rw-r--r--test/pc/e2e/echo/echo_emulation.cc10
-rw-r--r--test/pc/e2e/media/media_helper.cc26
-rw-r--r--test/pc/e2e/media/media_helper.h3
-rw-r--r--test/pc/e2e/network_quality_metrics_reporter.cc76
-rw-r--r--test/pc/e2e/network_quality_metrics_reporter.h19
-rw-r--r--test/pc/e2e/peer_configurer.cc47
-rw-r--r--test/pc/e2e/peer_configurer.h49
-rw-r--r--test/pc/e2e/peer_connection_e2e_smoke_test.cc136
-rw-r--r--test/pc/e2e/peer_connection_quality_test.cc152
-rw-r--r--test/pc/e2e/peer_connection_quality_test.h20
-rw-r--r--test/pc/e2e/peer_connection_quality_test_params.h4
-rw-r--r--test/pc/e2e/stats_poller.cc9
-rw-r--r--test/pc/e2e/stats_poller.h6
-rw-r--r--test/pc/e2e/test_activities_executor.cc6
-rw-r--r--test/pc/e2e/test_activities_executor.h4
-rw-r--r--test/pc/e2e/test_peer.cc15
-rw-r--r--test/pc/e2e/test_peer.h76
-rw-r--r--test/pc/e2e/test_peer_factory.cc83
-rw-r--r--test/pc/e2e/test_peer_factory.h54
-rw-r--r--test/peer_scenario/BUILD.gn2
-rw-r--r--test/rtp_header_parser.cc17
-rw-r--r--test/rtp_rtcp_observer.h1
-rw-r--r--test/run_loop_unittest.cc1
-rw-r--r--test/scenario/BUILD.gn3
-rw-r--r--test/scenario/call_client.cc13
-rw-r--r--test/scenario/call_client.h2
-rw-r--r--test/scenario/network_node.cc8
-rw-r--r--test/scenario/network_node.h15
-rw-r--r--test/scenario/scenario_unittest.cc48
-rw-r--r--test/scenario/video_stream.cc6
-rw-r--r--test/scenario/video_stream.h7
-rw-r--r--test/scenario/video_stream_unittest.cc94
-rw-r--r--test/test_main.cc7
-rw-r--r--test/test_main_lib.cc66
-rw-r--r--test/test_video_capturer.cc2
-rw-r--r--test/test_video_capturer.h6
-rw-r--r--test/testsupport/ivf_video_frame_generator.cc12
-rw-r--r--test/testsupport/ivf_video_frame_generator.h6
-rw-r--r--test/testsupport/ivf_video_frame_generator_unittest.cc7
-rw-r--r--test/testsupport/mock/mock_frame_reader.h10
-rw-r--r--test/testsupport/perf_test.cc22
-rw-r--r--test/testsupport/perf_test.h1
-rw-r--r--test/testsupport/perf_test_histogram_writer.cc14
-rw-r--r--test/testsupport/resources_dir_flag.cc2
-rw-r--r--test/testsupport/resources_dir_flag.h2
-rw-r--r--test/testsupport/test_artifacts_unittest.cc1
-rw-r--r--test/testsupport/video_frame_writer.h1
-rw-r--r--test/time_controller/BUILD.gn3
-rw-r--r--test/time_controller/simulated_process_thread.cc22
-rw-r--r--test/time_controller/simulated_process_thread.h5
-rw-r--r--test/time_controller/simulated_task_queue.cc22
-rw-r--r--test/time_controller/simulated_task_queue.h5
-rw-r--r--test/time_controller/simulated_thread.cc8
-rw-r--r--test/time_controller/simulated_thread.h5
-rw-r--r--test/time_controller/simulated_time_controller.cc20
-rw-r--r--test/time_controller/simulated_time_controller.h28
-rw-r--r--third_party/libaom/source/libaom/aom/aom_codec.h1
-rw-r--r--third_party/libaom/source/libaom/aom/aom_decoder.h1
-rw-r--r--third_party/libaom/source/libaom/aom/aom_encoder.h1
-rw-r--r--third_party/libaom/source/libaom/aom/aomcx.h12
-rw-r--r--third_party/libaom/source/libaom/aom/aomdx.h1
-rw-r--r--third_party/libsrtp/crypto/include/auth.h1
-rw-r--r--third_party/libsrtp/crypto/include/crypto_types.h1
-rw-r--r--third_party/libsrtp/include/srtp.h1
-rw-r--r--third_party/libsrtp/include/srtp_priv.h1
-rw-r--r--third_party/libyuv/include/libyuv.h1
-rw-r--r--third_party/libyuv/include/libyuv/compare.h1
-rw-r--r--third_party/libyuv/include/libyuv/convert.h1
-rw-r--r--third_party/libyuv/include/libyuv/convert_argb.h1
-rw-r--r--third_party/libyuv/include/libyuv/convert_from.h1
-rw-r--r--third_party/libyuv/include/libyuv/planar_functions.h1
-rw-r--r--third_party/libyuv/include/libyuv/rotate_argb.h1
-rw-r--r--third_party/libyuv/include/libyuv/scale.h1
-rw-r--r--third_party/pffft/src/pffft.h1
-rw-r--r--third_party/rnnoise/src/rnn_activations.h1
-rw-r--r--third_party/rnnoise/src/rnn_vad_weights.h1
-rwxr-xr-xtools_webrtc/autoroller/roll_deps.py9
-rwxr-xr-xtools_webrtc/autoroller/unittests/roll_deps_test.py4
-rwxr-xr-xtools_webrtc/get_landmines.py1
-rwxr-xr-xtools_webrtc/libs/generate_licenses.py1
-rwxr-xr-xtools_webrtc/libs/generate_licenses_test.py9
-rw-r--r--tools_webrtc/mb/mb_config.pyl15
-rwxr-xr-xtools_webrtc/mb/mb_unittest.py2
-rw-r--r--tools_webrtc/msan/suppressions.txt (renamed from tools_webrtc/msan/blacklist.txt)3
-rw-r--r--tools_webrtc/sancov/README9
-rw-r--r--tools_webrtc/sancov/blacklist.txt21
-rw-r--r--tools_webrtc/ubsan/suppressions.txt (renamed from tools_webrtc/ubsan/blacklist.txt)0
-rw-r--r--tools_webrtc/ubsan/vptr_suppressions.txt (renamed from tools_webrtc/ubsan/vptr_blacklist.txt)0
-rw-r--r--video/BUILD.gn33
-rw-r--r--video/adaptation/BUILD.gn20
-rw-r--r--video/adaptation/encode_usage_resource.cc42
-rw-r--r--video/adaptation/encode_usage_resource.h22
-rw-r--r--video/adaptation/overuse_frame_detector_unittest.cc4
-rw-r--r--video/adaptation/quality_rampup_experiment_helper.cc80
-rw-r--r--video/adaptation/quality_rampup_experiment_helper.h68
-rw-r--r--video/adaptation/quality_scaler_resource.cc153
-rw-r--r--video/adaptation/quality_scaler_resource.h65
-rw-r--r--video/adaptation/quality_scaler_resource_unittest.cc170
-rw-r--r--video/adaptation/video_stream_encoder_resource.cc85
-rw-r--r--video/adaptation/video_stream_encoder_resource.h81
-rw-r--r--video/adaptation/video_stream_encoder_resource_manager.cc596
-rw-r--r--video/adaptation/video_stream_encoder_resource_manager.h247
-rw-r--r--video/adaptation/video_stream_encoder_resource_manager_unittest.cc98
-rw-r--r--video/call_stats.cc4
-rw-r--r--video/call_stats.h4
-rw-r--r--video/call_stats2.cc67
-rw-r--r--video/call_stats2.h45
-rw-r--r--video/call_stats2_unittest.cc13
-rw-r--r--video/call_stats_unittest.cc2
-rw-r--r--video/encoder_rtcp_feedback.cc2
-rw-r--r--video/encoder_rtcp_feedback.h6
-rw-r--r--video/end_to_end_tests/bandwidth_tests.cc22
-rw-r--r--video/end_to_end_tests/codec_tests.cc30
-rw-r--r--video/end_to_end_tests/extended_reports_tests.cc16
-rw-r--r--video/end_to_end_tests/fec_tests.cc35
-rw-r--r--video/end_to_end_tests/histogram_tests.cc9
-rw-r--r--video/end_to_end_tests/multi_codec_receive_tests.cc17
-rw-r--r--video/end_to_end_tests/network_state_tests.cc37
-rw-r--r--video/end_to_end_tests/retransmission_tests.cc33
-rw-r--r--video/end_to_end_tests/rtp_rtcp_tests.cc37
-rw-r--r--video/end_to_end_tests/stats_tests.cc25
-rw-r--r--video/end_to_end_tests/transport_feedback_tests.cc19
-rw-r--r--video/frame_encode_metadata_writer.cc10
-rw-r--r--video/frame_encode_metadata_writer.h4
-rw-r--r--video/full_stack_tests.cc109
-rw-r--r--video/pc_full_stack_tests.cc156
-rw-r--r--video/picture_id_tests.cc27
-rw-r--r--video/quality_scaling_tests.cc3
-rw-r--r--video/receive_statistics_proxy.cc34
-rw-r--r--video/receive_statistics_proxy.h82
-rw-r--r--video/receive_statistics_proxy2.cc80
-rw-r--r--video/receive_statistics_proxy2.h3
-rw-r--r--video/rtp_streams_synchronizer.cc6
-rw-r--r--video/rtp_streams_synchronizer.h12
-rw-r--r--video/rtp_streams_synchronizer2.cc45
-rw-r--r--video/rtp_streams_synchronizer2.h10
-rw-r--r--video/rtp_video_stream_receiver.cc88
-rw-r--r--video/rtp_video_stream_receiver.h72
-rw-r--r--video/rtp_video_stream_receiver2.cc1154
-rw-r--r--video/rtp_video_stream_receiver2.h367
-rw-r--r--video/rtp_video_stream_receiver2_unittest.cc1221
-rw-r--r--video/rtp_video_stream_receiver_frame_transformer_delegate.cc10
-rw-r--r--video/rtp_video_stream_receiver_frame_transformer_delegate.h17
-rw-r--r--video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc130
-rw-r--r--video/rtp_video_stream_receiver_unittest.cc53
-rw-r--r--video/screenshare_loopback.cc47
-rw-r--r--video/send_delay_stats.cc8
-rw-r--r--video/send_delay_stats.h18
-rw-r--r--video/send_statistics_proxy.cc98
-rw-r--r--video/send_statistics_proxy.h53
-rw-r--r--video/sv_loopback.cc98
-rw-r--r--video/test/mock_video_stream_encoder.h54
-rw-r--r--video/video_analyzer.cc67
-rw-r--r--video/video_analyzer.h72
-rw-r--r--video/video_loopback.cc59
-rw-r--r--video/video_quality_test.cc57
-rw-r--r--video/video_receive_stream.cc10
-rw-r--r--video/video_receive_stream.h4
-rw-r--r--video/video_receive_stream2.cc191
-rw-r--r--video/video_receive_stream2.h56
-rw-r--r--video/video_receive_stream2_unittest.cc570
-rw-r--r--video/video_receive_stream_unittest.cc35
-rw-r--r--video/video_send_stream.cc12
-rw-r--r--video/video_send_stream.h4
-rw-r--r--video/video_send_stream_impl.cc31
-rw-r--r--video/video_send_stream_impl.h4
-rw-r--r--video/video_send_stream_impl_unittest.cc279
-rw-r--r--video/video_send_stream_tests.cc224
-rw-r--r--video/video_source_sink_controller.cc48
-rw-r--r--video/video_source_sink_controller.h23
-rw-r--r--video/video_source_sink_controller_unittest.cc17
-rw-r--r--video/video_stream_decoder.h4
-rw-r--r--video/video_stream_decoder2.h1
-rw-r--r--video/video_stream_decoder_impl.cc4
-rw-r--r--video/video_stream_decoder_impl.h5
-rw-r--r--video/video_stream_decoder_impl_unittest.cc36
-rw-r--r--video/video_stream_encoder.cc345
-rw-r--r--video/video_stream_encoder.h79
-rw-r--r--video/video_stream_encoder_unittest.cc1375
-rw-r--r--webrtc.gni65
-rw-r--r--whitespace.txt1
1261 files changed, 50944 insertions, 26400 deletions
diff --git a/.gitignore b/.gitignore
index 8c7582c3a2..d1ccce7c1f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,14 +39,12 @@
.settings
.sw?
/Makefile
-/base
/build
/buildtools
/ios
/mojo
/out
/testing
-/third_party
/tools
/tools_webrtc/android/profiling/flamegraph
/tools_webrtc/android/profiling/simpleperf
diff --git a/AUTHORS b/AUTHORS
index 97db345ad1..63d90c271d 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -20,6 +20,7 @@ Cyril Lashkevich <notorca@gmail.com>
David Porter <david@porter.me>
Dax Booysen <dax@younow.com>
Danail Kirov <dkirovbroadsoft@gmail.com>
+Dharmesh Chauhan <dharmesh.r.chauhan@gmail.com>
Dirk-Jan C. Binnema <djcb@djcbsoftware.nl>
Dmitry Lizin <sdkdimon@gmail.com>
Eric Rescorla, RTFM Inc. <ekr@rtfm.com>
@@ -90,8 +91,11 @@ CZ Theng <cz.theng@gmail.com>
Miguel Paris <mparisdiaz@gmail.com>
Raman Budny <budnyjj@gmail.com>
Stephan Hartmann <stha09@googlemail.com>
+Lennart Grahl <lennart.grahl@gmail.com>
&yet LLC <*@andyet.com>
+8x8 Inc. <*@sip-communicator.org>
+8x8 Inc. <*@8x8.com>
Agora IO <*@agora.io>
ARM Holdings <*@arm.com>
BroadSoft Inc. <*@broadsoft.com>
@@ -108,6 +112,7 @@ Opera Software ASA <*@opera.com>
Optical Tone Ltd <*@opticaltone.com>
Pengutronix e.K. <*@pengutronix.de>
RingCentral, Inc. <*@ringcentral.com>
+Signal Messenger, LLC <*@signal.org>
Sinch AB <*@sinch.com>
struktur AG <*@struktur.de>
Telenor Digital AS <*@telenor.com>
@@ -124,3 +129,4 @@ Highfive, Inc. <*@highfive.com>
CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io>
Tuple, LLC <*@tuple.app>
Videona Socialmedia <*@videona.com>
+Threema GmbH <*@threema.ch>
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000000..24e8e6af71
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,5492 @@
+cc_defaults {
+ name: "webrtc_defaults",
+ local_include_dirs: [
+ ".",
+ ],
+ cflags: [
+ "-Wno-unused-parameter",
+ "-Wno-missing-field-initializers",
+ "-DUSE_UDEV",
+ "-DUSE_AURA=1",
+ "-DUSE_GLIB=1",
+ "-DUSE_NSS_CERTS=1",
+ "-DUSE_X11=1",
+ "-D_FILE_OFFSET_BITS=64",
+ "-D_LARGEFILE_SOURCE",
+ "-D_LARGEFILE64_SOURCE",
+ "-D_GNU_SOURCE",
+ "-DWEBRTC_ENABLE_PROTOBUF=0",
+ "-DWEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE",
+ "-DRTC_ENABLE_VP9",
+ "-DHAVE_SCTP",
+ "-DWEBRTC_LIBRARY_IMPL",
+ "-DWEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1",
+ "-DWEBRTC_POSIX",
+ "-DWEBRTC_LINUX",
+ ],
+ header_libs: [
+ "libabsl_headers",
+ ],
+ static_libs: [
+ "libaom",
+ "libevent",
+ "libopus",
+ "libsrtp2",
+ "libvpx",
+ "libyuv",
+ "libpffft",
+ "rnnoise_rnn_vad",
+ "usrsctplib",
+ ],
+ shared_libs: [
+ "libcrypto",
+ "libssl",
+ ],
+ host_supported: true,
+ device_supported: false,
+ arch: {
+ arm: {
+ enabled: false,
+ },
+ },
+}
+
+cc_library_static {
+ name: "webrtc_spl_sqrt_floor__spl_sqrt_floor",
+ defaults: ["webrtc_defaults"],
+ srcs: ["common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.c"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_fft__fft",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/third_party/fft/fft.c"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_ooura__fft_size_256",
+ defaults: ["webrtc_defaults"],
+ srcs: ["common_audio/third_party/ooura/fft_size_256/fft4g.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__audio_network_adaptor_config",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__pcm16b_c",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/pcm16b/pcm16b.c"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_sigslot__sigslot",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/third_party/sigslot/sigslot.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_network__sent_packet",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/network/sent_packet.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_pc__media_protocol_names",
+ defaults: ["webrtc_defaults"],
+ srcs: ["pc/media_protocol_names.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_g722__g722_3p",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/third_party/g722/g722_decode.c",
+ "modules/third_party/g722/g722_encode.c",
+ ],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_constants",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/engine/constants.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_api__transport_api",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/call/transport.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_synchronization__yield",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/synchronization/yield.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_g711__g711_3p",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/third_party/g711/g711.c"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__audio_processing_statistics",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/include/audio_processing_statistics.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_transport__bitrate_settings",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/transport/bitrate_settings.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_base64__base64",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/third_party/base64/base64.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__g711_c",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/g711/g711_interface.c"],
+ host_supported: true,
+ static_libs: ["webrtc_g711__g711_3p"],
+}
+
+cc_library_static {
+ name: "webrtc_ooura__fft_size_128",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_audio/third_party/ooura/fft_size_128/ooura_fft.cc",
+ "common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc",
+ ],
+ host_supported: true,
+ cflags: ["-msse2"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__checks",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/checks.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__isac_vad",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/isac/main/source/filter_functions.c",
+ "modules/audio_coding/codecs/isac/main/source/isac_vad.c",
+ "modules/audio_coding/codecs/isac/main/source/pitch_estimator.c",
+ "modules/audio_coding/codecs/isac/main/source/pitch_filter.c",
+ ],
+ host_supported: true,
+ static_libs: ["webrtc_fft__fft"],
+}
+
+cc_library_static {
+ name: "webrtc_memory__aligned_malloc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/memory/aligned_malloc.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+filegroup {
+ name: "webrtc_rtp__dependency_descriptor",
+ srcs: ["api/transport/rtp/dependency_descriptor.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__g722_c",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/g722/g722_interface.c"],
+ host_supported: true,
+ static_libs: ["webrtc_g722__g722_3p"],
+}
+
+cc_library_static {
+ name: "webrtc_system_wrappers__cpu_features_linux",
+ defaults: ["webrtc_defaults"],
+ srcs: ["system_wrappers/source/cpu_features_linux.c"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_generic_frame_descriptor__generic_frame_descriptor",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_video/generic_frame_descriptor/generic_frame_info.cc",
+ ":webrtc_rtp__dependency_descriptor",
+ ],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__platform_thread_types",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/platform_thread_types.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_modules__module_api",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/include/module_common_types.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_task_queue__task_queue",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/task_queue/task_queue_base.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_utility__pffft_wrapper",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/utility/pffft_wrapper.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+filegroup {
+ name: "webrtc_audio_processing__rms_level",
+ srcs: ["modules/audio_processing/rms_level.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_utility__cascaded_biquad_filter",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/utility/cascaded_biquad_filter.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_synchronization__yield_policy",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/synchronization/yield_policy.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__stringutils",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/string_encode.cc",
+ "rtc_base/string_to_number.cc",
+ "rtc_base/string_utils.cc",
+ "rtc_base/strings/string_builder.cc",
+ "rtc_base/strings/string_format.cc",
+ ],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__criticalsection",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/deprecated/recursive_critical_section.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_synchronization__yield",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_system__file_wrapper",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/system/file_wrapper.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__criticalsection",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_synchronization__mutex",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/synchronization/mutex.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_synchronization__yield",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_synchronization__rw_lock_wrapper",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/synchronization/rw_lock_wrapper.cc",
+ "rtc_base/synchronization/rw_lock_posix.cc",
+ ],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_time__timestamp_extrapolator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/time/timestamp_extrapolator.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_synchronization__rw_lock_wrapper"],
+}
+
+cc_library_static {
+ name: "webrtc_utility__legacy_delay_estimator",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/utility/delay_estimator.cc",
+ "modules/audio_processing/utility/delay_estimator_wrapper.cc",
+ ],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__checks"],
+}
+
+cc_library_static {
+ name: "webrtc_synchronization__sequence_checker",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/synchronization/sequence_checker.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_bitrate_allocation",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/video_bitrate_allocation.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__rtp_parameters",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/media_types.cc",
+ "api/rtp_parameters.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_adaptation",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/video_adaptation_counters.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__config",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/include/config.cc"],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_api__audio_options_api",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_options.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__stringutils"],
+}
+
+cc_library_static {
+ name: "webrtc_units__data_size",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/units/data_size.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+filegroup {
+ name: "webrtc_rtc_base__rtc_operations_chain",
+ srcs: ["rtc_base/operations_chain.cc"],
+}
+
+filegroup {
+ name: "webrtc_av1__scalable_video_controller",
+ srcs: ["modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__timeutils",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/time_utils.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_units__time_delta",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/units/time_delta.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_event",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/event.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__yield_policy",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_units__timestamp",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/units/timestamp.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_units__time_delta",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_units__frequency",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/units/frequency.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_units__time_delta",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__weak_ptr",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/weak_ptr.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_synchronization__sequence_checker"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__platform_thread",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/platform_thread.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__rtc_event",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_task_utils__pending_task_safety_flag",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/task_utils/pending_task_safety_flag.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__sequence_checker",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_event_log__rtc_event_log",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/rtc_event_log/rtc_event.cc",
+ "api/rtc_event_log/rtc_event_log.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__timeutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__logging",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/logging.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__timeutils",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_units__data_rate",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/units/data_rate.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__frequency",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_system_wrappers__field_trial",
+ defaults: ["webrtc_defaults"],
+ srcs: ["system_wrappers/source/field_trial.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_rtc_base__logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__chain_diff_calculator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_coding/chain_diff_calculator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_transport__network_control",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/transport/network_types.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__field_trial_parser",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/experiments/field_trial_list.cc",
+ "rtc_base/experiments/field_trial_parser.cc",
+ "rtc_base/experiments/field_trial_units.cc",
+ "rtc_base/experiments/struct_parameters_parser.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_event_pacing",
+ defaults: ["webrtc_defaults"],
+ srcs: ["logging/rtc_event_log/events/rtc_event_alr_state.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_event_log__rtc_event_log"],
+}
+
+cc_library_static {
+ name: "webrtc_transport__field_trial_based_config",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/transport/field_trial_based_config.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_system_wrappers__field_trial"],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__frame_dependencies_calculator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_coding/frame_dependencies_calculator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_generic_frame_descriptor__generic_frame_descriptor",
+ "webrtc_rtc_base__logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_task_queue_libevent",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/task_queue_libevent.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__platform_thread",
+ "webrtc_rtc_base__logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__rtc_error",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/rtc_error.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_event_log__rtc_event_log_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/rtc_event_log/rtc_event_log_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_event_log__rtc_event_log",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__link_capacity_estimator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/link_capacity_estimator.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_units__data_rate"],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_bitrate_allocator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/video_bitrate_allocator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_units__data_rate",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__keyframe_interval_settings_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/keyframe_interval_settings.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_task_queue",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/task_queue.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_task_queue__task_queue"],
+}
+
+cc_library_static {
+ name: "webrtc_task_queue__default_task_queue_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/task_queue/default_task_queue_factory_libevent.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__rtc_task_queue_libevent",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_base_approved",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/bit_buffer.cc",
+ "rtc_base/buffer_queue.cc",
+ "rtc_base/byte_buffer.cc",
+ "rtc_base/copy_on_write_buffer.cc",
+ "rtc_base/event_tracer.cc",
+ "rtc_base/location.cc",
+ "rtc_base/numerics/histogram_percentile_counter.cc",
+ "rtc_base/numerics/sample_counter.cc",
+ "rtc_base/race_checker.cc",
+ "rtc_base/random.cc",
+ "rtc_base/rate_statistics.cc",
+ "rtc_base/rate_tracker.cc",
+ "rtc_base/timestamp_aligner.cc",
+ "rtc_base/zero_memory.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__rtc_event",
+ "webrtc_rtc_base__platform_thread",
+ "webrtc_rtc_base__logging",
+ "webrtc_rtc_base__rtc_task_queue",
+ ],
+}
+
+filegroup {
+ name: "webrtc_adaptation__resource_adaptation_api",
+ srcs: ["api/adaptation/resource.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_api__rtc_event_log_output_file",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/rtc_event_log_output_file.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system__file_wrapper",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__jitter_upper_bound_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/jitter_upper_bound_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__biquad_filter",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc2/biquad_filter.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__rtc_base_approved"],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_numerics",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/numerics/event_based_exponential_moving_average.cc",
+ "rtc_base/numerics/event_rate_counter.cc",
+ "rtc_base/numerics/exp_filter.cc",
+ "rtc_base/numerics/moving_average.cc",
+ "rtc_base/numerics/sample_stats.cc",
+ "rtc_base/numerics/samples_stats_counter.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_units__data_rate",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__cpu_speed_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/cpu_speed_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_system_wrappers__system_wrappers",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "system_wrappers/source/clock.cc",
+ "system_wrappers/source/cpu_features.cc",
+ "system_wrappers/source/cpu_info.cc",
+ "system_wrappers/source/rtp_to_ntp_estimator.cc",
+ "system_wrappers/source/sleep.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system_wrappers__cpu_features_linux",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__rw_lock_wrapper",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_rtp_headers",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/video/color_space.cc",
+ "api/video/hdr_metadata.cc",
+ "api/video/video_content_type.cc",
+ "api/video/video_timing.cc",
+ ],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__rtc_base_approved"],
+}
+
+cc_library_static {
+ name: "webrtc_opus__audio_encoder_opus_config",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc",
+ "api/audio_codecs/opus/audio_encoder_opus_config.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_OPUS_VARIABLE_COMPLEXITY=0"],
+ static_libs: ["webrtc_rtc_base__rtc_base_approved"],
+}
+
+cc_library_static {
+ name: "webrtc_audio__aec3_config",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio/echo_canceller3_config.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__webrtc_opus_wrapper",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/opus/opus_interface.cc"],
+ host_supported: true,
+ cflags: [
+ "-DWEBRTC_CODEC_ILBC",
+ "-DWEBRTC_CODEC_OPUS",
+ "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1",
+ "-DWEBRTC_CODEC_ISAC",
+ ],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__common",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc2/agc2_common.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__alr_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/alr_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__quality_scaler_settings",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/quality_scaler_settings.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__normalize_simulcast_size_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/normalize_simulcast_size_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_codecs__audio_codecs_api",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/audio_codecs/audio_codec_pair_id.cc",
+ "api/audio_codecs/audio_decoder.cc",
+ "api/audio_codecs/audio_encoder.cc",
+ "api/audio_codecs/audio_format.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__quality_rampup_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/quality_rampup_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_stats__rtc_stats",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "stats/rtc_stats.cc",
+ "stats/rtc_stats_report.cc",
+ "stats/rtcstats_objects.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_system_wrappers__metrics",
+ defaults: ["webrtc_defaults"],
+ srcs: ["system_wrappers/source/metrics.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+filegroup {
+ name: "webrtc_neteq__tick_timer",
+ srcs: ["api/neteq/tick_timer.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__rtt_mult_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/rtt_mult_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rnn_vad__rnn_vad",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc2/rnn_vad/auto_correlation.cc",
+ "modules/audio_processing/agc2/rnn_vad/common.cc",
+ "modules/audio_processing/agc2/rnn_vad/features_extraction.cc",
+ "modules/audio_processing/agc2/rnn_vad/lp_residual.cc",
+ "modules/audio_processing/agc2/rnn_vad/pitch_search.cc",
+ "modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc",
+ "modules/audio_processing/agc2/rnn_vad/rnn.cc",
+ "modules/audio_processing/agc2/rnn_vad/spectral_features.cc",
+ "modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_utility__pffft_wrapper",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_agc2__biquad_filter",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rtc_base",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "rtc_base/async_invoker.cc",
+ "rtc_base/async_packet_socket.cc",
+ "rtc_base/async_resolver_interface.cc",
+ "rtc_base/async_socket.cc",
+ "rtc_base/async_tcp_socket.cc",
+ "rtc_base/async_udp_socket.cc",
+ "rtc_base/crc32.cc",
+ "rtc_base/crypt_string.cc",
+ "rtc_base/data_rate_limiter.cc",
+ "rtc_base/deprecated/signal_thread.cc",
+ "rtc_base/file_rotating_stream.cc",
+ "rtc_base/helpers.cc",
+ "rtc_base/http_common.cc",
+ "rtc_base/ip_address.cc",
+ "rtc_base/message_digest.cc",
+ "rtc_base/message_handler.cc",
+ "rtc_base/net_helper.cc",
+ "rtc_base/net_helpers.cc",
+ "rtc_base/network.cc",
+ "rtc_base/network_constants.cc",
+ "rtc_base/network_monitor.cc",
+ "rtc_base/network_route.cc",
+ "rtc_base/null_socket_server.cc",
+ "rtc_base/openssl_adapter.cc",
+ "rtc_base/openssl_certificate.cc",
+ "rtc_base/openssl_digest.cc",
+ "rtc_base/openssl_identity.cc",
+ "rtc_base/openssl_session_cache.cc",
+ "rtc_base/openssl_stream_adapter.cc",
+ "rtc_base/openssl_utility.cc",
+ "rtc_base/physical_socket_server.cc",
+ "rtc_base/proxy_info.cc",
+ "rtc_base/rtc_certificate.cc",
+ "rtc_base/rtc_certificate_generator.cc",
+ "rtc_base/socket.cc",
+ "rtc_base/socket_adapters.cc",
+ "rtc_base/socket_address.cc",
+ "rtc_base/socket_address_pair.cc",
+ "rtc_base/ssl_adapter.cc",
+ "rtc_base/ssl_certificate.cc",
+ "rtc_base/ssl_fingerprint.cc",
+ "rtc_base/ssl_identity.cc",
+ "rtc_base/ssl_stream_adapter.cc",
+ "rtc_base/stream.cc",
+ "rtc_base/thread.cc",
+ "rtc_base/unique_id_generator.cc",
+ "rtc_base/log_sinks.cc",
+ "rtc_base/ifaddrs_converter.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_network__sent_packet",
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_audio__common_audio_cc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["common_audio/signal_processing/dot_product_with_scale.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_pacing__interval_budget",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/pacing/interval_budget.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_audio__common_audio_c",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_audio/ring_buffer.c",
+ "common_audio/signal_processing/auto_corr_to_refl_coef.c",
+ "common_audio/signal_processing/auto_correlation.c",
+ "common_audio/signal_processing/copy_set_operations.c",
+ "common_audio/signal_processing/cross_correlation.c",
+ "common_audio/signal_processing/division_operations.c",
+ "common_audio/signal_processing/downsample_fast.c",
+ "common_audio/signal_processing/energy.c",
+ "common_audio/signal_processing/filter_ar.c",
+ "common_audio/signal_processing/filter_ma_fast_q12.c",
+ "common_audio/signal_processing/get_hanning_window.c",
+ "common_audio/signal_processing/get_scaling_square.c",
+ "common_audio/signal_processing/ilbc_specific_functions.c",
+ "common_audio/signal_processing/levinson_durbin.c",
+ "common_audio/signal_processing/lpc_to_refl_coef.c",
+ "common_audio/signal_processing/min_max_operations.c",
+ "common_audio/signal_processing/randomization_functions.c",
+ "common_audio/signal_processing/real_fft.c",
+ "common_audio/signal_processing/refl_coef_to_lpc.c",
+ "common_audio/signal_processing/resample.c",
+ "common_audio/signal_processing/resample_48khz.c",
+ "common_audio/signal_processing/resample_by_2.c",
+ "common_audio/signal_processing/resample_by_2_internal.c",
+ "common_audio/signal_processing/resample_fractional.c",
+ "common_audio/signal_processing/spl_init.c",
+ "common_audio/signal_processing/spl_inl.c",
+ "common_audio/signal_processing/spl_sqrt.c",
+ "common_audio/signal_processing/splitting_filter.c",
+ "common_audio/signal_processing/sqrt_of_one_minus_x_squared.c",
+ "common_audio/signal_processing/vector_scaling_operations.c",
+ "common_audio/vad/vad_core.c",
+ "common_audio/vad/vad_filterbank.c",
+ "common_audio/vad/vad_gmm.c",
+ "common_audio/vad/vad_sp.c",
+ "common_audio/vad/webrtc_vad.c",
+ "common_audio/signal_processing/complex_fft.c",
+ "common_audio/signal_processing/complex_bit_reverse.c",
+ "common_audio/signal_processing/filter_ar_fast_q12.c",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_spl_sqrt_floor__spl_sqrt_floor",
+ "webrtc_ooura__fft_size_256",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_common_audio__common_audio_cc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_aecm__aecm_core",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/aecm/aecm_core.cc",
+ "modules/audio_processing/aecm/echo_control_mobile.cc",
+ "modules/audio_processing/aecm/aecm_core_c.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_utility__legacy_delay_estimator",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio_c",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_processing__video_processing_sse2",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_processing/util/denoiser_filter_sse2.cc"],
+ host_supported: true,
+ cflags: ["-msse2"],
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__gain_applier",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc2/gain_applier.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_agc2__common"],
+}
+
+cc_library_static {
+ name: "webrtc_task_utils__repeating_task",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/task_utils/repeating_task.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_base__logging",
+ "webrtc_system_wrappers__system_wrappers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__audio_format_to_string",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/strings/audio_format_to_string.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__stringutils",
+ "webrtc_audio_codecs__audio_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_memory__fifo_buffer",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/memory/fifo_buffer.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__rtp_headers",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/rtp_headers.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_units__timestamp",
+ "webrtc_video__video_rtp_headers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtc_base__rate_limiter",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/rate_limiter.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__audio_coding_opus_common",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/opus/audio_coder_opus_common.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_audio_codecs__audio_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_stream_config",
+ defaults: ["webrtc_defaults"],
+ srcs: ["logging/rtc_event_log/rtc_stream_config.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__rtp_headers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__legacy_encoded_audio_frame",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/legacy_encoded_audio_frame.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__webrtc_multiopus",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc",
+ "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc",
+ ],
+ host_supported: true,
+ cflags: [
+ "-DWEBRTC_CODEC_ILBC",
+ "-DWEBRTC_CODEC_OPUS",
+ "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1",
+ "-DWEBRTC_CODEC_ISAC",
+ ],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__logging",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_opus__audio_encoder_opus_config",
+ "webrtc_audio_coding__webrtc_opus_wrapper",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__audio_coding_opus_common",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__rtp_packet_info",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/rtp_packet_info.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtp_headers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_crypto__options",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/crypto/crypto_options.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_rtc_base__rtc_base"],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_h264_profile_id",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/base/h264_profile_level_id.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_base",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__webrtc_cng",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/cng/webrtc_cng.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio_c",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_audio__common_audio_sse2",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_audio/fir_filter_sse.cc",
+ "common_audio/resampler/sinc_resampler_sse.cc",
+ ],
+ host_supported: true,
+ cflags: ["-msse2"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_memory__aligned_malloc",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_event_video",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc",
+ "logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_logging__rtc_stream_config",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_audio__common_audio",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_audio/audio_converter.cc",
+ "common_audio/audio_util.cc",
+ "common_audio/channel_buffer.cc",
+ "common_audio/real_fourier.cc",
+ "common_audio/real_fourier_ooura.cc",
+ "common_audio/resampler/push_resampler.cc",
+ "common_audio/resampler/push_sinc_resampler.cc",
+ "common_audio/resampler/resampler.cc",
+ "common_audio/resampler/sinc_resampler.cc",
+ "common_audio/smoothing_filter.cc",
+ "common_audio/vad/vad.cc",
+ "common_audio/wav_file.cc",
+ "common_audio/wav_header.cc",
+ "common_audio/window_generator.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_rtc_base__checks",
+ "webrtc_memory__aligned_malloc",
+ "webrtc_system__file_wrapper",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio_sse2",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__simulated_network",
+ defaults: ["webrtc_defaults"],
+ srcs: ["call/simulated_network.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_units__data_rate",
+ "webrtc_rtc_base__rtc_base_approved",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__bitrate_allocator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["call/bitrate_allocator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_units__time_delta",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__rnn_vad_with_level",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc2/vad_with_level.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rnn_vad__rnn_vad",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__g722",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/g722/audio_decoder_g722.cc",
+ "modules/audio_coding/codecs/g722/audio_encoder_g722.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_audio_coding__g722_c",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__legacy_encoded_audio_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_device__audio_device_buffer",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_device/audio_device_buffer.cc",
+ "modules/audio_device/fine_audio_buffer.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio__audio_frame_api",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/audio/audio_frame.cc",
+ "api/audio/channel_layout.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtp_packet_info",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__alr_detector",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/alr_detector.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_logging__rtc_event_pacing",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_pacing__interval_budget",
+ ],
+}
+
+filegroup {
+ name: "webrtc_transport__stun_types",
+ srcs: ["api/transport/stun.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_frame",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/video/video_frame.cc",
+ "api/video/video_frame_buffer.cc",
+ "api/video/video_source_interface.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_api__rtp_packet_info",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__apm_logging",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/logging/apm_data_dumper.cc"],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__ilbc_c",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/ilbc/abs_quant.c",
+ "modules/audio_coding/codecs/ilbc/abs_quant_loop.c",
+ "modules/audio_coding/codecs/ilbc/augmented_cb_corr.c",
+ "modules/audio_coding/codecs/ilbc/bw_expand.c",
+ "modules/audio_coding/codecs/ilbc/cb_construct.c",
+ "modules/audio_coding/codecs/ilbc/cb_mem_energy.c",
+ "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c",
+ "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c",
+ "modules/audio_coding/codecs/ilbc/cb_search.c",
+ "modules/audio_coding/codecs/ilbc/cb_search_core.c",
+ "modules/audio_coding/codecs/ilbc/cb_update_best_index.c",
+ "modules/audio_coding/codecs/ilbc/chebyshev.c",
+ "modules/audio_coding/codecs/ilbc/comp_corr.c",
+ "modules/audio_coding/codecs/ilbc/constants.c",
+ "modules/audio_coding/codecs/ilbc/create_augmented_vec.c",
+ "modules/audio_coding/codecs/ilbc/decode.c",
+ "modules/audio_coding/codecs/ilbc/decode_residual.c",
+ "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c",
+ "modules/audio_coding/codecs/ilbc/do_plc.c",
+ "modules/audio_coding/codecs/ilbc/encode.c",
+ "modules/audio_coding/codecs/ilbc/energy_inverse.c",
+ "modules/audio_coding/codecs/ilbc/enh_upsample.c",
+ "modules/audio_coding/codecs/ilbc/enhancer.c",
+ "modules/audio_coding/codecs/ilbc/enhancer_interface.c",
+ "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c",
+ "modules/audio_coding/codecs/ilbc/frame_classify.c",
+ "modules/audio_coding/codecs/ilbc/gain_dequant.c",
+ "modules/audio_coding/codecs/ilbc/gain_quant.c",
+ "modules/audio_coding/codecs/ilbc/get_cd_vec.c",
+ "modules/audio_coding/codecs/ilbc/get_lsp_poly.c",
+ "modules/audio_coding/codecs/ilbc/get_sync_seq.c",
+ "modules/audio_coding/codecs/ilbc/hp_input.c",
+ "modules/audio_coding/codecs/ilbc/hp_output.c",
+ "modules/audio_coding/codecs/ilbc/ilbc.c",
+ "modules/audio_coding/codecs/ilbc/index_conv_dec.c",
+ "modules/audio_coding/codecs/ilbc/index_conv_enc.c",
+ "modules/audio_coding/codecs/ilbc/init_decode.c",
+ "modules/audio_coding/codecs/ilbc/init_encode.c",
+ "modules/audio_coding/codecs/ilbc/interpolate.c",
+ "modules/audio_coding/codecs/ilbc/interpolate_samples.c",
+ "modules/audio_coding/codecs/ilbc/lpc_encode.c",
+ "modules/audio_coding/codecs/ilbc/lsf_check.c",
+ "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c",
+ "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c",
+ "modules/audio_coding/codecs/ilbc/lsf_to_lsp.c",
+ "modules/audio_coding/codecs/ilbc/lsf_to_poly.c",
+ "modules/audio_coding/codecs/ilbc/lsp_to_lsf.c",
+ "modules/audio_coding/codecs/ilbc/my_corr.c",
+ "modules/audio_coding/codecs/ilbc/nearest_neighbor.c",
+ "modules/audio_coding/codecs/ilbc/pack_bits.c",
+ "modules/audio_coding/codecs/ilbc/poly_to_lsf.c",
+ "modules/audio_coding/codecs/ilbc/poly_to_lsp.c",
+ "modules/audio_coding/codecs/ilbc/refiner.c",
+ "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c",
+ "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c",
+ "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c",
+ "modules/audio_coding/codecs/ilbc/simple_lsf_quant.c",
+ "modules/audio_coding/codecs/ilbc/smooth.c",
+ "modules/audio_coding/codecs/ilbc/smooth_out_data.c",
+ "modules/audio_coding/codecs/ilbc/sort_sq.c",
+ "modules/audio_coding/codecs/ilbc/split_vq.c",
+ "modules/audio_coding/codecs/ilbc/state_construct.c",
+ "modules/audio_coding/codecs/ilbc/state_search.c",
+ "modules/audio_coding/codecs/ilbc/swap_bytes.c",
+ "modules/audio_coding/codecs/ilbc/unpack_bits.c",
+ "modules/audio_coding/codecs/ilbc/vq3.c",
+ "modules/audio_coding/codecs/ilbc/vq4.c",
+ "modules/audio_coding/codecs/ilbc/window32_w32.c",
+ "modules/audio_coding/codecs/ilbc/xcorr_coef.c",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_opus__audio_encoder_multiopus",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_opus__audio_encoder_opus_config",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__webrtc_multiopus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_utility__utility",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/utility/source/process_thread_impl.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__nack_module",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/histogram.cc",
+ "modules/video_coding/nack_module2.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_utility__utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_g722__audio_encoder_g722",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/g722/audio_encoder_g722.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__g722",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__isac_c",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/isac/main/source/arith_routines.c",
+ "modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c",
+ "modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c",
+ "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c",
+ "modules/audio_coding/codecs/isac/main/source/crc.c",
+ "modules/audio_coding/codecs/isac/main/source/decode.c",
+ "modules/audio_coding/codecs/isac/main/source/decode_bwe.c",
+ "modules/audio_coding/codecs/isac/main/source/encode.c",
+ "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c",
+ "modules/audio_coding/codecs/isac/main/source/entropy_coding.c",
+ "modules/audio_coding/codecs/isac/main/source/filterbanks.c",
+ "modules/audio_coding/codecs/isac/main/source/intialize.c",
+ "modules/audio_coding/codecs/isac/main/source/isac.c",
+ "modules/audio_coding/codecs/isac/main/source/lattice.c",
+ "modules/audio_coding/codecs/isac/main/source/lpc_analysis.c",
+ "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/lpc_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c",
+ "modules/audio_coding/codecs/isac/main/source/transform.c",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_fft__fft",
+ "webrtc_rtc_base__checks",
+ "webrtc_audio_coding__isac_vad",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__g711",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/g711/audio_decoder_pcm.cc",
+ "modules/audio_coding/codecs/g711/audio_encoder_pcm.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_coding__g711_c",
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__legacy_encoded_audio_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_opus__audio_decoder_multiopus",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__webrtc_multiopus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_audio__fir_filter_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_audio/fir_filter_c.cc",
+ "common_audio/fir_filter_factory.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio_sse2",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__ilbc",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc",
+ "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__legacy_encoded_audio_frame",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_coding__ilbc_c",
+ ],
+}
+
+filegroup {
+ name: "webrtc_neteq__neteq_api",
+ srcs: ["api/neteq/neteq.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__audio_encoder_cng",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/cng/audio_encoder_cng.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__webrtc_cng",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__fixed_digital",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc2/fixed_digital_level_estimator.cc",
+ "modules/audio_processing/agc2/interpolated_gain_curve.cc",
+ "modules/audio_processing/agc2/limiter.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_agc2__common",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_processing__apm_logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_event_audio",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc",
+ "logging/rtc_event_log/events/rtc_event_audio_playout.cc",
+ "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc",
+ "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_coding__audio_network_adaptor_config",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_logging__rtc_stream_config",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__min_video_bitrate_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/min_video_bitrate_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_video__video_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__encoded_image",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/encoded_image.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_video__video_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc__legacy_agc",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc/legacy/analog_agc.cc",
+ "modules/audio_processing/agc/legacy/digital_agc.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_g722__audio_decoder_g722",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/g722/audio_decoder_g722.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__g722",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__pcm16b",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc",
+ "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc",
+ "modules/audio_coding/codecs/pcm16b/pcm16b_common.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_coding__pcm16b_c",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__legacy_encoded_audio_frame",
+ "webrtc_audio_coding__g711",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__red",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_coding/codecs/red/audio_encoder_copy_red.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_common_audio__common_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_utility__audio_frame_operations",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "audio/utility/audio_frame_operations.cc",
+ "audio/utility/channel_mixer.cc",
+ "audio/utility/channel_mixing_matrix.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio__audio_frame_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__isac",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc",
+ "modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__isac_c",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_deprecated__nack_module",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_coding/deprecated/nack_module.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_base__logging",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_utility__utility",
+ "webrtc_video_coding__nack_module",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_frame_i420",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/i420_buffer.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_memory__aligned_malloc",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_video__video_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_isac__audio_encoder_isac_float",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/isac/audio_encoder_isac_float.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__isac",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__api",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/include/audio_processing.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_system__file_wrapper",
+ "webrtc_audio_processing__config",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio__aec3_config",
+ "webrtc_audio__audio_frame_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_transient__transient_suppressor_impl",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/transient/moving_moments.cc",
+ "modules/audio_processing/transient/transient_detector.cc",
+ "modules/audio_processing/transient/transient_suppressor_impl.cc",
+ "modules/audio_processing/transient/wpd_node.cc",
+ "modules/audio_processing/transient/wpd_tree.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ "webrtc_common_audio__fir_filter_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_ilbc__audio_encoder_ilbc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/ilbc/audio_encoder_ilbc.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__ilbc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtp_rtcp__rtp_video_header",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/rtp_rtcp/source/rtp_video_header.cc",
+ ":webrtc_rtp__dependency_descriptor",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__noise_level_estimator",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc2/down_sampler.cc",
+ "modules/audio_processing/agc2/noise_level_estimator.cc",
+ "modules/audio_processing/agc2/noise_spectrum_estimator.cc",
+ "modules/audio_processing/agc2/signal_classifier.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_ooura__fft_size_128",
+ "webrtc_rtc_base__checks",
+ "webrtc_agc2__biquad_filter",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_processing__apm_logging",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__audio_buffer",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/audio_buffer.cc",
+ "modules/audio_processing/splitting_filter.cc",
+ "modules/audio_processing/three_band_filter_bank.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_processing__api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_isac__audio_decoder_isac_float",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/isac/audio_decoder_isac_float.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__isac",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_vad__vad",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/vad/gmm.cc",
+ "modules/audio_processing/vad/pitch_based_vad.cc",
+ "modules/audio_processing/vad/pitch_internal.cc",
+ "modules/audio_processing/vad/pole_zero_filter.cc",
+ "modules/audio_processing/vad/standalone_vad.cc",
+ "modules/audio_processing/vad/vad_audio_proc.cc",
+ "modules/audio_processing/vad/vad_circular_buffer.cc",
+ "modules/audio_processing/vad/voice_activity_detector.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_rtc_base__checks",
+ "webrtc_audio_coding__isac_vad",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ "webrtc_utility__audio_frame_operations",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_device__audio_device_generic",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_device/audio_device_generic.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_device__audio_device_buffer",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__high_pass_filter",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/high_pass_filter.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_utility__cascaded_biquad_filter",
+ "webrtc_audio_processing__audio_buffer",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_ns__ns",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/ns/fast_math.cc",
+ "modules/audio_processing/ns/histograms.cc",
+ "modules/audio_processing/ns/noise_estimator.cc",
+ "modules/audio_processing/ns/noise_suppressor.cc",
+ "modules/audio_processing/ns/ns_fft.cc",
+ "modules/audio_processing/ns/prior_signal_model.cc",
+ "modules/audio_processing/ns/prior_signal_model_estimator.cc",
+ "modules/audio_processing/ns/quantile_noise_estimator.cc",
+ "modules/audio_processing/ns/signal_model.cc",
+ "modules/audio_processing/ns/signal_model_estimator.cc",
+ "modules/audio_processing/ns/speech_probability_estimator.cc",
+ "modules/audio_processing/ns/suppression_params.cc",
+ "modules/audio_processing/ns/wiener_filter.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_ooura__fft_size_128",
+ "webrtc_rtc_base__checks",
+ "webrtc_utility__cascaded_biquad_filter",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_processing__audio_buffer",
+ "webrtc_audio_processing__high_pass_filter",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_common_video__common_video",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "common_video/bitrate_adjuster.cc",
+ "common_video/frame_rate_estimator.cc",
+ "common_video/h264/h264_bitstream_parser.cc",
+ "common_video/h264/h264_common.cc",
+ "common_video/h264/pps_parser.cc",
+ "common_video/h264/sps_parser.cc",
+ "common_video/h264/sps_vui_rewriter.cc",
+ "common_video/i420_buffer_pool.cc",
+ "common_video/incoming_video_stream.cc",
+ "common_video/libyuv/webrtc_libyuv.cc",
+ "common_video/video_frame_buffer.cc",
+ "common_video/video_render_frames.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_g711__audio_encoder_g711",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/g711/audio_encoder_g711.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__g711",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__adaptive_digital",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc2/adaptive_agc.cc",
+ "modules/audio_processing/agc2/adaptive_digital_gain_applier.cc",
+ "modules/audio_processing/agc2/adaptive_mode_level_estimator.cc",
+ "modules/audio_processing/agc2/saturation_protector.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_agc2__common",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_agc2__gain_applier",
+ "webrtc_common_audio__common_audio",
+ "webrtc_agc2__rnn_vad_with_level",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_processing__api",
+ "webrtc_agc2__noise_level_estimator",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_L16__audio_encoder_L16",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/L16/audio_encoder_L16.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__pcm16b",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__audio_frame_proxies",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/include/audio_frame_proxies.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio__audio_frame_api",
+ "webrtc_audio_processing__api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_ilbc__audio_decoder_ilbc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/ilbc/audio_decoder_ilbc.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__ilbc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_g711__audio_decoder_g711",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/g711/audio_decoder_g711.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__g711",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__optionally_built_submodule_creators",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/optionally_built_submodule_creators.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_transient__transient_suppressor_impl"],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_frame_i010",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/i010_buffer.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_memory__aligned_malloc",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_video__video_frame",
+ "webrtc_video__video_frame_i420",
+ ],
+}
+
+filegroup {
+ name: "webrtc_video__video_frame_metadata",
+ srcs: ["api/video/video_frame_metadata.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_L16__audio_decoder_L16",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/L16/audio_decoder_L16.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__pcm16b",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_codecs__video_codecs_api",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/video_codecs/sdp_video_format.cc",
+ "api/video_codecs/video_codec.cc",
+ "api/video_codecs/video_decoder.cc",
+ "api/video_codecs/video_decoder_factory.cc",
+ "api/video_codecs/video_encoder.cc",
+ "api/video_codecs/video_encoder_config.cc",
+ "api/video_codecs/vp8_frame_config.cc",
+ "api/video_codecs/vp8_temporal_layers.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_units__data_rate",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__audio_network_adaptor",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc",
+ "modules/audio_coding/audio_network_adaptor/bitrate_controller.cc",
+ "modules/audio_coding/audio_network_adaptor/channel_controller.cc",
+ "modules/audio_coding/audio_network_adaptor/controller.cc",
+ "modules/audio_coding/audio_network_adaptor/controller_manager.cc",
+ "modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc",
+ "modules/audio_coding/audio_network_adaptor/dtx_controller.cc",
+ "modules/audio_coding/audio_network_adaptor/event_log_writer.cc",
+ "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc",
+ "modules/audio_coding/audio_network_adaptor/frame_length_controller.cc",
+ "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_coding__audio_network_adaptor_config",
+ "webrtc_rtc_base__checks",
+ "webrtc_system__file_wrapper",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_common_audio__common_audio",
+ "webrtc_logging__rtc_event_audio",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc__level_estimation",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/agc/agc.cc",
+ "modules/audio_processing/agc/loudness_histogram.cc",
+ "modules/audio_processing/agc/utility.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_vad__vad",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__media_stream_interface",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/media_stream_interface.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_rtc_base__checks",
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__audio_options_api",
+ "webrtc_video__video_frame",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_mixer__audio_frame_manipulator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_mixer/audio_frame_manipulator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_utility__audio_frame_operations",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__quality_scaling_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/quality_scaling_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video_codecs__video_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__webrtc_opus",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/codecs/opus/audio_decoder_opus.cc",
+ "modules/audio_coding/codecs/opus/audio_encoder_opus.cc",
+ ],
+ host_supported: true,
+ cflags: [
+ "-DWEBRTC_CODEC_ILBC",
+ "-DWEBRTC_CODEC_OPUS",
+ "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1",
+ "-DWEBRTC_CODEC_ISAC",
+ ],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_opus__audio_encoder_opus_config",
+ "webrtc_audio_coding__webrtc_opus_wrapper",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__audio_coding_opus_common",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_coding__audio_network_adaptor",
+ ],
+}
+
+filegroup {
+ name: "webrtc_audio_processing__aec_dump_interface",
+ srcs: ["modules/audio_processing/include/aec_dump.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__voice_detection",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/voice_detection.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_audio_processing__api",
+ "webrtc_audio_processing__audio_buffer",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_vp9_profile",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/base/vp9_profile.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video_codecs__video_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_aec3__aec3",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/aec3/adaptive_fir_filter.cc",
+ "modules/audio_processing/aec3/adaptive_fir_filter_erl.cc",
+ "modules/audio_processing/aec3/aec3_common.cc",
+ "modules/audio_processing/aec3/aec3_fft.cc",
+ "modules/audio_processing/aec3/aec_state.cc",
+ "modules/audio_processing/aec3/alignment_mixer.cc",
+ "modules/audio_processing/aec3/api_call_jitter_metrics.cc",
+ "modules/audio_processing/aec3/block_buffer.cc",
+ "modules/audio_processing/aec3/block_delay_buffer.cc",
+ "modules/audio_processing/aec3/block_framer.cc",
+ "modules/audio_processing/aec3/block_processor.cc",
+ "modules/audio_processing/aec3/block_processor_metrics.cc",
+ "modules/audio_processing/aec3/clockdrift_detector.cc",
+ "modules/audio_processing/aec3/coarse_filter_update_gain.cc",
+ "modules/audio_processing/aec3/comfort_noise_generator.cc",
+ "modules/audio_processing/aec3/decimator.cc",
+ "modules/audio_processing/aec3/dominant_nearend_detector.cc",
+ "modules/audio_processing/aec3/downsampled_render_buffer.cc",
+ "modules/audio_processing/aec3/echo_audibility.cc",
+ "modules/audio_processing/aec3/echo_canceller3.cc",
+ "modules/audio_processing/aec3/echo_path_delay_estimator.cc",
+ "modules/audio_processing/aec3/echo_path_variability.cc",
+ "modules/audio_processing/aec3/echo_remover.cc",
+ "modules/audio_processing/aec3/echo_remover_metrics.cc",
+ "modules/audio_processing/aec3/erl_estimator.cc",
+ "modules/audio_processing/aec3/erle_estimator.cc",
+ "modules/audio_processing/aec3/fft_buffer.cc",
+ "modules/audio_processing/aec3/filter_analyzer.cc",
+ "modules/audio_processing/aec3/frame_blocker.cc",
+ "modules/audio_processing/aec3/fullband_erle_estimator.cc",
+ "modules/audio_processing/aec3/matched_filter.cc",
+ "modules/audio_processing/aec3/matched_filter_lag_aggregator.cc",
+ "modules/audio_processing/aec3/moving_average.cc",
+ "modules/audio_processing/aec3/refined_filter_update_gain.cc",
+ "modules/audio_processing/aec3/render_buffer.cc",
+ "modules/audio_processing/aec3/render_delay_buffer.cc",
+ "modules/audio_processing/aec3/render_delay_controller.cc",
+ "modules/audio_processing/aec3/render_delay_controller_metrics.cc",
+ "modules/audio_processing/aec3/render_signal_analyzer.cc",
+ "modules/audio_processing/aec3/residual_echo_estimator.cc",
+ "modules/audio_processing/aec3/reverb_decay_estimator.cc",
+ "modules/audio_processing/aec3/reverb_frequency_response.cc",
+ "modules/audio_processing/aec3/reverb_model.cc",
+ "modules/audio_processing/aec3/reverb_model_estimator.cc",
+ "modules/audio_processing/aec3/signal_dependent_erle_estimator.cc",
+ "modules/audio_processing/aec3/spectrum_buffer.cc",
+ "modules/audio_processing/aec3/stationarity_estimator.cc",
+ "modules/audio_processing/aec3/subband_erle_estimator.cc",
+ "modules/audio_processing/aec3/subband_nearend_detector.cc",
+ "modules/audio_processing/aec3/subtractor.cc",
+ "modules/audio_processing/aec3/subtractor_output.cc",
+ "modules/audio_processing/aec3/subtractor_output_analyzer.cc",
+ "modules/audio_processing/aec3/suppression_filter.cc",
+ "modules/audio_processing/aec3/suppression_gain.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_ooura__fft_size_128",
+ "webrtc_rtc_base__checks",
+ "webrtc_utility__cascaded_biquad_filter",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio__aec3_config",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_processing__audio_buffer",
+ "webrtc_audio_processing__high_pass_filter",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_opus__audio_decoder_opus",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/opus/audio_decoder_opus.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__webrtc_opus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__rate_control_settings",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/rate_control_settings.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_units__data_size",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video_codecs__video_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__video_codec_interface",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/include/video_codec_interface.cc",
+ "modules/video_coding/video_coding_defines.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_generic_frame_descriptor__generic_frame_descriptor",
+ "webrtc_modules__module_api",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_av1__libaom_av1_encoder",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/av1/libaom_av1_encoder.cc",
+ ":webrtc_av1__scalable_video_controller",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_device__audio_device_impl",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_device/dummy/audio_device_dummy.cc",
+ "modules/audio_device/dummy/file_audio_device.cc",
+ "modules/audio_device/include/test_audio_device.cc",
+ "modules/audio_device/audio_device_data_observer.cc",
+ "modules/audio_device/audio_device_impl.cc",
+ "modules/audio_device/linux/alsasymboltable_linux.cc",
+ "modules/audio_device/linux/audio_device_alsa_linux.cc",
+ "modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc",
+ "modules/audio_device/linux/latebindingsymboltable_linux.cc",
+ "modules/audio_device/linux/audio_device_pulse_linux.cc",
+ "modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc",
+ "modules/audio_device/linux/pulseaudiosymboltable_linux.cc",
+ "modules/audio_device/dummy/file_audio_device_factory.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_DUMMY_FILE_DEVICES"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_device__audio_device_buffer",
+ "webrtc_utility__utility",
+ "webrtc_audio_device__audio_device_generic",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_av1__libaom_av1_decoder",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_coding/codecs/av1/libaom_av1_decoder.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__logging",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__neteq",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/neteq/accelerate.cc",
+ "modules/audio_coding/neteq/audio_multi_vector.cc",
+ "modules/audio_coding/neteq/audio_vector.cc",
+ "modules/audio_coding/neteq/background_noise.cc",
+ "modules/audio_coding/neteq/buffer_level_filter.cc",
+ "modules/audio_coding/neteq/comfort_noise.cc",
+ "modules/audio_coding/neteq/cross_correlation.cc",
+ "modules/audio_coding/neteq/decision_logic.cc",
+ "modules/audio_coding/neteq/decoder_database.cc",
+ "modules/audio_coding/neteq/delay_manager.cc",
+ "modules/audio_coding/neteq/dsp_helper.cc",
+ "modules/audio_coding/neteq/dtmf_buffer.cc",
+ "modules/audio_coding/neteq/dtmf_tone_generator.cc",
+ "modules/audio_coding/neteq/expand.cc",
+ "modules/audio_coding/neteq/expand_uma_logger.cc",
+ "modules/audio_coding/neteq/histogram.cc",
+ "modules/audio_coding/neteq/merge.cc",
+ "modules/audio_coding/neteq/nack_tracker.cc",
+ "modules/audio_coding/neteq/neteq_impl.cc",
+ "modules/audio_coding/neteq/normal.cc",
+ "modules/audio_coding/neteq/packet.cc",
+ "modules/audio_coding/neteq/packet_buffer.cc",
+ "modules/audio_coding/neteq/post_decode_vad.cc",
+ "modules/audio_coding/neteq/preemptive_expand.cc",
+ "modules/audio_coding/neteq/random_vector.cc",
+ "modules/audio_coding/neteq/red_payload_splitter.cc",
+ "modules/audio_coding/neteq/statistics_calculator.cc",
+ "modules/audio_coding/neteq/sync_buffer.cc",
+ "modules/audio_coding/neteq/time_stretch.cc",
+ "modules/audio_coding/neteq/timestamp_scaler.cc",
+ ":webrtc_neteq__tick_timer",
+ ":webrtc_neteq__neteq_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_api__rtp_headers",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_audio_coding__webrtc_cng",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio__audio_frame_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__pushback_controller",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__data_size",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__rate_control_settings",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_processing__video_processing",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_processing/util/denoiser_filter.cc",
+ "modules/video_processing/util/denoiser_filter_c.cc",
+ "modules/video_processing/util/noise_estimation.cc",
+ "modules/video_processing/util/skin_detection.cc",
+ "modules/video_processing/video_denoiser.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video_processing__video_processing_sse2",
+ "webrtc_common_audio__common_audio",
+ "webrtc_video__video_frame",
+ "webrtc_utility__utility",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtp_rtcp__rtp_rtcp_format",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/rtp_rtcp/include/report_block_data.cc",
+ "modules/rtp_rtcp/include/rtp_rtcp_defines.cc",
+ "modules/rtp_rtcp/source/rtcp_packet.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/app.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/bye.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/common_header.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/dlrr.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/fir.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/nack.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/pli.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/psfb.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/remb.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/report_block.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/rrtr.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/sdes.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/sender_report.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc",
+ "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc",
+ "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc",
+ "modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc",
+ "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc",
+ "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc",
+ "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc",
+ "modules/rtp_rtcp/source/rtp_header_extension_map.cc",
+ "modules/rtp_rtcp/source/rtp_header_extensions.cc",
+ "modules/rtp_rtcp/source/rtp_packet.cc",
+ "modules/rtp_rtcp/source/rtp_packet_received.cc",
+ "modules/rtp_rtcp/source/rtp_packet_to_send.cc",
+ ":webrtc_rtp__dependency_descriptor",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__time_delta",
+ "webrtc_transport__network_control",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_api__rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ ],
+}
+
+filegroup {
+ name: "webrtc_neteq__default_neteq_controller_factory",
+ srcs: ["api/neteq/default_neteq_controller_factory.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__balanced_degradation_settings",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/balanced_degradation_settings.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video_codecs__video_codecs_api",
+ ],
+}
+
+filegroup {
+ name: "webrtc_audio_coding__default_neteq_factory",
+ srcs: ["modules/audio_coding/neteq/default_neteq_factory.cc"],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__encoded_frame",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/video_coding/encoded_frame.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_experiments__rtt_mult_experiment",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_video_coding__video_codec_interface",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_experiments__stable_target_rate_experiment",
+ defaults: ["webrtc_defaults"],
+ srcs: ["rtc_base/experiments/stable_target_rate_experiment.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_experiments__rate_control_settings",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_mixer__audio_mixer_impl",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_mixer/audio_mixer_impl.cc",
+ "modules/audio_mixer/default_output_rate_calculator.cc",
+ "modules/audio_mixer/frame_combiner.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_agc2__fixed_digital",
+ "webrtc_utility__audio_frame_operations",
+ "webrtc_audio_processing__api",
+ "webrtc_audio_mixer__audio_frame_manipulator",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc2__level_estimation_agc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc"],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_agc2__common",
+ "webrtc_agc2__gain_applier",
+ "webrtc_common_audio__common_audio",
+ "webrtc_agc2__rnn_vad_with_level",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_processing__api",
+ "webrtc_agc2__noise_level_estimator",
+ "webrtc_vad__vad",
+ "webrtc_agc2__adaptive_digital",
+ "webrtc_agc__level_estimation",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/remote_bitrate_estimator/aimd_rate_control.cc",
+ "modules/remote_bitrate_estimator/bwe_defines.cc",
+ "modules/remote_bitrate_estimator/inter_arrival.cc",
+ "modules/remote_bitrate_estimator/overuse_detector.cc",
+ "modules/remote_bitrate_estimator/overuse_estimator.cc",
+ "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc",
+ "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc",
+ "modules/remote_bitrate_estimator/remote_estimator_proxy.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_units__timestamp",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_goog_cc__link_capacity_estimator",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_agc__agc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/audio_processing/agc/agc_manager_direct.cc"],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__logging",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_processing__audio_buffer",
+ "webrtc_vad__vad",
+ "webrtc_agc__level_estimation",
+ "webrtc_agc2__level_estimation_agc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_opus__audio_encoder_opus",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/opus/audio_encoder_opus.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_opus__audio_encoder_opus_config",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_audio_coding__webrtc_opus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtp__transport_feedback",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/congestion_controller/rtp/transport_feedback_adapter.cc",
+ "modules/congestion_controller/rtp/transport_feedback_demuxer.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_network__sent_packet",
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_units__data_size",
+ "webrtc_units__timestamp",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_codecs__builtin_audio_decoder_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/builtin_audio_decoder_factory.cc"],
+ host_supported: true,
+ cflags: [
+ "-DWEBRTC_USE_BUILTIN_ILBC=1",
+ "-DWEBRTC_USE_BUILTIN_OPUS=1",
+ "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0",
+ "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1",
+ ],
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_opus__audio_decoder_multiopus",
+ "webrtc_g722__audio_decoder_g722",
+ "webrtc_ilbc__audio_decoder_ilbc",
+ "webrtc_g711__audio_decoder_g711",
+ "webrtc_L16__audio_decoder_L16",
+ "webrtc_opus__audio_decoder_opus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_coding__audio_coding",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_coding/acm2/acm_receiver.cc",
+ "modules/audio_coding/acm2/acm_remixing.cc",
+ "modules/audio_coding/acm2/acm_resampler.cc",
+ "modules/audio_coding/acm2/audio_coding_module.cc",
+ "modules/audio_coding/acm2/call_statistics.cc",
+ ":webrtc_neteq__neteq_api",
+ ":webrtc_audio_coding__default_neteq_factory",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_audio_coding__neteq",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio__aec3_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio/echo_canceller3_factory.cc"],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio__aec3_config",
+ "webrtc_aec3__aec3",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_event_rtp_rtcp",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc",
+ "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc",
+ "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc",
+ "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_vp9_helpers",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/vp9/svc_config.cc",
+ "modules/video_coding/codecs/vp9/svc_rate_allocator.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_rtc_base__logging",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_experiments__stable_target_rate_experiment",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_aec_dump__null_aec_dump_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/aec_dump/null_aec_dump_factory.cc",
+ ":webrtc_audio_processing__aec_dump_interface",
+ ],
+ host_supported: true,
+}
+
+cc_library_static {
+ name: "webrtc_video__encoded_frame",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/encoded_frame.cc"],
+ host_supported: true,
+ static_libs: ["webrtc_video_coding__encoded_frame"],
+}
+
+cc_library_static {
+ name: "webrtc_logging__rtc_event_bwe",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc",
+ "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc",
+ "logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc",
+ "logging/rtc_event_log/events/rtc_event_probe_result_failure.cc",
+ "logging/rtc_event_log/events/rtc_event_probe_result_success.cc",
+ "logging/rtc_event_log/events/rtc_event_route_change.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_codecs__builtin_audio_encoder_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/audio_codecs/builtin_audio_encoder_factory.cc"],
+ host_supported: true,
+ cflags: [
+ "-DWEBRTC_USE_BUILTIN_ILBC=1",
+ "-DWEBRTC_USE_BUILTIN_OPUS=1",
+ "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0",
+ "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1",
+ ],
+ static_libs: [
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_opus__audio_encoder_multiopus",
+ "webrtc_g722__audio_encoder_g722",
+ "webrtc_ilbc__audio_encoder_ilbc",
+ "webrtc_g711__audio_encoder_g711",
+ "webrtc_L16__audio_encoder_L16",
+ "webrtc_opus__audio_encoder_opus",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio_processing__audio_processing",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/audio_processing/audio_processing_builder_impl.cc",
+ "modules/audio_processing/audio_processing_impl.cc",
+ "modules/audio_processing/echo_control_mobile_impl.cc",
+ "modules/audio_processing/echo_detector/circular_buffer.cc",
+ "modules/audio_processing/echo_detector/mean_variance_estimator.cc",
+ "modules/audio_processing/echo_detector/moving_max.cc",
+ "modules/audio_processing/echo_detector/normalized_covariance_estimator.cc",
+ "modules/audio_processing/gain_control_impl.cc",
+ "modules/audio_processing/gain_controller2.cc",
+ "modules/audio_processing/level_estimator.cc",
+ "modules/audio_processing/residual_echo_detector.cc",
+ "modules/audio_processing/typing_detection.cc",
+ ":webrtc_audio_processing__rms_level",
+ ":webrtc_audio_processing__aec_dump_interface",
+ ],
+ host_supported: true,
+ cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"],
+ static_libs: [
+ "webrtc_ooura__fft_size_256",
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_audio_processing__config",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_audio__aec3_config",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_aecm__aecm_core",
+ "webrtc_agc2__gain_applier",
+ "webrtc_common_audio__common_audio",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_common_audio__fir_filter_factory",
+ "webrtc_agc2__fixed_digital",
+ "webrtc_agc__legacy_agc",
+ "webrtc_utility__audio_frame_operations",
+ "webrtc_audio_processing__api",
+ "webrtc_audio_processing__audio_buffer",
+ "webrtc_vad__vad",
+ "webrtc_audio_processing__high_pass_filter",
+ "webrtc_ns__ns",
+ "webrtc_agc2__adaptive_digital",
+ "webrtc_audio_processing__audio_frame_proxies",
+ "webrtc_audio_processing__optionally_built_submodule_creators",
+ "webrtc_audio_processing__voice_detection",
+ "webrtc_aec3__aec3",
+ "webrtc_agc__agc",
+ "webrtc_aec_dump__null_aec_dump_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__probe_controller",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/probe_controller.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_logging__rtc_event_pacing",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_logging__rtc_event_bwe",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__loss_based_controller",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc",
+ "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_bwe",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__estimators",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc",
+ "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc",
+ "modules/congestion_controller/goog_cc/bitrate_estimator.cc",
+ "modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc",
+ "modules/congestion_controller/goog_cc/robust_throughput_estimator.cc",
+ "modules/congestion_controller/goog_cc/trendline_estimator.cc",
+ ],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_bwe",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__rtp_interfaces",
+ defaults: ["webrtc_defaults"],
+ srcs: ["call/rtp_config.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_transport__bitrate_settings",
+ "webrtc_rtc_base__checks",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtp_headers",
+ "webrtc_crypto__options",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__video_stream_api",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/video_receive_stream.cc",
+ "call/video_send_stream.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_api__rtp_parameters",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_api__rtp_headers",
+ "webrtc_crypto__options",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_call__rtp_interfaces",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__call_interfaces",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/audio_receive_stream.cc",
+ "call/audio_state.cc",
+ "call/call_config.cc",
+ "call/flexfec_receive_stream.cc",
+ "call/syncable.cc",
+ "call/audio_send_stream.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ":webrtc_neteq__neteq_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_network__sent_packet",
+ "webrtc_api__transport_api",
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_transport__bitrate_settings",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_api__rtp_parameters",
+ "webrtc_transport__network_control",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_api__rtp_headers",
+ "webrtc_crypto__options",
+ "webrtc_utility__utility",
+ "webrtc_audio_processing__api",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_audio_processing__audio_processing",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__video_stream_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_media_base",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "media/base/adapted_video_track_source.cc",
+ "media/base/codec.cc",
+ "media/base/media_channel.cc",
+ "media/base/media_constants.cc",
+ "media/base/media_engine.cc",
+ "media/base/rid_description.cc",
+ "media/base/rtp_data_engine.cc",
+ "media/base/rtp_utils.cc",
+ "media/base/stream_params.cc",
+ "media/base/turn_utils.cc",
+ "media/base/video_adapter.cc",
+ "media/base/video_broadcaster.cc",
+ "media/base/video_common.cc",
+ "media/base/video_source_base.cc",
+ ":webrtc_transport__stun_types",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__audio_options_api",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_crypto__options",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_api__media_stream_interface",
+ "webrtc_media__rtc_vp9_profile",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_capture__video_capture_module",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_capture/device_info_impl.cc",
+ "modules/video_capture/video_capture_factory.cc",
+ "modules/video_capture/video_capture_impl.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_modules__module_api",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__rw_lock_wrapper",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_media__rtc_media_base",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__fake_network",
+ defaults: ["webrtc_defaults"],
+ srcs: ["call/fake_network_pipe.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_api__rtp_parameters",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_call__simulated_network",
+ "webrtc_utility__utility",
+ "webrtc_call__call_interfaces",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__libjingle_peerconnection_api",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/candidate.cc",
+ "api/data_channel_interface.cc",
+ "api/dtls_transport_interface.cc",
+ "api/jsep.cc",
+ "api/jsep_ice_candidate.cc",
+ "api/peer_connection_interface.cc",
+ "api/proxy.cc",
+ "api/rtp_receiver_interface.cc",
+ "api/rtp_sender_interface.cc",
+ "api/rtp_transceiver_interface.cc",
+ "api/sctp_transport_interface.cc",
+ "api/stats_types.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ":webrtc_neteq__neteq_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_transport__bitrate_settings",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__audio_options_api",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_transport__network_control",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_crypto__options",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_api__media_stream_interface",
+ "webrtc_media__rtc_media_base",
+ ],
+}
+
+filegroup {
+ name: "webrtc_video_capture__video_capture_internal_impl",
+ srcs: [
+ "modules/video_capture/linux/device_info_linux.cc",
+ "modules/video_capture/linux/video_capture_linux.cc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_logging__ice_log",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc",
+ "logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc",
+ "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc",
+ "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc",
+ "logging/rtc_event_log/ice_logger.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__libjingle_peerconnection_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__bitrate_configurator",
+ defaults: ["webrtc_defaults"],
+ srcs: ["call/rtp_bitrate_configurator.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_transport__bitrate_settings",
+ "webrtc_rtc_base__checks",
+ "webrtc_units__data_rate",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_api__libjingle_peerconnection_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtp_rtcp__rtp_rtcp",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/rtp_rtcp/source/absolute_capture_time_receiver.cc",
+ "modules/rtp_rtcp/source/absolute_capture_time_sender.cc",
+ "modules/rtp_rtcp/source/active_decode_targets_helper.cc",
+ "modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc",
+ "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc",
+ "modules/rtp_rtcp/source/dtmf_queue.cc",
+ "modules/rtp_rtcp/source/fec_private_tables_bursty.cc",
+ "modules/rtp_rtcp/source/fec_private_tables_random.cc",
+ "modules/rtp_rtcp/source/flexfec_header_reader_writer.cc",
+ "modules/rtp_rtcp/source/flexfec_receiver.cc",
+ "modules/rtp_rtcp/source/flexfec_sender.cc",
+ "modules/rtp_rtcp/source/forward_error_correction.cc",
+ "modules/rtp_rtcp/source/forward_error_correction_internal.cc",
+ "modules/rtp_rtcp/source/packet_loss_stats.cc",
+ "modules/rtp_rtcp/source/receive_statistics_impl.cc",
+ "modules/rtp_rtcp/source/remote_ntp_time_estimator.cc",
+ "modules/rtp_rtcp/source/rtcp_nack_stats.cc",
+ "modules/rtp_rtcp/source/rtcp_receiver.cc",
+ "modules/rtp_rtcp/source/rtcp_sender.cc",
+ "modules/rtp_rtcp/source/rtp_descriptor_authentication.cc",
+ "modules/rtp_rtcp/source/rtp_format.cc",
+ "modules/rtp_rtcp/source/rtp_format_h264.cc",
+ "modules/rtp_rtcp/source/rtp_format_video_generic.cc",
+ "modules/rtp_rtcp/source/rtp_format_vp8.cc",
+ "modules/rtp_rtcp/source/rtp_format_vp9.cc",
+ "modules/rtp_rtcp/source/rtp_header_extension_size.cc",
+ "modules/rtp_rtcp/source/rtp_packet_history.cc",
+ "modules/rtp_rtcp/source/rtp_packetizer_av1.cc",
+ "modules/rtp_rtcp/source/rtp_rtcp_impl.cc",
+ "modules/rtp_rtcp/source/rtp_rtcp_impl2.cc",
+ "modules/rtp_rtcp/source/rtp_sender.cc",
+ "modules/rtp_rtcp/source/rtp_sender_audio.cc",
+ "modules/rtp_rtcp/source/rtp_sender_egress.cc",
+ "modules/rtp_rtcp/source/rtp_sender_video.cc",
+ "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc",
+ "modules/rtp_rtcp/source/rtp_sequence_number_map.cc",
+ "modules/rtp_rtcp/source/rtp_utility.cc",
+ "modules/rtp_rtcp/source/source_tracker.cc",
+ "modules/rtp_rtcp/source/time_util.cc",
+ "modules/rtp_rtcp/source/tmmbr_help.cc",
+ "modules/rtp_rtcp/source/ulpfec_generator.cc",
+ "modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc",
+ "modules/rtp_rtcp/source/ulpfec_receiver_impl.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc",
+ "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc",
+ ":webrtc_rtp__dependency_descriptor",
+ ],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_time__timestamp_extrapolator",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_video__video_frame",
+ "webrtc_logging__rtc_event_audio",
+ "webrtc_video__encoded_image",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_rtp_rtcp",
+ "webrtc_video__encoded_frame",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_api__libjingle_peerconnection_api",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__rtp_receiver",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/rtp_demuxer.cc",
+ "call/rtp_stream_receiver_controller.cc",
+ "call/rtx_receive_stream.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_p2p__rtc_p2p",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "p2p/base/async_stun_tcp_socket.cc",
+ "p2p/base/basic_async_resolver_factory.cc",
+ "p2p/base/basic_ice_controller.cc",
+ "p2p/base/basic_packet_socket_factory.cc",
+ "p2p/base/connection.cc",
+ "p2p/base/connection_info.cc",
+ "p2p/base/default_ice_transport_factory.cc",
+ "p2p/base/dtls_transport.cc",
+ "p2p/base/dtls_transport_internal.cc",
+ "p2p/base/ice_controller_interface.cc",
+ "p2p/base/ice_credentials_iterator.cc",
+ "p2p/base/ice_transport_internal.cc",
+ "p2p/base/mdns_message.cc",
+ "p2p/base/p2p_constants.cc",
+ "p2p/base/p2p_transport_channel.cc",
+ "p2p/base/packet_transport_internal.cc",
+ "p2p/base/port.cc",
+ "p2p/base/port_allocator.cc",
+ "p2p/base/port_interface.cc",
+ "p2p/base/pseudo_tcp.cc",
+ "p2p/base/regathering_controller.cc",
+ "p2p/base/stun_port.cc",
+ "p2p/base/stun_request.cc",
+ "p2p/base/tcp_port.cc",
+ "p2p/base/transport_description.cc",
+ "p2p/base/transport_description_factory.cc",
+ "p2p/base/turn_port.cc",
+ "p2p/client/basic_port_allocator.cc",
+ "p2p/client/turn_port_factory.cc",
+ ":webrtc_transport__stun_types",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_network__sent_packet",
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__weak_ptr",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_memory__fifo_buffer",
+ "webrtc_crypto__options",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_logging__ice_log",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_pacing__pacing",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/pacing/bitrate_prober.cc",
+ "modules/pacing/paced_sender.cc",
+ "modules/pacing/pacing_controller.cc",
+ "modules/pacing/packet_router.cc",
+ "modules/pacing/round_robin_packet_queue.cc",
+ "modules/pacing/task_queue_paced_sender.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_logging__rtc_event_pacing",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_pacing__interval_budget",
+ "webrtc_utility__utility",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_bwe",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_data",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/sctp/sctp_transport.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_api__transport_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_media__rtc_media_base",
+ "webrtc_p2p__rtc_p2p",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__delay_based_bwe",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/delay_based_bwe.cc"],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_bwe",
+ "webrtc_goog_cc__estimators",
+ "webrtc_pacing__pacing",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__video_coding_utility",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/utility/decoded_frames_history.cc",
+ "modules/video_coding/utility/frame_dropper.cc",
+ "modules/video_coding/utility/framerate_controller.cc",
+ "modules/video_coding/utility/ivf_file_reader.cc",
+ "modules/video_coding/utility/ivf_file_writer.cc",
+ "modules/video_coding/utility/quality_scaler.cc",
+ "modules/video_coding/utility/simulcast_rate_allocator.cc",
+ "modules/video_coding/utility/simulcast_utility.cc",
+ "modules/video_coding/utility/vp8_header_parser.cc",
+ "modules/video_coding/utility/vp9_uncompressed_header_parser.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_video__video_adaptation",
+ "webrtc_rtc_base__weak_ptr",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_experiments__quality_scaler_settings",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__quality_scaling_experiment",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_experiments__stable_target_rate_experiment",
+ "webrtc_video__encoded_frame",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_audio__audio",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "audio/audio_level.cc",
+ "audio/audio_receive_stream.cc",
+ "audio/audio_send_stream.cc",
+ "audio/audio_state.cc",
+ "audio/audio_transport_impl.cc",
+ "audio/channel_receive.cc",
+ "audio/channel_receive_frame_transformer_delegate.cc",
+ "audio/channel_send.cc",
+ "audio/channel_send_frame_transformer_delegate.cc",
+ "audio/null_audio_poller.cc",
+ "audio/remix_resample.cc",
+ ":webrtc_audio_processing__rms_level",
+ ":webrtc_neteq__neteq_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_audio_coding__audio_network_adaptor_config",
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_api__rtp_parameters",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_logging__rtc_stream_config",
+ "webrtc_crypto__options",
+ "webrtc_common_audio__common_audio",
+ "webrtc_call__bitrate_allocator",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_utility__utility",
+ "webrtc_audio_coding__audio_encoder_cng",
+ "webrtc_logging__rtc_event_audio",
+ "webrtc_audio_coding__red",
+ "webrtc_utility__audio_frame_operations",
+ "webrtc_audio_processing__api",
+ "webrtc_audio_processing__audio_frame_proxies",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_audio_coding__audio_coding",
+ "webrtc_audio__aec3_factory",
+ "webrtc_audio_processing__audio_processing",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__call_interfaces",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_pacing__pacing",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__ice_transport_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/ice_transport_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_p2p__rtc_p2p",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_adaptation__resource_adaptation",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/adaptation/adaptation_constraint.cc",
+ "call/adaptation/adaptation_listener.cc",
+ "call/adaptation/broadcast_resource_listener.cc",
+ "call/adaptation/degradation_preference_provider.cc",
+ "call/adaptation/encoder_settings.cc",
+ "call/adaptation/resource_adaptation_processor.cc",
+ "call/adaptation/resource_adaptation_processor_interface.cc",
+ "call/adaptation/video_source_restrictions.cc",
+ "call/adaptation/video_stream_adapter.cc",
+ "call/adaptation/video_stream_input_state.cc",
+ "call/adaptation/video_stream_input_state_provider.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_api__rtp_parameters",
+ "webrtc_video__video_adaptation",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_frame",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__balanced_degradation_settings",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_rtp__control_handler",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/rtp/control_handler.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_pacing__pacing",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_vp8_temporal_layers",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/vp8/default_temporal_layers.cc",
+ "modules/video_coding/codecs/vp8/screenshare_layers.cc",
+ "modules/video_coding/codecs/vp8/temporal_layers_checker.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_congestion_controller__congestion_controller",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/receive_side_congestion_controller.cc"],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_transport__network_control",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_pacing__pacing",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_multiplex",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc",
+ "modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc",
+ "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc",
+ "modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__builtin_video_bitrate_allocator_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/builtin_video_bitrate_allocator_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__webrtc_vp9_helpers",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_codecs__rtc_software_fallback_wrappers",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "api/video_codecs/video_decoder_software_fallback_wrapper.cc",
+ "api/video_codecs/video_encoder_software_fallback_wrapper.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_goog_cc__goog_cc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["modules/congestion_controller/goog_cc/goog_cc_network_control.cc"],
+ host_supported: true,
+ cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"],
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_units__data_size",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_logging__rtc_event_pacing",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_goog_cc__alr_detector",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_goog_cc__pushback_controller",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_logging__rtc_event_bwe",
+ "webrtc_goog_cc__probe_controller",
+ "webrtc_goog_cc__loss_based_controller",
+ "webrtc_goog_cc__estimators",
+ "webrtc_goog_cc__delay_based_bwe",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_vp9",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/vp9/vp9.cc",
+ "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc",
+ "modules/video_coding/codecs/vp9/vp9_impl.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ "webrtc_video__video_frame_i010",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_media__rtc_vp9_profile",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_video_coding__webrtc_vp9_helpers",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_h264",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/h264/h264.cc",
+ "modules/video_coding/codecs/h264/h264_color_space.cc",
+ "modules/video_coding/codecs/h264/h264_decoder_impl.cc",
+ "modules/video_coding/codecs/h264/h264_encoder_impl.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video__video_frame_i010",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_simulcast_encoder_adapter",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/engine/simulcast_encoder_adapter.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_video__video_frame_i420",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_call__video_stream_api",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_codecs__rtc_software_fallback_wrappers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_adaptation__video_adaptation",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "video/adaptation/encode_usage_resource.cc",
+ "video/adaptation/overuse_frame_detector.cc",
+ "video/adaptation/quality_rampup_experiment_helper.cc",
+ "video/adaptation/quality_scaler_resource.cc",
+ "video/adaptation/video_stream_encoder_resource.cc",
+ "video/adaptation/video_stream_encoder_resource_manager.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_api__rtp_parameters",
+ "webrtc_video__video_adaptation",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__rtc_event",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_experiments__quality_scaler_settings",
+ "webrtc_experiments__quality_rampup_experiment",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_video__video_frame",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__balanced_degradation_settings",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_adaptation__resource_adaptation",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_transport__goog_cc",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/transport/goog_cc_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_transport__network_control",
+ "webrtc_goog_cc__goog_cc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_codecs__vp8_temporal_layers_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video_codecs/vp8_temporal_layers_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_coding__webrtc_vp8_temporal_layers",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_test__fake_video_codecs",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "test/configurable_frame_size_encoder.cc",
+ "test/fake_decoder.cc",
+ "test/fake_encoder.cc",
+ "test/fake_vp8_decoder.cc",
+ "test/fake_vp8_encoder.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_codecs__vp8_temporal_layers_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_encoder_simulcast_proxy",
+ defaults: ["webrtc_defaults"],
+ srcs: ["media/engine/encoder_simulcast_proxy.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_video__video_frame",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_media__rtc_simulcast_encoder_adapter",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_pc__rtc_pc_base",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "pc/channel.cc",
+ "pc/channel_manager.cc",
+ "pc/composite_rtp_transport.cc",
+ "pc/dtls_srtp_transport.cc",
+ "pc/dtls_transport.cc",
+ "pc/external_hmac.cc",
+ "pc/ice_transport.cc",
+ "pc/jsep_transport.cc",
+ "pc/jsep_transport_controller.cc",
+ "pc/media_session.cc",
+ "pc/rtcp_mux_filter.cc",
+ "pc/rtp_media_utils.cc",
+ "pc/rtp_transport.cc",
+ "pc/sctp_data_channel_transport.cc",
+ "pc/sctp_transport.cc",
+ "pc/sctp_utils.cc",
+ "pc/session_description.cc",
+ "pc/simulcast_description.cc",
+ "pc/srtp_filter.cc",
+ "pc/srtp_session.cc",
+ "pc/srtp_transport.cc",
+ "pc/transport_stats.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_pc__media_protocol_names",
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__audio_options_api",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_api__rtp_headers",
+ "webrtc_crypto__options",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_logging__ice_log",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_call__rtp_receiver",
+ "webrtc_p2p__rtc_p2p",
+ "webrtc_media__rtc_data",
+ "webrtc_api__ice_transport_factory",
+ "webrtc_video__builtin_video_bitrate_allocator_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__rtp_sender",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/rtp_payload_params.cc",
+ "call/rtp_transport_controller_send.cc",
+ "call/rtp_video_sender.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_synchronization__mutex",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__time_delta",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_video_coding__chain_diff_calculator",
+ "webrtc_transport__network_control",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_video_coding__frame_dependencies_calculator",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_video__video_frame",
+ "webrtc_utility__utility",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_rtp__transport_feedback",
+ "webrtc_logging__rtc_event_bwe",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__bitrate_configurator",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_pacing__pacing",
+ "webrtc_rtp__control_handler",
+ "webrtc_congestion_controller__congestion_controller",
+ "webrtc_transport__goog_cc",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__video_coding",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codec_timer.cc",
+ "modules/video_coding/decoder_database.cc",
+ "modules/video_coding/fec_controller_default.cc",
+ "modules/video_coding/frame_buffer2.cc",
+ "modules/video_coding/frame_object.cc",
+ "modules/video_coding/generic_decoder.cc",
+ "modules/video_coding/h264_sprop_parameter_sets.cc",
+ "modules/video_coding/h264_sps_pps_tracker.cc",
+ "modules/video_coding/inter_frame_delay.cc",
+ "modules/video_coding/jitter_estimator.cc",
+ "modules/video_coding/loss_notification_controller.cc",
+ "modules/video_coding/media_opt_util.cc",
+ "modules/video_coding/packet_buffer.cc",
+ "modules/video_coding/rtp_frame_reference_finder.cc",
+ "modules/video_coding/rtt_filter.cc",
+ "modules/video_coding/timestamp_map.cc",
+ "modules/video_coding/timing.cc",
+ "modules/video_coding/unique_timestamp_counter.cc",
+ "modules/video_coding/video_codec_initializer.cc",
+ "modules/video_coding/video_receiver2.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__mutex",
+ "webrtc_time__timestamp_extrapolator",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_video__video_adaptation",
+ "webrtc_units__time_delta",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_experiments__jitter_upper_bound_experiment",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_experiments__rtt_mult_experiment",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_api__rtp_headers",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_video__video_frame",
+ "webrtc_experiments__min_video_bitrate_experiment",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_video_coding__encoded_frame",
+ "webrtc_video_coding__webrtc_vp9_helpers",
+ "webrtc_video__encoded_frame",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video__builtin_video_bitrate_allocator_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_coding__webrtc_vp8",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "modules/video_coding/codecs/vp8/libvpx_interface.cc",
+ "modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc",
+ "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_experiments__cpu_speed_experiment",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_coding__webrtc_vp8_temporal_layers",
+ "webrtc_video_codecs__vp8_temporal_layers_factory",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_internal_video_codecs",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "media/engine/fake_video_codec_factory.cc",
+ "media/engine/internal_decoder_factory.cc",
+ "media/engine/internal_encoder_factory.cc",
+ "media/engine/multiplex_codec_factory.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_media__rtc_constants",
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_av1__libaom_av1_encoder",
+ "webrtc_av1__libaom_av1_decoder",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_coding__webrtc_multiplex",
+ "webrtc_video_codecs__rtc_software_fallback_wrappers",
+ "webrtc_video_coding__webrtc_vp9",
+ "webrtc_video_coding__webrtc_h264",
+ "webrtc_media__rtc_simulcast_encoder_adapter",
+ "webrtc_test__fake_video_codecs",
+ "webrtc_media__rtc_encoder_simulcast_proxy",
+ "webrtc_video_coding__webrtc_vp8",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_codecs__builtin_video_encoder_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video_codecs/builtin_video_encoder_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_media__rtc_media_base",
+ "webrtc_media__rtc_encoder_simulcast_proxy",
+ "webrtc_media__rtc_internal_video_codecs",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__frame_dumping_decoder",
+ defaults: ["webrtc_defaults"],
+ srcs: ["video/frame_dumping_decoder.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_system__file_wrapper",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_video__encoded_image",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_video__encoded_frame",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_coding__video_coding",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_pc__peerconnection",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "pc/audio_rtp_receiver.cc",
+ "pc/audio_track.cc",
+ "pc/data_channel_controller.cc",
+ "pc/data_channel_utils.cc",
+ "pc/dtmf_sender.cc",
+ "pc/ice_server_parsing.cc",
+ "pc/jitter_buffer_delay.cc",
+ "pc/jsep_ice_candidate.cc",
+ "pc/jsep_session_description.cc",
+ "pc/local_audio_source.cc",
+ "pc/media_stream.cc",
+ "pc/media_stream_observer.cc",
+ "pc/peer_connection.cc",
+ "pc/peer_connection_factory.cc",
+ "pc/remote_audio_source.cc",
+ "pc/rtc_stats_collector.cc",
+ "pc/rtc_stats_traversal.cc",
+ "pc/rtp_data_channel.cc",
+ "pc/rtp_parameters_conversion.cc",
+ "pc/rtp_receiver.cc",
+ "pc/rtp_sender.cc",
+ "pc/rtp_transceiver.cc",
+ "pc/sctp_data_channel.cc",
+ "pc/sdp_serializer.cc",
+ "pc/sdp_utils.cc",
+ "pc/stats_collector.cc",
+ "pc/track_media_info_map.cc",
+ "pc/video_rtp_receiver.cc",
+ "pc/video_rtp_track_source.cc",
+ "pc/video_track.cc",
+ "pc/video_track_source.cc",
+ "pc/webrtc_sdp.cc",
+ "pc/webrtc_session_description_factory.cc",
+ ":webrtc_rtc_base__rtc_operations_chain",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_sigslot__sigslot",
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_api__rtp_parameters",
+ "webrtc_api__audio_options_api",
+ "webrtc_rtc_base__weak_ptr",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtc_event_log_output_file",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_video__video_frame",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_api__media_stream_interface",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_logging__ice_log",
+ "webrtc_p2p__rtc_p2p",
+ "webrtc_media__rtc_data",
+ "webrtc_api__ice_transport_factory",
+ "webrtc_video__builtin_video_bitrate_allocator_factory",
+ "webrtc_pc__rtc_pc_base",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_stream_encoder_impl",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "video/encoder_bitrate_adjuster.cc",
+ "video/encoder_overshoot_detector.cc",
+ "video/frame_encode_metadata_writer.cc",
+ "video/video_source_sink_controller.cc",
+ "video/video_stream_encoder.cc",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_video__video_adaptation",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_rtc_base__rtc_event",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_experiments__quality_scaler_settings",
+ "webrtc_experiments__quality_rampup_experiment",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_video__video_frame",
+ "webrtc_video__encoded_image",
+ "webrtc_video__video_frame_i420",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__quality_scaling_experiment",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_experiments__balanced_degradation_settings",
+ "webrtc_video_coding__webrtc_vp9_helpers",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_adaptation__resource_adaptation",
+ "webrtc_adaptation__video_adaptation",
+ "webrtc_video_coding__video_coding",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video_stream_encoder_create",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video/video_stream_encoder_create.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_task_queue__task_queue",
+ "webrtc_video__video_frame",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_adaptation__video_adaptation",
+ "webrtc_video__video_stream_encoder_impl",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video_codecs__builtin_video_decoder_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/video_codecs/builtin_video_decoder_factory.cc"],
+ host_supported: true,
+ static_libs: [
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_media__rtc_internal_video_codecs",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_video__video",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "video/buffered_frame_decryptor.cc",
+ "video/call_stats.cc",
+ "video/call_stats2.cc",
+ "video/encoder_rtcp_feedback.cc",
+ "video/quality_limitation_reason_tracker.cc",
+ "video/quality_threshold.cc",
+ "video/receive_statistics_proxy.cc",
+ "video/receive_statistics_proxy2.cc",
+ "video/report_block_stats.cc",
+ "video/rtp_streams_synchronizer.cc",
+ "video/rtp_streams_synchronizer2.cc",
+ "video/rtp_video_stream_receiver.cc",
+ "video/rtp_video_stream_receiver2.cc",
+ "video/rtp_video_stream_receiver_frame_transformer_delegate.cc",
+ "video/send_delay_stats.cc",
+ "video/send_statistics_proxy.cc",
+ "video/stats_counter.cc",
+ "video/stream_synchronization.cc",
+ "video/transport_adapter.cc",
+ "video/video_quality_observer.cc",
+ "video/video_quality_observer2.cc",
+ "video/video_receive_stream.cc",
+ "video/video_receive_stream2.cc",
+ "video/video_send_stream.cc",
+ "video/video_send_stream_impl.cc",
+ "video/video_stream_decoder.cc",
+ "video/video_stream_decoder2.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_synchronization__mutex",
+ "webrtc_time__timestamp_extrapolator",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__timestamp",
+ "webrtc_rtc_base__weak_ptr",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_experiments__keyframe_interval_settings_experiment",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_crypto__options",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_call__bitrate_allocator",
+ "webrtc_video__video_frame",
+ "webrtc_utility__utility",
+ "webrtc_video_coding__nack_module",
+ "webrtc_experiments__min_video_bitrate_experiment",
+ "webrtc_video__encoded_image",
+ "webrtc_deprecated__nack_module",
+ "webrtc_video__video_frame_i420",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_experiments__quality_scaling_experiment",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_video_processing__video_processing",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_call__rtp_receiver",
+ "webrtc_pacing__pacing",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_call__rtp_sender",
+ "webrtc_video_coding__video_coding",
+ "webrtc_video__frame_dumping_decoder",
+ "webrtc_video__video_stream_encoder_create",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_call__call",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "call/call.cc",
+ "call/call_factory.cc",
+ "call/degraded_call.cc",
+ "call/flexfec_receive_stream_impl.cc",
+ "call/receive_time_calculator.cc",
+ ],
+ host_supported: true,
+ static_libs: [
+ "webrtc_network__sent_packet",
+ "webrtc_api__transport_api",
+ "webrtc_rtc_base__checks",
+ "webrtc_modules__module_api",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__time_delta",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_logging__rtc_stream_config",
+ "webrtc_logging__rtc_event_video",
+ "webrtc_call__simulated_network",
+ "webrtc_call__bitrate_allocator",
+ "webrtc_utility__utility",
+ "webrtc_logging__rtc_event_audio",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_logging__rtc_event_rtp_rtcp",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ "webrtc_call__fake_network",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_call__rtp_receiver",
+ "webrtc_pacing__pacing",
+ "webrtc_audio__audio",
+ "webrtc_adaptation__resource_adaptation",
+ "webrtc_congestion_controller__congestion_controller",
+ "webrtc_call__rtp_sender",
+ "webrtc_video_coding__video_coding",
+ "webrtc_video__video",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_media__rtc_audio_video",
+ defaults: ["webrtc_defaults"],
+ srcs: [
+ "media/engine/adm_helpers.cc",
+ "media/engine/payload_type_mapper.cc",
+ "media/engine/simulcast.cc",
+ "media/engine/unhandled_packets_buffer.cc",
+ "media/engine/webrtc_media_engine.cc",
+ "media/engine/webrtc_video_engine.cc",
+ "media/engine/webrtc_voice_engine.cc",
+ ":webrtc_video_capture__video_capture_internal_impl",
+ ],
+ host_supported: true,
+ cflags: ["-DHAVE_WEBRTC_VIDEO"],
+ static_libs: [
+ "webrtc_media__rtc_constants",
+ "webrtc_api__transport_api",
+ "webrtc_transport__bitrate_settings",
+ "webrtc_base64__base64",
+ "webrtc_rtc_base__checks",
+ "webrtc_task_queue__task_queue",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_synchronization__mutex",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_experiments__normalize_simulcast_size_experiment",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_video__video_frame",
+ "webrtc_experiments__min_video_bitrate_experiment",
+ "webrtc_video__video_frame_i420",
+ "webrtc_audio_processing__api",
+ "webrtc_common_video__common_video",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_api__media_stream_interface",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_audio_device__audio_device_impl",
+ "webrtc_audio_mixer__audio_mixer_impl",
+ "webrtc_aec_dump__null_aec_dump_factory",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_video_codecs__rtc_software_fallback_wrappers",
+ "webrtc_video_coding__video_coding",
+ "webrtc_call__call",
+ ],
+}
+
+cc_library_static {
+ name: "webrtc_api__create_peerconnection_factory",
+ defaults: ["webrtc_defaults"],
+ srcs: ["api/create_peerconnection_factory.cc"],
+ host_supported: true,
+ cflags: ["-DHAVE_WEBRTC_VIDEO"],
+ static_libs: [
+ "webrtc_rtc_event_log__rtc_event_log_factory",
+ "webrtc_task_queue__default_task_queue_factory",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_audio_processing__api",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_media__rtc_media_base",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_pc__peerconnection",
+ "webrtc_media__rtc_audio_video",
+ ],
+}
+
+cc_library_static {
+ name: "libwebrtc",
+ defaults: ["webrtc_defaults"],
+ export_include_dirs: ["."],
+ whole_static_libs: [
+ "webrtc_spl_sqrt_floor__spl_sqrt_floor",
+ "webrtc_fft__fft",
+ "webrtc_ooura__fft_size_256",
+ "webrtc_audio_coding__audio_network_adaptor_config",
+ "webrtc_audio_coding__pcm16b_c",
+ "webrtc_sigslot__sigslot",
+ "webrtc_network__sent_packet",
+ "webrtc_pc__media_protocol_names",
+ "webrtc_g722__g722_3p",
+ "webrtc_media__rtc_constants",
+ "webrtc_api__transport_api",
+ "webrtc_synchronization__yield",
+ "webrtc_g711__g711_3p",
+ "webrtc_audio_processing__audio_processing_statistics",
+ "webrtc_transport__bitrate_settings",
+ "webrtc_base64__base64",
+ "webrtc_audio_coding__g711_c",
+ "webrtc_ooura__fft_size_128",
+ "webrtc_rtc_base__checks",
+ "webrtc_audio_coding__isac_vad",
+ "webrtc_memory__aligned_malloc",
+ "webrtc_audio_coding__g722_c",
+ "webrtc_system_wrappers__cpu_features_linux",
+ "webrtc_generic_frame_descriptor__generic_frame_descriptor",
+ "webrtc_rtc_base__platform_thread_types",
+ "webrtc_modules__module_api",
+ "webrtc_task_queue__task_queue",
+ "webrtc_utility__pffft_wrapper",
+ "webrtc_utility__cascaded_biquad_filter",
+ "webrtc_synchronization__yield_policy",
+ "webrtc_rtc_base__stringutils",
+ "webrtc_rtc_base__criticalsection",
+ "webrtc_system__file_wrapper",
+ "webrtc_synchronization__mutex",
+ "webrtc_synchronization__rw_lock_wrapper",
+ "webrtc_time__timestamp_extrapolator",
+ "webrtc_utility__legacy_delay_estimator",
+ "webrtc_synchronization__sequence_checker",
+ "webrtc_video__video_bitrate_allocation",
+ "webrtc_api__rtp_parameters",
+ "webrtc_video__video_adaptation",
+ "webrtc_audio_processing__config",
+ "webrtc_api__audio_options_api",
+ "webrtc_units__data_size",
+ "webrtc_rtc_base__timeutils",
+ "webrtc_units__time_delta",
+ "webrtc_rtc_base__rtc_event",
+ "webrtc_units__timestamp",
+ "webrtc_units__frequency",
+ "webrtc_rtc_base__weak_ptr",
+ "webrtc_rtc_base__platform_thread",
+ "webrtc_task_utils__pending_task_safety_flag",
+ "webrtc_rtc_event_log__rtc_event_log",
+ "webrtc_rtc_base__logging",
+ "webrtc_units__data_rate",
+ "webrtc_system_wrappers__field_trial",
+ "webrtc_video_coding__chain_diff_calculator",
+ "webrtc_transport__network_control",
+ "webrtc_experiments__field_trial_parser",
+ "webrtc_logging__rtc_event_pacing",
+ "webrtc_transport__field_trial_based_config",
+ "webrtc_video_coding__frame_dependencies_calculator",
+ "webrtc_rtc_base__rtc_task_queue_libevent",
+ "webrtc_api__rtc_error",
+ "webrtc_rtc_event_log__rtc_event_log_factory",
+ "webrtc_goog_cc__link_capacity_estimator",
+ "webrtc_video__video_bitrate_allocator",
+ "webrtc_experiments__keyframe_interval_settings_experiment",
+ "webrtc_rtc_base__rtc_task_queue",
+ "webrtc_task_queue__default_task_queue_factory",
+ "webrtc_rtc_base__rtc_base_approved",
+ "webrtc_api__rtc_event_log_output_file",
+ "webrtc_experiments__jitter_upper_bound_experiment",
+ "webrtc_agc2__biquad_filter",
+ "webrtc_rtc_base__rtc_numerics",
+ "webrtc_experiments__cpu_speed_experiment",
+ "webrtc_system_wrappers__system_wrappers",
+ "webrtc_video__video_rtp_headers",
+ "webrtc_opus__audio_encoder_opus_config",
+ "webrtc_audio__aec3_config",
+ "webrtc_audio_coding__webrtc_opus_wrapper",
+ "webrtc_agc2__common",
+ "webrtc_experiments__alr_experiment",
+ "webrtc_experiments__quality_scaler_settings",
+ "webrtc_experiments__normalize_simulcast_size_experiment",
+ "webrtc_audio_codecs__audio_codecs_api",
+ "webrtc_experiments__quality_rampup_experiment",
+ "webrtc_stats__rtc_stats",
+ "webrtc_system_wrappers__metrics",
+ "webrtc_experiments__rtt_mult_experiment",
+ "webrtc_rnn_vad__rnn_vad",
+ "webrtc_rtc_base__rtc_base",
+ "webrtc_common_audio__common_audio_cc",
+ "webrtc_pacing__interval_budget",
+ "webrtc_common_audio__common_audio_c",
+ "webrtc_aecm__aecm_core",
+ "webrtc_video_processing__video_processing_sse2",
+ "webrtc_agc2__gain_applier",
+ "webrtc_task_utils__repeating_task",
+ "webrtc_rtc_base__audio_format_to_string",
+ "webrtc_memory__fifo_buffer",
+ "webrtc_api__rtp_headers",
+ "webrtc_rtc_base__rate_limiter",
+ "webrtc_audio_coding__audio_coding_opus_common",
+ "webrtc_logging__rtc_stream_config",
+ "webrtc_audio_coding__legacy_encoded_audio_frame",
+ "webrtc_audio_coding__webrtc_multiopus",
+ "webrtc_api__rtp_packet_info",
+ "webrtc_crypto__options",
+ "webrtc_media__rtc_h264_profile_id",
+ "webrtc_audio_coding__webrtc_cng",
+ "webrtc_common_audio__common_audio_sse2",
+ "webrtc_logging__rtc_event_video",
+ "webrtc_common_audio__common_audio",
+ "webrtc_call__simulated_network",
+ "webrtc_call__bitrate_allocator",
+ "webrtc_agc2__rnn_vad_with_level",
+ "webrtc_audio_coding__g722",
+ "webrtc_audio_device__audio_device_buffer",
+ "webrtc_audio__audio_frame_api",
+ "webrtc_goog_cc__alr_detector",
+ "webrtc_video__video_frame",
+ "webrtc_audio_processing__apm_logging",
+ "webrtc_audio_coding__ilbc_c",
+ "webrtc_opus__audio_encoder_multiopus",
+ "webrtc_utility__utility",
+ "webrtc_video_coding__nack_module",
+ "webrtc_g722__audio_encoder_g722",
+ "webrtc_audio_coding__isac_c",
+ "webrtc_audio_coding__g711",
+ "webrtc_opus__audio_decoder_multiopus",
+ "webrtc_common_audio__fir_filter_factory",
+ "webrtc_audio_coding__ilbc",
+ "webrtc_audio_coding__audio_encoder_cng",
+ "webrtc_agc2__fixed_digital",
+ "webrtc_logging__rtc_event_audio",
+ "webrtc_experiments__min_video_bitrate_experiment",
+ "webrtc_video__encoded_image",
+ "webrtc_agc__legacy_agc",
+ "webrtc_g722__audio_decoder_g722",
+ "webrtc_audio_coding__pcm16b",
+ "webrtc_audio_coding__red",
+ "webrtc_utility__audio_frame_operations",
+ "webrtc_audio_coding__isac",
+ "webrtc_deprecated__nack_module",
+ "webrtc_video__video_frame_i420",
+ "webrtc_isac__audio_encoder_isac_float",
+ "webrtc_audio_processing__api",
+ "webrtc_transient__transient_suppressor_impl",
+ "webrtc_ilbc__audio_encoder_ilbc",
+ "webrtc_rtp_rtcp__rtp_video_header",
+ "webrtc_agc2__noise_level_estimator",
+ "webrtc_audio_processing__audio_buffer",
+ "webrtc_isac__audio_decoder_isac_float",
+ "webrtc_vad__vad",
+ "webrtc_audio_device__audio_device_generic",
+ "webrtc_audio_processing__high_pass_filter",
+ "webrtc_ns__ns",
+ "webrtc_common_video__common_video",
+ "webrtc_g711__audio_encoder_g711",
+ "webrtc_agc2__adaptive_digital",
+ "webrtc_L16__audio_encoder_L16",
+ "webrtc_audio_processing__audio_frame_proxies",
+ "webrtc_ilbc__audio_decoder_ilbc",
+ "webrtc_g711__audio_decoder_g711",
+ "webrtc_audio_processing__optionally_built_submodule_creators",
+ "webrtc_video__video_frame_i010",
+ "webrtc_L16__audio_decoder_L16",
+ "webrtc_video_codecs__video_codecs_api",
+ "webrtc_audio_coding__audio_network_adaptor",
+ "webrtc_agc__level_estimation",
+ "webrtc_api__media_stream_interface",
+ "webrtc_audio_mixer__audio_frame_manipulator",
+ "webrtc_experiments__quality_scaling_experiment",
+ "webrtc_audio_coding__webrtc_opus",
+ "webrtc_audio_processing__voice_detection",
+ "webrtc_media__rtc_vp9_profile",
+ "webrtc_aec3__aec3",
+ "webrtc_opus__audio_decoder_opus",
+ "webrtc_experiments__rate_control_settings",
+ "webrtc_video_coding__video_codec_interface",
+ "webrtc_av1__libaom_av1_encoder",
+ "webrtc_audio_device__audio_device_impl",
+ "webrtc_av1__libaom_av1_decoder",
+ "webrtc_audio_coding__neteq",
+ "webrtc_goog_cc__pushback_controller",
+ "webrtc_video_processing__video_processing",
+ "webrtc_rtp_rtcp__rtp_rtcp_format",
+ "webrtc_experiments__balanced_degradation_settings",
+ "webrtc_video_coding__encoded_frame",
+ "webrtc_experiments__stable_target_rate_experiment",
+ "webrtc_audio_mixer__audio_mixer_impl",
+ "webrtc_agc2__level_estimation_agc",
+ "webrtc_remote_bitrate_estimator__remote_bitrate_estimator",
+ "webrtc_agc__agc",
+ "webrtc_opus__audio_encoder_opus",
+ "webrtc_rtp__transport_feedback",
+ "webrtc_audio_codecs__builtin_audio_decoder_factory",
+ "webrtc_audio_coding__audio_coding",
+ "webrtc_audio__aec3_factory",
+ "webrtc_logging__rtc_event_rtp_rtcp",
+ "webrtc_video_coding__webrtc_vp9_helpers",
+ "webrtc_aec_dump__null_aec_dump_factory",
+ "webrtc_video__encoded_frame",
+ "webrtc_logging__rtc_event_bwe",
+ "webrtc_audio_codecs__builtin_audio_encoder_factory",
+ "webrtc_audio_processing__audio_processing",
+ "webrtc_goog_cc__probe_controller",
+ "webrtc_goog_cc__loss_based_controller",
+ "webrtc_goog_cc__estimators",
+ "webrtc_call__rtp_interfaces",
+ "webrtc_call__video_stream_api",
+ "webrtc_call__call_interfaces",
+ "webrtc_media__rtc_media_base",
+ "webrtc_video_capture__video_capture_module",
+ "webrtc_call__fake_network",
+ "webrtc_api__libjingle_peerconnection_api",
+ "webrtc_logging__ice_log",
+ "webrtc_call__bitrate_configurator",
+ "webrtc_rtp_rtcp__rtp_rtcp",
+ "webrtc_call__rtp_receiver",
+ "webrtc_p2p__rtc_p2p",
+ "webrtc_pacing__pacing",
+ "webrtc_media__rtc_data",
+ "webrtc_goog_cc__delay_based_bwe",
+ "webrtc_video_coding__video_coding_utility",
+ "webrtc_audio__audio",
+ "webrtc_api__ice_transport_factory",
+ "webrtc_adaptation__resource_adaptation",
+ "webrtc_rtp__control_handler",
+ "webrtc_video_coding__webrtc_vp8_temporal_layers",
+ "webrtc_congestion_controller__congestion_controller",
+ "webrtc_video_coding__webrtc_multiplex",
+ "webrtc_video__builtin_video_bitrate_allocator_factory",
+ "webrtc_video_codecs__rtc_software_fallback_wrappers",
+ "webrtc_goog_cc__goog_cc",
+ "webrtc_video_coding__webrtc_vp9",
+ "webrtc_video_coding__webrtc_h264",
+ "webrtc_media__rtc_simulcast_encoder_adapter",
+ "webrtc_adaptation__video_adaptation",
+ "webrtc_transport__goog_cc",
+ "webrtc_video_codecs__vp8_temporal_layers_factory",
+ "webrtc_test__fake_video_codecs",
+ "webrtc_media__rtc_encoder_simulcast_proxy",
+ "webrtc_pc__rtc_pc_base",
+ "webrtc_call__rtp_sender",
+ "webrtc_video_coding__video_coding",
+ "webrtc_video_coding__webrtc_vp8",
+ "webrtc_media__rtc_internal_video_codecs",
+ "webrtc_video_codecs__builtin_video_encoder_factory",
+ "webrtc_video__frame_dumping_decoder",
+ "webrtc_pc__peerconnection",
+ "webrtc_video__video_stream_encoder_impl",
+ "webrtc_video__video_stream_encoder_create",
+ "webrtc_video_codecs__builtin_video_decoder_factory",
+ "webrtc_video__video",
+ "webrtc_call__call",
+ "webrtc_media__rtc_audio_video",
+ "webrtc_api__create_peerconnection_factory",
+ "libpffft",
+ "rnnoise_rnn_vad",
+ "usrsctplib",
+ ],
+ srcs: [
+ ":webrtc_rtp__dependency_descriptor",
+ ":webrtc_audio_processing__rms_level",
+ ":webrtc_rtc_base__rtc_operations_chain",
+ ":webrtc_av1__scalable_video_controller",
+ ":webrtc_adaptation__resource_adaptation_api",
+ ":webrtc_neteq__tick_timer",
+ ":webrtc_transport__stun_types",
+ ":webrtc_neteq__neteq_api",
+ ":webrtc_video__video_frame_metadata",
+ ":webrtc_audio_processing__aec_dump_interface",
+ ":webrtc_neteq__default_neteq_controller_factory",
+ ":webrtc_audio_coding__default_neteq_factory",
+ ":webrtc_video_capture__video_capture_internal_impl",
+ ],
+}
diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/Android.mk
diff --git a/BUILD.gn b/BUILD.gn
index f7d15f47a9..b676d1e15e 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -265,6 +265,10 @@ config("common_config") {
defines += [ "WEBRTC_USE_H264" ]
}
+ if (rtc_use_absl_mutex) {
+ defines += [ "WEBRTC_ABSL_MUTEX" ]
+ }
+
if (rtc_disable_logging) {
defines += [ "RTC_DISABLE_LOGGING" ]
}
@@ -410,7 +414,7 @@ config("common_config") {
}
config("common_objc") {
- libs = [ "Foundation.framework" ]
+ frameworks = [ "Foundation.framework" ]
if (rtc_use_metal_rendering) {
defines = [ "RTC_SUPPORTS_METAL" ]
@@ -580,6 +584,14 @@ if (rtc_include_tests) {
}
}
+ rtc_test("benchmarks") {
+ testonly = true
+ deps = [
+ "rtc_base/synchronization:mutex_benchmark",
+ "test:benchmark_main",
+ ]
+ }
+
# This runs tests that must run in real time and therefore can take some
# time to execute. They are in a separate executable to avoid making the
# regular unittest suite too slow to run frequently.
diff --git a/DEPS b/DEPS
index 2cda0d3ed6..6b5c55de2e 100644
--- a/DEPS
+++ b/DEPS
@@ -1,44 +1,51 @@
# This file contains dependencies for WebRTC.
gclient_gn_args_file = 'src/build/config/gclient_args.gni'
-gclient_gn_args = []
+gclient_gn_args = [
+ 'mac_xcode_version',
+]
vars = {
# By default, we should check out everything needed to run on the main
# chromium waterfalls. More info at: crbug.com/570091.
'checkout_configuration': 'default',
'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"',
- 'chromium_revision': 'a775f4aeaa5e5847ca65467a39d5b4a2d1d54d6f',
+ 'chromium_revision': '6b2bcf62a8d67c7a80bbb0ac1ef11921b7c57d79',
+
+ # This can be overridden, e.g. with custom_vars, to download a nonstandard
+ # Xcode version in build/mac_toolchain.py
+ # instead of downloading the prebuilt pinned revision.
+ 'mac_xcode_version': 'default',
}
deps = {
# TODO(kjellander): Move this to be Android-only once the libevent dependency
# in base/third_party/libevent is solved.
'src/base':
- 'https://chromium.googlesource.com/chromium/src/base@2e2abc3c3844639a5f8d8ec3762875f01f9c1384',
+ 'https://chromium.googlesource.com/chromium/src/base@db84a3cbe5b7a0402ded5e1836ecbd1ed7e681a5',
'src/build':
- 'https://chromium.googlesource.com/chromium/src/build@212b25dc86151e32abc71fa5d129614937666d69',
+ 'https://chromium.googlesource.com/chromium/src/build@124d030a73694c96efe4675a1c0df607e86516fe',
'src/buildtools':
- 'https://chromium.googlesource.com/chromium/src/buildtools@2c41dfb19abe40908834803b6fed797b0f341fe1',
+ 'https://chromium.googlesource.com/chromium/src/buildtools@9e121212d42be62a7cce38072f925f8398d11e49',
# Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC.
'src/examples/androidtests/third_party/gradle': {
'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8',
'condition': 'checkout_android',
},
'src/ios': {
- 'url': 'https://chromium.googlesource.com/chromium/src/ios@2f8d00d1cb5b73ec4329dff7ec1ababf6a05e628',
+ 'url': 'https://chromium.googlesource.com/chromium/src/ios@d51d66a3c98ad30c027bd11966507bf51e6b6a5f',
'condition': 'checkout_ios',
},
'src/testing':
- 'https://chromium.googlesource.com/chromium/src/testing@e5ced5141379ee8ae28b4f93d3c02df039d2b052',
+ 'https://chromium.googlesource.com/chromium/src/testing@0411afc27fd137a2d82348d439aa4daff078117b',
'src/third_party':
- 'https://chromium.googlesource.com/chromium/src/third_party@76bcf1e4994827b25b4539a9ef5eed382d2de992',
+ 'https://chromium.googlesource.com/chromium/src/third_party@98978dde2ae4cebc3c99203ca4674a58f762f1b5',
'src/buildtools/linux64': {
'packages': [
{
'package': 'gn/gn/linux-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe',
}
],
'dep_type': 'cipd',
@@ -48,7 +55,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/mac-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe',
}
],
'dep_type': 'cipd',
@@ -58,7 +65,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/windows-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe',
}
],
'dep_type': 'cipd',
@@ -72,7 +79,7 @@ deps = {
'src/buildtools/third_party/libc++abi/trunk':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@196ba1aaa8ac285d94f4ea8d9836390a45360533',
'src/buildtools/third_party/libunwind/trunk':
- 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@43bb9f872232f531bac80093ceb4de61c64b9ab7',
+ 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@d999d54f4bca789543a2eb6c995af2d9b5a1f3ed',
'src/tools/clang/dsymutil': {
'packages': [
@@ -100,7 +107,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_build_tools/aapt2',
- 'version': 'LKH_DI44rZhQ4RkScMFQLGSJ4jZyuPcff0llITnq-i4C',
+ 'version': 'R2k5wwOlIaS6sjv2TIyHotiPJod-6KqnZO8NH-KFK8sC',
},
],
'condition': 'checkout_android',
@@ -119,20 +126,20 @@ deps = {
},
'src/third_party/boringssl/src':
- 'https://boringssl.googlesource.com/boringssl.git@f9e0cda2d81858d10ceeadb0d21f4026f8602cf7',
+ 'https://boringssl.googlesource.com/boringssl.git@eda849d2e6e6a15a5a4dc728568ec12f21ebfb6d',
'src/third_party/breakpad/breakpad':
- 'https://chromium.googlesource.com/breakpad/breakpad.git@f32b83eb08e9ee158d3037b2114357187fd45a05',
+ 'https://chromium.googlesource.com/breakpad/breakpad.git@2757a2c9c819fcae3784576aef0c8400c7ad06d7',
'src/third_party/catapult':
- 'https://chromium.googlesource.com/catapult.git@4b4e8791324284c92a5e6a84d93ed9ccdbbd375e',
+ 'https://chromium.googlesource.com/catapult.git@a30bbb68c2e324a8ab6b3f54ab17ab47fca46298',
'src/third_party/ced/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5',
},
'src/third_party/colorama/src':
'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8',
'src/third_party/depot_tools':
- 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@e1a9c8db7e7cf6ba7d70b06ec3f3cf6d74451680',
+ 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@9949ab7a4b1cb3c342b38129b4e0bfcfb2ef5749',
'src/third_party/ffmpeg':
- 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@31886e8f39a47a9d7107d4c937bb053dcf5699ce',
+ 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@d2dd36c03501e995e8ce2d792d834392b2e62bfe',
'src/third_party/findbugs': {
'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67',
'condition': 'checkout_android',
@@ -143,12 +150,15 @@ deps = {
'condition': 'checkout_linux',
},
'src/third_party/freetype/src':
- 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@11beee855e29757a07320fd60e85de2e8da4e037',
+ 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@b7c467b6efa5a91945854de81632be45d6f360ff',
'src/third_party/harfbuzz-ng/src':
- 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@0d5695983e8bf3184ecd4cb92f737b9dfe5d6d25',
+ 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@e3af529e511ca492284cdd9f4584666b88a9e00f',
+ 'src/third_party/google_benchmark/src': {
+ 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@367119482ff4abc3d73e4a109b410090fc281337',
+ },
# WebRTC-only dependency (not present in Chromium).
'src/third_party/gtest-parallel':
- 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@df0b4e476f98516cea7d593e5dbb0fca44f6ee7f',
+ 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@aabba21acd68a8814c70a6c2937f1625de715411',
'src/third_party/google-truth': {
'packages': [
{
@@ -160,13 +170,9 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/googletest/src':
- 'https://chromium.googlesource.com/external/github.com/google/googletest.git@e3f0319d89f4cbf32993de595d984183b1a9fc57',
+ 'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753',
'src/third_party/icu': {
- 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@13cfcd5874f6c39c34ec57fa5295e7910ae90b8d',
- },
- 'src/third_party/jsr-305/src': {
- 'url': 'https://chromium.googlesource.com/external/jsr-305.git@642c508235471f7220af6d5df2d3210e3bfc0919',
- 'condition': 'checkout_android',
+ 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@79326efe26e5440f530963704c3c0ff965b3a4ac',
},
'src/third_party/jdk': {
'packages': [
@@ -196,9 +202,15 @@ deps = {
'src/third_party/libsrtp':
'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c',
'src/third_party/libaom/source/libaom':
- 'https://aomedia.googlesource.com/aom.git@611c58e511042782869dfcb6e0450587b30fc5f2',
+ 'https://aomedia.googlesource.com/aom.git@0a7c8715d79222adf6cfc4c1cdc3fcb8fb3951f3',
+ 'src/third_party/libunwindstack': {
+ 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@11659d420a71e7323b379ea8781f07c6f384bc7e',
+ 'condition': 'checkout_android',
+ },
+ 'src/third_party/perfetto':
+ 'https://android.googlesource.com/platform/external/perfetto.git@026b1e2be09a8cd8ced37f99e771eca00ea439b0',
'src/third_party/libvpx/source/libvpx':
- 'https://chromium.googlesource.com/webm/libvpx.git@8dc6f353c6d04329cf59529f41a6f46d9dbfcafa',
+ 'https://chromium.googlesource.com/webm/libvpx.git@a1cee8dc919df1980d802e1a9bce1259ec34cba8',
'src/third_party/libyuv':
'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad',
'src/third_party/lss': {
@@ -212,16 +224,16 @@ deps = {
# Used by boringssl.
'src/third_party/nasm': {
- 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@4fa54ca5f7fc3a15a8c78ac94688e64d3e4e4fa1'
+ 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@19f3fad68da99277b2882939d3b2fa4c4b8d51d9'
},
'src/third_party/openh264/src':
- 'https://chromium.googlesource.com/external/github.com/cisco/openh264@6f26bce0b1c4e8ce0e13332f7c0083788def5fdf',
+ 'https://chromium.googlesource.com/external/github.com/cisco/openh264@a5473711f3e20c6bd1c33d81b6c7b9a0618aa18f',
'src/third_party/r8': {
'packages': [
{
'package': 'chromium/third_party/r8',
- 'version': 'I91wspV6GMc7l_m-k9v3-ooP-CBrK76OVc3rfnB5T7kC',
+ 'version': 'vvymFSkKtWKWNmfz0PL_0H8MD8V40P--A9aUfxfpF6QC',
},
],
'condition': 'checkout_android',
@@ -241,27 +253,21 @@ deps = {
'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@f172b30356d821d180fa4ecfa3e71c7274a32de4',
'condition': 'checkout_android',
},
- 'src/third_party/robolectric/robolectric': {
- 'url': 'https://chromium.googlesource.com/external/robolectric.git@f2df0efb033bb402399ebfb9bf58aefee5cced05',
- 'condition': 'checkout_android',
- },
'src/third_party/ub-uiautomator/lib': {
'url': 'https://chromium.googlesource.com/chromium/third_party/ub-uiautomator.git@00270549ce3161ae72ceb24712618ea28b4f9434',
'condition': 'checkout_android',
},
'src/third_party/usrsctp/usrsctplib':
- 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@bee946a606752a443bd70bca1cb296527fed706d',
+ 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@a8c51df76caae94254b1e59999405f739467490e',
# Dependency used by libjpeg-turbo.
'src/third_party/yasm/binaries': {
'url': 'https://chromium.googlesource.com/chromium/deps/yasm/binaries.git@52f9b3f4b0aa06da24ef8b123058bb61ee468881',
'condition': 'checkout_win',
},
- 'src/third_party/yasm/source/patched-yasm':
- 'https://chromium.googlesource.com/chromium/deps/yasm/patched-yasm.git@720b70524a4424b15fc57e82263568c8ba0496ad',
'src/tools':
- 'https://chromium.googlesource.com/chromium/src/tools@3f15275073fc339e14f39df61fbace5bad82e93d',
+ 'https://chromium.googlesource.com/chromium/src/tools@d50c1e9d058bfd361917e50ca9a66d486a8c6d3b',
'src/tools/swarming_client':
- 'https://chromium.googlesource.com/infra/luci/client-py.git@160b445a44e0daacf6f3f8570ca2707ec451f374',
+ 'https://chromium.googlesource.com/infra/luci/client-py.git@4c095d04179dc725a300085ae21fe3b79900d072',
'src/third_party/accessibility_test_framework': {
'packages': [
@@ -333,7 +339,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/espresso',
- 'version': 'c92dcfc4e894555a0b3c309f2b7939640eb1fee4',
+ 'version': 'y8fIfH8Leo2cPm7iGCYnBxZpwOlgLv8rm2mlcmJlvGsC',
},
],
'condition': 'checkout_android',
@@ -344,7 +350,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/guava',
- 'version': 'a6fba501f3a0de88b9be1daa2052632de5b96a46',
+ 'version': 'y8Zx7cKTiOunLhOrfC4hOt5kDQrLJ_Rq7ISDmXkPdYsC',
},
],
'condition': 'checkout_android',
@@ -398,12 +404,8 @@ deps = {
'version': '4gxhM8E62bvZpQs7Q3d0DinQaW0RLCIefhXrQBFkNy8C',
},
{
- 'package': 'chromium/third_party/android_sdk/public/tools-lint',
- 'version': '89hXqZYzCum3delB5RV7J_QyWkaRodqdtQS0s3LMh3wC',
- },
- {
'package': 'chromium/third_party/android_sdk/public/cmdline-tools',
- 'version': 'CR25ixsRhwuRnhdgDpGFyl9S0C_0HO9SUgFrwX46zq8C',
+ 'version': 'uM0XtAW9BHh8phcbhBDA9GfzP3bku2SP7AiMahhimnoC',
},
],
'condition': 'checkout_android',
@@ -469,7 +471,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/turbine',
- 'version': '3UJ600difG3ThRhtYrN9AfZ5kh8wCYtBiii1-NMlCrMC',
+ 'version': 'O_jNDJ4VdwYKBSDbd2BJ3mknaTFoVkvE7Po8XIiKy8sC',
},
],
'condition': 'checkout_android',
@@ -477,7 +479,7 @@ deps = {
},
'src/third_party/turbine/src': {
- 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '95f6fb6f1e962e8b6ec672905b0b04233f002dc2',
+ 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '0f2a5024fe4a9bb745bcd5ac7c655cebe11649bc',
'condition': 'checkout_android',
},
@@ -496,15 +498,15 @@ deps = {
'packages': [
{
'package': 'infra/tools/luci/isolate/${{platform}}',
- 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3',
+ 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936',
},
{
'package': 'infra/tools/luci/isolated/${{platform}}',
- 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3',
+ 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936',
},
{
'package': 'infra/tools/luci/swarming/${{platform}}',
- 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3',
+ 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936',
},
],
'dep_type': 'cipd',
@@ -601,6 +603,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_activity_activity': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_activity_activity',
+ 'version': 'version:1.1.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_annotation_annotation': {
'packages': [
{
@@ -612,11 +625,33 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental',
+ 'version': 'version:1.0.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_appcompat_appcompat': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.2.0-beta01-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_appcompat_appcompat_resources': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat_resources',
+ 'version': 'version:1.2.0-beta01-cr0',
},
],
'condition': 'checkout_android',
@@ -627,7 +662,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_common',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -638,7 +673,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_runtime',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -671,7 +706,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_collection_collection',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -693,7 +728,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -704,7 +739,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_core_core',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.3.0-beta01-cr0',
},
],
'condition': 'checkout_android',
@@ -755,11 +790,22 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_exifinterface_exifinterface': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_exifinterface_exifinterface',
+ 'version': 'version:1.0.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_fragment_fragment': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_fragment_fragment',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.2.5-cr0',
},
],
'condition': 'checkout_android',
@@ -869,7 +915,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -902,7 +948,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata_core',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -913,7 +959,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_runtime',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -924,7 +970,18 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel',
- 'version': 'version:2.0.0-cr0',
+ 'version': 'version:2.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate',
+ 'version': 'version:2.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1001,7 +1058,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_preference_preference',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.1-cr0',
},
],
'condition': 'checkout_android',
@@ -1023,6 +1080,17 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview',
+ 'version': 'version:1.1.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_savedstate_savedstate': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_savedstate_savedstate',
'version': 'version:1.0.0-cr0',
},
],
@@ -1063,11 +1131,22 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core',
- 'version': 'version:3.1.0-cr0',
+ 'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1078,7 +1157,29 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource',
- 'version': 'version:3.1.0-cr0',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web',
+ 'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1144,7 +1245,18 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition',
- 'version': 'version:1.0.0-rc02-cr0',
+ 'version': 'version:1.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_tvprovider_tvprovider': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_tvprovider_tvprovider',
+ 'version': 'version:1.0.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1155,7 +1267,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1166,7 +1278,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1177,6 +1289,17 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable',
+ 'version': 'version:1.1.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2',
'version': 'version:1.0.0-cr0',
},
],
@@ -1195,6 +1318,28 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent',
+ 'version': 'version:3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/classworlds_classworlds': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/classworlds_classworlds',
+ 'version': 'version:1.1-alpha-2-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': {
'packages': [
{
@@ -1635,11 +1780,22 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs',
+ 'version': 'version:1.0.5-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine',
- 'version': 'version:2.7.0-cr0',
+ 'version': 'version:2.8.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1881,7 +2037,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material',
- 'version': 'version:1.0.0-rc02-cr0',
+ 'version': 'version:1.2.0-alpha06-cr0',
},
],
'condition': 'checkout_android',
@@ -1921,6 +2077,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations',
+ 'version': 'version:1.7-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/com_google_code_findbugs_jFormatString': {
'packages': [
{
@@ -2002,7 +2169,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation',
- 'version': 'version:2.3.4-cr0',
+ 'version': 'version:2.4.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2013,7 +2180,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations',
- 'version': 'version:2.3.4-cr0',
+ 'version': 'version:2.4.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2024,7 +2191,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api',
- 'version': 'version:2.3.4-cr0',
+ 'version': 'version:2.4.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2035,7 +2202,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core',
- 'version': 'version:2.3.4-cr0',
+ 'version': 'version:2.4.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2046,7 +2213,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations',
- 'version': 'version:2.3.4-cr0',
+ 'version': 'version:2.4.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2145,7 +2312,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite',
- 'version': 'version:3.11.4-cr0',
+ 'version': 'version:3.12.2-cr0',
},
],
'condition': 'checkout_android',
@@ -2229,6 +2396,28 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/nekohtml_nekohtml': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/nekohtml_nekohtml',
+ 'version': 'version:1.9.6.2-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/nekohtml_xercesMinimal': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/nekohtml_xercesminimal',
+ 'version': 'version:1.9.6.2-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/net_ltgt_gradle_incap_incap': {
'packages': [
{
@@ -2251,6 +2440,193 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/org_apache_ant_ant': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant',
+ 'version': 'version:1.8.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_ant_ant_launcher': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant_launcher',
+ 'version': 'version:1.8.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks',
+ 'version': 'version:2.1.3-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_model': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_model',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_profile': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_profile',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_project': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_project',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_maven_settings': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_settings',
+ 'version': 'version:2.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file',
+ 'version': 'version:1.0-beta-6-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight',
+ 'version': 'version:1.0-beta-6-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared',
+ 'version': 'version:1.0-beta-6-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api',
+ 'version': 'version:1.0-beta-6-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup',
+ 'version': 'version:1.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': {
'packages': [
{
@@ -2266,40 +2642,62 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual',
- 'version': 'version:3.0.0-cr0',
+ 'version': 'version:2.10.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_checkerframework_dataflow': {
+ 'src/third_party/android_deps/libs/org_checkerframework_dataflow_shaded': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow',
- 'version': 'version:3.0.0-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_shaded',
+ 'version': 'version:3.1.2-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_checkerframework_javacutil': {
+ 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_javacutil',
- 'version': 'version:3.0.0-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations',
+ 'version': 'version:1.17-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': {
+ 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations',
- 'version': 'version:1.17-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default',
+ 'version': 'version:1.0-alpha-9-stable-1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation',
+ 'version': 'version:1.11-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils',
+ 'version': 'version:1.5.15-cr0',
},
],
'condition': 'checkout_android',
@@ -2427,33 +2825,154 @@ deps = {
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_plumelib_plume_util': {
+ 'src/third_party/android_deps/libs/org_robolectric_annotations': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_plumelib_plume_util',
- 'version': 'version:1.0.6-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations',
+ 'version': 'version:4.3.1-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_plumelib_reflection_util': {
+ 'src/third_party/android_deps/libs/org_robolectric_junit': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_plumelib_reflection_util',
- 'version': 'version:0.0.2-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_junit',
+ 'version': 'version:4.3.1-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_plumelib_require_javadoc': {
+ 'src/third_party/android_deps/libs/org_robolectric_pluginapi': {
'packages': [
{
- 'package': 'chromium/third_party/android_deps/libs/org_plumelib_require_javadoc',
- 'version': 'version:0.1.0-cr0',
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_resources': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_robolectric': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_robolectric',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_sandbox': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_shadowapi': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_shadows_framework': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex',
+ 'version': 'version:4.3.1-cr1',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_utils': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_utils_reflector': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_threeten_threeten_extra': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_threeten_threeten_extra',
+ 'version': 'version:1.5.0-cr0',
},
],
'condition': 'checkout_android',
@@ -2712,11 +3231,12 @@ include_rules = [
"+test",
"+rtc_tools",
- # Abseil whitelist. Keep this in sync with abseil-in-webrtc.md.
+ # Abseil allowlist. Keep this in sync with abseil-in-webrtc.md.
"+absl/algorithm/algorithm.h",
"+absl/algorithm/container.h",
"+absl/base/attributes.h",
"+absl/base/config.h",
+ "+absl/base/const_init.h",
"+absl/base/macros.h",
"+absl/container/inlined_vector.h",
"+absl/memory/memory.h",
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 247b78eaa0..b5d4534c0b 100755
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -14,7 +14,7 @@ from collections import defaultdict
from contextlib import contextmanager
# Files and directories that are *skipped* by cpplint in the presubmit script.
-CPPLINT_BLACKLIST = [
+CPPLINT_EXCEPTIONS = [
'api/video_codecs/video_decoder.h',
'common_types.cc',
'common_types.h',
@@ -45,12 +45,15 @@ CPPLINT_BLACKLIST = [
#
# Justifications for each filter:
# - build/c++11 : Rvalue ref checks are unreliable (false positives),
-# include file and feature blacklists are
+# include file and feature blocklists are
# google3-specific.
+# - runtime/references : Mutable references are not banned by the Google
+# C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
# all move-related errors).
-BLACKLIST_LINT_FILTERS = [
+DISABLED_LINT_FILTERS = [
'-build/c++11',
+ '-runtime/references',
'-whitespace/operators',
]
@@ -94,15 +97,20 @@ LEGACY_API_DIRS = (
API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:]
# TARGET_RE matches a GN target, and extracts the target name and the contents.
-TARGET_RE = re.compile(r'(?P<indent>\s*)\w+\("(?P<target_name>\w+)"\) {'
- r'(?P<target_contents>.*?)'
- r'(?P=indent)}',
- re.MULTILINE | re.DOTALL)
+TARGET_RE = re.compile(
+ r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
+ r'(?P<target_contents>.*?)'
+ r'(?P=indent)}',
+ re.MULTILINE | re.DOTALL)
# SOURCES_RE matches a block of sources inside a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
re.MULTILINE | re.DOTALL)
+# DEPS_RE matches a block of sources inside a GN target.
+DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
+ re.MULTILINE | re.DOTALL)
+
# FILE_PATH_RE matchies a file path.
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
@@ -168,7 +176,7 @@ def CheckNativeApiHeaderChanges(input_api, output_api):
"""Checks to remind proper changing of native APIs."""
files = []
source_file_filter = lambda x: input_api.FilterSourceFile(
- x, white_list=[r'.+\.(gn|gni|h)$'])
+ x, allow_list=[r'.+\.(gn|gni|h)$'])
for f in input_api.AffectedSourceFiles(source_file_filter):
for path in API_DIRS:
dn = os.path.dirname(f.LocalPath())
@@ -254,9 +262,9 @@ def CheckNoFRIEND_TEST(input_api, output_api, # pylint: disable=invalid-name
'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
-def IsLintBlacklisted(blacklist_paths, file_path):
- """ Checks if a file is blacklisted for lint check."""
- for path in blacklist_paths:
+def IsLintDisabled(disabled_paths, file_path):
+ """ Checks if a file is disabled for lint check."""
+ for path in disabled_paths:
if file_path == path or os.path.dirname(file_path).startswith(path):
return True
return False
@@ -264,7 +272,7 @@ def IsLintBlacklisted(blacklist_paths, file_path):
def CheckApprovedFilesLintClean(input_api, output_api,
source_file_filter=None):
- """Checks that all new or non-blacklisted .cc and .h files pass cpplint.py.
+ """Checks that all new or non-exempt .cc and .h files pass cpplint.py.
This check is based on CheckChangeLintsClean in
depot_tools/presubmit_canned_checks.py but has less filters and only checks
added files."""
@@ -277,22 +285,22 @@ def CheckApprovedFilesLintClean(input_api, output_api,
cpplint._cpplint_state.ResetErrorCounts()
lint_filters = cpplint._Filters()
- lint_filters.extend(BLACKLIST_LINT_FILTERS)
+ lint_filters.extend(DISABLED_LINT_FILTERS)
cpplint._SetFilters(','.join(lint_filters))
- # Create a platform independent blacklist for cpplint.
- blacklist_paths = [input_api.os_path.join(*path.split('/'))
- for path in CPPLINT_BLACKLIST]
+ # Create a platform independent exempt list for cpplint.
+ disabled_paths = [input_api.os_path.join(*path.split('/'))
+ for path in CPPLINT_EXCEPTIONS]
# Use the strictest verbosity level for cpplint.py (level 1) which is the
# default when running cpplint.py from command line. To make it possible to
# work with not-yet-converted code, we're only applying it to new (or
- # moved/renamed) files and files not listed in CPPLINT_BLACKLIST.
+ # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS.
verbosity_level = 1
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
# Note that moved/renamed files also count as added.
- if f.Action() == 'A' or not IsLintBlacklisted(blacklist_paths,
+ if f.Action() == 'A' or not IsLintDisabled(disabled_paths,
f.LocalPath()):
files.append(f.AbsoluteLocalPath())
@@ -338,6 +346,37 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
return []
+def CheckAbseilDependencies(input_api, gn_files, output_api):
+ """Checks that Abseil dependencies are declared in `absl_deps`."""
+ absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL)
+ target_types_to_check = [
+ 'rtc_library',
+ 'rtc_source_set',
+ 'rtc_static_library',
+ 'webrtc_fuzzer_test',
+ ]
+ error_msg = ('Abseil dependencies in target "%s" (file: %s) '
+ 'should be moved to the "absl_deps" parameter.')
+ errors = []
+
+ for gn_file in gn_files:
+ gn_file_content = input_api.ReadFile(gn_file)
+ for target_match in TARGET_RE.finditer(gn_file_content):
+ target_type = target_match.group('target_type')
+ target_name = target_match.group('target_name')
+ target_contents = target_match.group('target_contents')
+ if target_type in target_types_to_check:
+ for deps_match in DEPS_RE.finditer(target_contents):
+ deps = deps_match.group('deps').splitlines()
+ for dep in deps:
+ if re.search(absl_re, dep):
+ errors.append(
+ output_api.PresubmitError(error_msg % (target_name,
+ gn_file.LocalPath())))
+ break # no need to warn more than once per target
+ return errors
+
+
def CheckNoMixingSources(input_api, gn_files, output_api):
"""Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.
@@ -566,8 +605,8 @@ def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
def CheckGnChanges(input_api, output_api):
file_filter = lambda x: (input_api.FilterSourceFile(
- x, white_list=(r'.+\.(gn|gni)$',),
- black_list=(r'.*/presubmit_checks_lib/testdata/.*',)))
+ x, allow_list=(r'.+\.(gn|gni)$',),
+ block_list=(r'.*/presubmit_checks_lib/testdata/.*',)))
gn_files = []
for f in input_api.AffectedSourceFiles(file_filter):
@@ -577,6 +616,7 @@ def CheckGnChanges(input_api, output_api):
if gn_files:
result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
+ result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files,
output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
@@ -756,7 +796,7 @@ def RunPythonTests(input_api, output_api):
input_api,
output_api,
directory,
- whitelist=[r'.+_test\.py$']))
+ allowlist=[r'.+_test\.py$']))
return input_api.RunTests(tests, parallel=True)
@@ -810,17 +850,18 @@ def CommonChecks(input_api, output_api):
results = []
# Filter out files that are in objc or ios dirs from being cpplint-ed since
# they do not follow C++ lint rules.
- black_list = input_api.DEFAULT_BLACK_LIST + (
+ exception_list = input_api.DEFAULT_BLACK_LIST + (
r".*\bobjc[\\\/].*",
r".*objc\.[hcm]+$",
)
- source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list)
+ source_file_filter = lambda x: input_api.FilterSourceFile(x, None,
+ exception_list)
results.extend(CheckApprovedFilesLintClean(
input_api, output_api, source_file_filter))
results.extend(input_api.canned_checks.CheckLicense(
input_api, output_api, _LicenseHeader(input_api)))
results.extend(input_api.canned_checks.RunPylint(input_api, output_api,
- black_list=(r'^base[\\\/].*\.py$',
+ block_list=(r'^base[\\\/].*\.py$',
r'^build[\\\/].*\.py$',
r'^buildtools[\\\/].*\.py$',
r'^infra[\\\/].*\.py$',
@@ -847,12 +888,12 @@ def CommonChecks(input_api, output_api):
# Also we will skip most checks for third_party directory.
third_party_filter_list = (r'^third_party[\\\/].+',)
eighty_char_sources = lambda x: input_api.FilterSourceFile(x,
- black_list=build_file_filter_list + objc_filter_list +
+ block_list=build_file_filter_list + objc_filter_list +
third_party_filter_list)
hundred_char_sources = lambda x: input_api.FilterSourceFile(x,
- white_list=objc_filter_list)
+ allow_list=objc_filter_list)
non_third_party_sources = lambda x: input_api.FilterSourceFile(x,
- black_list=third_party_filter_list)
+ block_list=third_party_filter_list)
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources))
@@ -900,6 +941,8 @@ def CommonChecks(input_api, output_api):
input_api, output_api, non_third_party_sources))
results.extend(CheckBannedAbslMakeUnique(
input_api, output_api, non_third_party_sources))
+ results.extend(CheckObjcApiSymbols(
+ input_api, output_api, non_third_party_sources))
return results
@@ -976,6 +1019,35 @@ def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
files)]
return []
+def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
+ rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}',
+ re.MULTILINE | re.DOTALL)
+ file_filter = lambda f: (f.LocalPath().endswith(('.h'))
+ and source_file_filter(f))
+
+ files = []
+ file_filter = lambda x: (input_api.FilterSourceFile(x)
+ and source_file_filter(x))
+ for f in input_api.AffectedSourceFiles(file_filter):
+ if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath():
+ continue
+ contents = input_api.ReadFile(f)
+ for match in rtc_objc_export.finditer(contents):
+ export_block = match.group(0)
+ if 'RTC_OBJC_TYPE' not in export_block:
+ files.append(f.LocalPath())
+
+ if len(files):
+ return [output_api.PresubmitError(
+ 'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
+ 'macro.\n\n' +
+ 'For example:\n' +
+ 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
+ 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
+ 'Please fix the following files:',
+ files)]
+ return []
+
def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
pattern = input_api.re.compile(
r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE)
@@ -1033,7 +1105,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
# eval-ed and thus doesn't have __file__.
error_msg = """{} should be listed in {}."""
results = []
- orphan_blacklist = [
+ exempt_paths = [
os.path.join('tools_webrtc', 'ios', 'SDK'),
]
with _AddToPath(input_api.os_path.join(
@@ -1042,7 +1114,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
from check_orphan_headers import IsHeaderInBuildGn
file_filter = lambda x: input_api.FilterSourceFile(
- x, black_list=orphan_blacklist) and source_file_filter(x)
+ x, block_list=exempt_paths) and source_file_filter(x)
for f in input_api.AffectedSourceFiles(file_filter):
if f.LocalPath().endswith('.h'):
file_path = os.path.abspath(f.LocalPath())
@@ -1061,7 +1133,7 @@ def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter):
error_msg = 'File {} must end with exactly one newline.'
results = []
file_filter = lambda x: input_api.FilterSourceFile(
- x, white_list=(r'.+\.proto$',)) and source_file_filter(x)
+ x, allow_list=(r'.+\.proto$',)) and source_file_filter(x)
for f in input_api.AffectedSourceFiles(file_filter):
file_path = f.LocalPath()
with open(file_path) as f:
diff --git a/README.chromium b/README.chromium
index 246c13dc09..58c8da8403 100644
--- a/README.chromium
+++ b/README.chromium
@@ -1,13 +1,14 @@
-Name: WebRTC
-URL: http://www.webrtc.org
-Version: 90
-License: BSD
-License File: LICENSE
-
-Description:
-WebRTC provides real time voice and video processing
-functionality to enable the implementation of
-PeerConnection/MediaStream.
-
-Third party code used in this project is described
-in the file LICENSE_THIRD_PARTY.
+Name: WebRTC
+URL: http://www.webrtc.org
+Version: 90
+CPEPrefix: cpe:/a:webrtc_project:webrtc:90
+License: BSD
+License File: LICENSE
+
+Description:
+WebRTC provides real time voice and video processing
+functionality to enable the implementation of
+PeerConnection/MediaStream.
+
+Third party code used in this project is described
+in the file LICENSE_THIRD_PARTY.
diff --git a/abseil-in-webrtc.md b/abseil-in-webrtc.md
index 0541d3c7a5..da03af07b1 100644
--- a/abseil-in-webrtc.md
+++ b/abseil-in-webrtc.md
@@ -23,9 +23,11 @@ adds the first use.
* `absl::variant` and related stuff from `absl/types/variant.h`.
* The functions in `absl/algorithm/algorithm.h` and
`absl/algorithm/container.h`.
+* `absl/base/const_init.h` for mutex initialization.
* The macros in `absl/base/attributes.h`, `absl/base/config.h` and
`absl/base/macros.h`.
+
## **Disallowed**
### `absl::make_unique`
@@ -34,7 +36,7 @@ adds the first use.
### `absl::Mutex`
-*Use `rtc::CriticalSection` instead.*
+*Use `webrtc::Mutex` instead.*
Chromium has a ban on new static initializers, and `absl::Mutex` uses
one. To make `absl::Mutex` available, we would need to nicely ask the
@@ -61,3 +63,12 @@ has decided if they will change `absl::Span` to match.
These are optimized for speed, not binary size. Even `StrCat` calls
with a modest number of arguments can easily add several hundred bytes
to the binary.
+
+## How to depend on Abseil
+
+For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`,
+dependencies on Abseil need to be listed in `absl_deps` instead of `deps`.
+
+This is needed in order to support the Abseil component build in Chromium. In
+such build mode, WebRTC will depend on a unique Abseil build target what will
+generate a shared library.
diff --git a/api/BUILD.gn b/api/BUILD.gn
index 8d3ee8f460..0d4ba2ca46 100644
--- a/api/BUILD.gn
+++ b/api/BUILD.gn
@@ -71,8 +71,8 @@ rtc_library("rtp_headers") {
"..:webrtc_common",
"units:timestamp",
"video:video_rtp_headers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_packet_info") {
@@ -90,8 +90,8 @@ rtc_library("rtp_packet_info") {
"..:webrtc_common",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("media_stream_interface") {
@@ -111,8 +111,8 @@ rtc_library("media_stream_interface") {
"../rtc_base/system:rtc_export",
"video:recordable_encoded_frame",
"video:video_frame",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("libjingle_peerconnection_api") {
@@ -166,12 +166,14 @@ rtc_library("libjingle_peerconnection_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
+ ":priority",
":rtc_error",
":rtc_stats_api",
":rtp_packet_info",
":rtp_parameters",
":rtp_transceiver_direction",
":scoped_refptr",
+ "adaptation:resource_adaptation_api",
"audio:audio_mixer_api",
"audio_codecs:audio_codecs_api",
"crypto:frame_decryptor_interface",
@@ -181,23 +183,15 @@ rtc_library("libjingle_peerconnection_api") {
"rtc_event_log",
"task_queue",
"transport:bitrate_settings",
- "transport:datagram_transport_interface",
"transport:enums",
"transport:network_control",
"transport:webrtc_key_value_config",
- "transport/media:audio_interfaces",
- "transport/media:media_transport_interface",
- "transport/media:video_interfaces",
"transport/rtp:rtp_source",
"units:data_rate",
"units:timestamp",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
- "//third_party/abseil-cpp/absl/algorithm:container",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/strings",
- "//third_party/abseil-cpp/absl/types:optional",
# Basically, don't add stuff here. You might break sensitive downstream
# targets like pnacl. API should not depend on anything outside of this
@@ -212,6 +206,12 @@ rtc_library("libjingle_peerconnection_api") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_source_set("frame_transformer_interface") {
@@ -221,6 +221,7 @@ rtc_source_set("frame_transformer_interface") {
":scoped_refptr",
"../rtc_base:refcount",
"video:encoded_frame",
+ "video:video_frame_metadata",
]
}
@@ -235,8 +236,8 @@ rtc_library("rtc_error") {
"../rtc_base:logging",
"../rtc_base:macromagic",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("packet_socket_factory") {
@@ -272,7 +273,6 @@ rtc_source_set("video_quality_test_fixture_api") {
"../test:video_test_common",
"transport:bitrate_settings",
"transport:network_control",
- "transport/media:media_transport_interface",
"video_codecs:video_codecs_api",
]
}
@@ -283,18 +283,23 @@ rtc_source_set("video_quality_analyzer_api") {
sources = [ "test/video_quality_analyzer_interface.h" ]
deps = [
+ ":array_view",
":stats_observer_interface",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
-rtc_source_set("track_id_stream_label_map") {
+rtc_source_set("track_id_stream_info_map") {
visibility = [ "*" ]
- sources = [ "test/track_id_stream_label_map.h" ]
+ sources = [ "test/track_id_stream_info_map.h" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("rtp_transceiver_direction") {
@@ -302,6 +307,10 @@ rtc_source_set("rtp_transceiver_direction") {
sources = [ "rtp_transceiver_direction.h" ]
}
+rtc_source_set("priority") {
+ sources = [ "priority.h" ]
+}
+
rtc_library("rtp_parameters") {
visibility = [ "*" ]
sources = [
@@ -312,18 +321,21 @@ rtc_library("rtp_parameters") {
]
deps = [
":array_view",
+ ":priority",
":rtp_transceiver_direction",
"../rtc_base:checks",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
if (is_android) {
- java_cpp_enum("rtp_parameters_enums") {
- sources = [ "rtp_parameters.h" ]
+ java_cpp_enum("priority_enums") {
+ sources = [ "priority.h" ]
}
}
@@ -334,7 +346,7 @@ rtc_source_set("audio_quality_analyzer_api") {
deps = [
":stats_observer_interface",
- ":track_id_stream_label_map",
+ ":track_id_stream_info_map",
]
}
@@ -343,11 +355,9 @@ rtc_source_set("stats_observer_interface") {
testonly = true
sources = [ "test/stats_observer_interface.h" ]
- deps = [
- # For api/stats_types.h
- ":libjingle_peerconnection_api",
- ":rtp_parameters",
- ]
+ deps = [ ":rtc_stats_api" ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("peer_connection_quality_test_fixture_api") {
@@ -365,18 +375,21 @@ rtc_source_set("peer_connection_quality_test_fixture_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
+ ":rtp_parameters",
":simulated_network_api",
":stats_observer_interface",
+ ":track_id_stream_info_map",
":video_quality_analyzer_api",
"../media:rtc_media_base",
"../rtc_base:rtc_base",
"rtc_event_log",
"task_queue",
"transport:network_control",
- "transport/media:media_transport_interface",
"units:time_delta",
"video:video_frame",
"video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -391,8 +404,8 @@ rtc_source_set("frame_generator_api") {
deps = [
":scoped_refptr",
"video:video_frame",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("test_dependency_factory") {
@@ -405,7 +418,7 @@ rtc_library("test_dependency_factory") {
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
- "../rtc_base:thread_checker",
+ "../rtc_base:platform_thread_types",
]
}
@@ -451,6 +464,7 @@ if (rtc_include_tests) {
deps = [
":audio_quality_analyzer_api",
":peer_connection_quality_test_fixture_api",
+ ":time_controller",
":video_quality_analyzer_api",
"../test/pc/e2e:peerconnection_quality_test",
]
@@ -469,8 +483,8 @@ rtc_library("create_frame_generator") {
"../rtc_base:checks",
"../system_wrappers",
"../test:frame_generator_impl",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("create_peer_connection_quality_test_frame_generator") {
@@ -486,8 +500,8 @@ rtc_library("create_peer_connection_quality_test_frame_generator") {
":peer_connection_quality_test_fixture_api",
"../rtc_base:checks",
"../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("libjingle_logging_api") {
@@ -540,8 +554,8 @@ rtc_library("audio_options_api") {
":array_view",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("transport_api") {
@@ -565,11 +579,8 @@ rtc_source_set("bitrate_allocation") {
rtc_source_set("simulated_network_api") {
visibility = [ "*" ]
sources = [ "test/simulated_network.h" ]
- deps = [
- "../rtc_base",
- "../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../rtc_base" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# TODO(srte): Move to network_emulation sub directory.
@@ -702,6 +713,8 @@ if (rtc_include_tests) {
"../modules/audio_coding:neteq_test_factory",
"../rtc_base:checks",
"neteq:neteq_api",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@@ -852,6 +865,7 @@ if (rtc_include_tests) {
}
rtc_source_set("mock_peerconnectioninterface") {
+ visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peerconnectioninterface.h" ]
@@ -861,6 +875,17 @@ if (rtc_include_tests) {
]
}
+ rtc_source_set("mock_peer_connection_factory_interface") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "test/mock_peer_connection_factory_interface.h" ]
+
+ deps = [
+ ":libjingle_peerconnection_api",
+ "../test:test_support",
+ ]
+ }
+
rtc_source_set("mock_rtp") {
testonly = true
sources = [
@@ -874,6 +899,16 @@ if (rtc_include_tests) {
]
}
+ rtc_source_set("mock_transformable_video_frame") {
+ testonly = true
+ sources = [ "test/mock_transformable_video_frame.h" ]
+
+ deps = [
+ ":frame_transformer_interface",
+ "../test:test_support",
+ ]
+ }
+
rtc_source_set("mock_video_bitrate_allocator") {
testonly = true
sources = [ "test/mock_video_bitrate_allocator.h" ]
@@ -931,39 +966,6 @@ if (rtc_include_tests) {
]
}
- rtc_source_set("fake_media_transport") {
- testonly = true
-
- sources = [
- "test/fake_datagram_transport.h",
- "test/fake_media_transport.h",
- ]
-
- deps = [
- "../rtc_base:checks",
- "transport:datagram_transport_interface",
- "transport/media:media_transport_interface",
- "//third_party/abseil-cpp/absl/algorithm:container",
- ]
- }
-
- rtc_library("loopback_media_transport") {
- testonly = true
-
- sources = [
- "test/loopback_media_transport.cc",
- "test/loopback_media_transport.h",
- ]
-
- deps = [
- "../rtc_base",
- "../rtc_base:checks",
- "transport:datagram_transport_interface",
- "transport/media:media_transport_interface",
- "//third_party/abseil-cpp/absl/algorithm:container",
- ]
- }
-
rtc_library("create_time_controller") {
visibility = [ "*" ]
testonly = true
@@ -994,7 +996,6 @@ if (rtc_include_tests) {
"rtp_parameters_unittest.cc",
"scoped_refptr_unittest.cc",
"test/create_time_controller_unittest.cc",
- "test/loopback_media_transport_unittest.cc",
]
deps = [
@@ -1002,7 +1003,6 @@ if (rtc_include_tests) {
":create_time_controller",
":function_view",
":libjingle_peerconnection_api",
- ":loopback_media_transport",
":rtc_error",
":rtc_event_log_output_file",
":rtp_packet_info",
@@ -1033,13 +1033,13 @@ if (rtc_include_tests) {
":dummy_peer_connection",
":fake_frame_decryptor",
":fake_frame_encryptor",
- ":fake_media_transport",
- ":loopback_media_transport",
":mock_audio_mixer",
":mock_frame_decryptor",
":mock_frame_encryptor",
+ ":mock_peer_connection_factory_interface",
":mock_peerconnectioninterface",
":mock_rtp",
+ ":mock_transformable_video_frame",
":mock_video_bitrate_allocator",
":mock_video_bitrate_allocator_factory",
":mock_video_codec_factory",
diff --git a/api/DEPS b/api/DEPS
index 1e92b12281..220b30b3cf 100644
--- a/api/DEPS
+++ b/api/DEPS
@@ -115,11 +115,6 @@ specific_include_rules = {
"+rtc_base/ref_count.h",
],
- "media_transport_interface\.h": [
- "+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h
- "+rtc_base/network_route.h",
- ],
-
"packet_socket_factory\.h": [
"+rtc_base/proxy_info.h",
"+rtc_base/async_packet_socket.h",
@@ -246,6 +241,10 @@ specific_include_rules = {
"+modules/audio_processing/include/audio_processing.h",
],
+ "echo_detector_creator\.h": [
+ "+modules/audio_processing/include/audio_processing.h",
+ ],
+
"fake_frame_decryptor\.h": [
"+rtc_base/ref_counted_object.h",
],
@@ -259,7 +258,6 @@ specific_include_rules = {
],
"simulated_network\.h": [
- "+rtc_base/critical_section.h",
"+rtc_base/random.h",
"+rtc_base/thread_annotations.h",
],
diff --git a/api/adaptation/BUILD.gn b/api/adaptation/BUILD.gn
new file mode 100644
index 0000000000..dc4c73711e
--- /dev/null
+++ b/api/adaptation/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved.
+#
+# Use of this source code is governed by a BSD - style license
+# that can be found in the LICENSE file in the root of the source
+# tree.An additional intellectual property rights grant can be found
+# in the file PATENTS.All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_source_set("resource_adaptation_api") {
+ visibility = [ "*" ]
+ sources = [
+ "resource.cc",
+ "resource.h",
+ ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../rtc_base:refcount",
+ "../../rtc_base:rtc_base_approved",
+ "../../rtc_base/system:rtc_export",
+ ]
+}
diff --git a/api/adaptation/DEPS b/api/adaptation/DEPS
new file mode 100644
index 0000000000..cab7fb8e14
--- /dev/null
+++ b/api/adaptation/DEPS
@@ -0,0 +1,7 @@
+specific_include_rules = {
+ "resource\.h": [
+ # ref_count.h is a public_deps of rtc_base_approved. Necessary because of
+ # rtc::RefCountInterface.
+ "+rtc_base/ref_count.h",
+ ],
+} \ No newline at end of file
diff --git a/api/adaptation/resource.cc b/api/adaptation/resource.cc
new file mode 100644
index 0000000000..0a9c83a311
--- /dev/null
+++ b/api/adaptation/resource.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/adaptation/resource.h"
+
+namespace webrtc {
+
+const char* ResourceUsageStateToString(ResourceUsageState usage_state) {
+ switch (usage_state) {
+ case ResourceUsageState::kOveruse:
+ return "kOveruse";
+ case ResourceUsageState::kUnderuse:
+ return "kUnderuse";
+ }
+}
+
+ResourceListener::~ResourceListener() {}
+
+Resource::Resource() {}
+
+Resource::~Resource() {}
+
+} // namespace webrtc
diff --git a/api/adaptation/resource.h b/api/adaptation/resource.h
new file mode 100644
index 0000000000..9b3968055f
--- /dev/null
+++ b/api/adaptation/resource.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2019 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ADAPTATION_RESOURCE_H_
+#define API_ADAPTATION_RESOURCE_H_
+
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class Resource;
+
+enum class ResourceUsageState {
+ // Action is needed to minimze the load on this resource.
+ kOveruse,
+ // Increasing the load on this resource is desired, if possible.
+ kUnderuse,
+};
+
+RTC_EXPORT const char* ResourceUsageStateToString(
+ ResourceUsageState usage_state);
+
+class RTC_EXPORT ResourceListener {
+ public:
+ virtual ~ResourceListener();
+
+ virtual void OnResourceUsageStateMeasured(
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) = 0;
+};
+
+// A Resource monitors an implementation-specific resource. It may report
+// kOveruse or kUnderuse when resource usage is high or low enough that we
+// should perform some sort of mitigation to fulfil the resource's constraints.
+//
+// The methods on this interface are invoked on the adaptation task queue.
+// Resource usage measurements may be performed on an any task queue.
+//
+// The Resource is reference counted to prevent use-after-free when posting
+// between task queues. As such, the implementation MUST NOT make any
+// assumptions about which task queue Resource is destructed on.
+class RTC_EXPORT Resource : public rtc::RefCountInterface {
+ public:
+ Resource();
+ // Destruction may happen on any task queue.
+ ~Resource() override;
+
+ virtual std::string Name() const = 0;
+ // The |listener| may be informed of resource usage measurements on any task
+ // queue, but not after this method is invoked with the null argument.
+ virtual void SetResourceListener(ResourceListener* listener) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_ADAPTATION_RESOURCE_H_
diff --git a/api/array_view_unittest.cc b/api/array_view_unittest.cc
index 8aa858805f..0357f68aa2 100644
--- a/api/array_view_unittest.cc
+++ b/api/array_view_unittest.cc
@@ -38,7 +38,7 @@ void CallFixed(ArrayView<T, N> av) {}
} // namespace
-TEST(ArrayViewTest, TestConstructFromPtrAndArray) {
+TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) {
char arr[] = "Arrr!";
const char carr[] = "Carrr!";
EXPECT_EQ(6u, Call<const char>(arr));
@@ -409,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) {
// swap(x, w); // Compile error, because different sizes.
}
-TEST(ArrayViewTest, TestIndexing) {
+TEST(ArrayViewDeathTest, TestIndexing) {
char arr[] = "abcdefg";
ArrayView<char> x(arr);
const ArrayView<char> y(arr);
diff --git a/api/audio/BUILD.gn b/api/audio/BUILD.gn
index 2405d9d041..117e5cc0ab 100644
--- a/api/audio/BUILD.gn
+++ b/api/audio/BUILD.gn
@@ -61,8 +61,8 @@ rtc_library("aec3_config_json") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_json",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("aec3_factory") {
@@ -87,3 +87,17 @@ rtc_source_set("echo_control") {
sources = [ "echo_control.h" ]
deps = [ "../../rtc_base:checks" ]
}
+
+rtc_source_set("echo_detector_creator") {
+ visibility = [ "*" ]
+ sources = [
+ "echo_detector_creator.cc",
+ "echo_detector_creator.h",
+ ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_processing",
+ "../../rtc_base:refcount",
+ ]
+}
diff --git a/api/audio/audio_frame.cc b/api/audio/audio_frame.cc
index 47459ac333..c6e5cf4dd6 100644
--- a/api/audio/audio_frame.cc
+++ b/api/audio/audio_frame.cc
@@ -11,6 +11,8 @@
#include "api/audio/audio_frame.h"
#include <string.h>
+#include <algorithm>
+#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/time_utils.h"
@@ -22,6 +24,28 @@ AudioFrame::AudioFrame() {
static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes");
}
+void swap(AudioFrame& a, AudioFrame& b) {
+ using std::swap;
+ swap(a.timestamp_, b.timestamp_);
+ swap(a.elapsed_time_ms_, b.elapsed_time_ms_);
+ swap(a.ntp_time_ms_, b.ntp_time_ms_);
+ swap(a.samples_per_channel_, b.samples_per_channel_);
+ swap(a.sample_rate_hz_, b.sample_rate_hz_);
+ swap(a.num_channels_, b.num_channels_);
+ swap(a.channel_layout_, b.channel_layout_);
+ swap(a.speech_type_, b.speech_type_);
+ swap(a.vad_activity_, b.vad_activity_);
+ swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_);
+ swap(a.packet_infos_, b.packet_infos_);
+ const size_t length_a = a.samples_per_channel_ * a.num_channels_;
+ const size_t length_b = b.samples_per_channel_ * b.num_channels_;
+ RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples);
+ RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples);
+ std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_);
+ swap(a.muted_, b.muted_);
+ swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_);
+}
+
void AudioFrame::Reset() {
ResetWithoutMuting();
muted_ = true;
diff --git a/api/audio/audio_frame.h b/api/audio/audio_frame.h
index 06b0b28b38..78539f57eb 100644
--- a/api/audio/audio_frame.h
+++ b/api/audio/audio_frame.h
@@ -14,6 +14,8 @@
#include <stddef.h>
#include <stdint.h>
+#include <utility>
+
#include "api/audio/channel_layout.h"
#include "api/rtp_packet_infos.h"
#include "rtc_base/constructor_magic.h"
@@ -58,6 +60,8 @@ class AudioFrame {
AudioFrame();
+ friend void swap(AudioFrame& a, AudioFrame& b);
+
// Resets all members to their default state.
void Reset();
// Same as Reset(), but leaves mute state unchanged. Muting a frame requires
diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc
new file mode 100644
index 0000000000..4c3d9e61fe
--- /dev/null
+++ b/api/audio/echo_detector_creator.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/audio/echo_detector_creator.h"
+
+#include "modules/audio_processing/residual_echo_detector.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<EchoDetector> CreateEchoDetector() {
+ return new rtc::RefCountedObject<ResidualEchoDetector>();
+}
+
+} // namespace webrtc
diff --git a/api/audio/echo_detector_creator.h b/api/audio/echo_detector_creator.h
new file mode 100644
index 0000000000..5ba171de97
--- /dev/null
+++ b/api/audio/echo_detector_creator.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_
+#define API_AUDIO_ECHO_DETECTOR_CREATOR_H_
+
+#include "api/scoped_refptr.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+
+// Returns an instance of the WebRTC implementation of a residual echo detector.
+// It can be provided to the webrtc::AudioProcessingBuilder to obtain the
+// usual residual echo metrics.
+rtc::scoped_refptr<EchoDetector> CreateEchoDetector();
+
+} // namespace webrtc
+
+#endif // API_AUDIO_ECHO_DETECTOR_CREATOR_H_
diff --git a/api/audio/test/audio_frame_unittest.cc b/api/audio/test/audio_frame_unittest.cc
index dbf45ceabc..f8d3318274 100644
--- a/api/audio/test/audio_frame_unittest.cc
+++ b/api/audio/test/audio_frame_unittest.cc
@@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) {
EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples)));
}
+TEST(AudioFrameTest, SwapFrames) {
+ AudioFrame frame1, frame2;
+ int16_t samples1[kNumChannelsMono * kSamplesPerChannel];
+ for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
+ samples1[i] = i;
+ }
+ frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz,
+ AudioFrame::kPLC, AudioFrame::kVadActive,
+ kNumChannelsMono);
+ frame1.set_absolute_capture_timestamp_ms(12345678);
+ const auto frame1_channel_layout = frame1.channel_layout();
+
+ int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)];
+ for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
+ ++i) {
+ samples2[i] = 1000 + i;
+ }
+ frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1,
+ kSampleRateHz + 1, AudioFrame::kNormalSpeech,
+ AudioFrame::kVadPassive, kNumChannelsMono + 1);
+ const auto frame2_channel_layout = frame2.channel_layout();
+
+ swap(frame1, frame2);
+
+ EXPECT_EQ(kTimestamp + 1, frame1.timestamp_);
+ ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_);
+ EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_);
+ EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_);
+ ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_);
+ for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
+ ++i) {
+ EXPECT_EQ(samples2[i], frame1.data()[i]);
+ }
+ EXPECT_FALSE(frame1.absolute_capture_timestamp_ms());
+ EXPECT_EQ(frame2_channel_layout, frame1.channel_layout());
+
+ EXPECT_EQ(kTimestamp, frame2.timestamp_);
+ ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_);
+ EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_);
+ EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_);
+ ASSERT_EQ(kNumChannelsMono, frame2.num_channels_);
+ for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
+ EXPECT_EQ(samples1[i], frame2.data()[i]);
+ }
+ EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms());
+ EXPECT_EQ(frame1_channel_layout, frame2.channel_layout());
+}
+
} // namespace webrtc
diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn
index 987e20f178..b6292de570 100644
--- a/api/audio_codecs/BUILD.gn
+++ b/api/audio_codecs/BUILD.gn
@@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") {
"../../rtc_base:sanitizer",
"../../rtc_base/system:rtc_export",
"../units:time_delta",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/L16/BUILD.gn b/api/audio_codecs/L16/BUILD.gn
index bef671237e..1f7a1e5a0b 100644
--- a/api/audio_codecs/L16/BUILD.gn
+++ b/api/audio_codecs/L16/BUILD.gn
@@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") {
"../../../modules/audio_coding:pcm16b",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/g711/BUILD.gn b/api/audio_codecs/g711/BUILD.gn
index ba0586b901..92d77bed9f 100644
--- a/api/audio_codecs/g711/BUILD.gn
+++ b/api/audio_codecs/g711/BUILD.gn
@@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") {
"../../../modules/audio_coding:g711",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/g722/BUILD.gn b/api/audio_codecs/g722/BUILD.gn
index 8738ef889a..a186eabbb7 100644
--- a/api/audio_codecs/g722/BUILD.gn
+++ b/api/audio_codecs/g722/BUILD.gn
@@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") {
"../../../modules/audio_coding:g722",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/ilbc/BUILD.gn b/api/audio_codecs/ilbc/BUILD.gn
index 066a73cff2..b6a5045eaf 100644
--- a/api/audio_codecs/ilbc/BUILD.gn
+++ b/api/audio_codecs/ilbc/BUILD.gn
@@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") {
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -46,6 +48,8 @@ rtc_library("audio_decoder_ilbc") {
"..:audio_codecs_api",
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/isac/BUILD.gn b/api/audio_codecs/isac/BUILD.gn
index 9eb32147e1..6ff6e5f092 100644
--- a/api/audio_codecs/isac/BUILD.gn
+++ b/api/audio_codecs/isac/BUILD.gn
@@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/api/audio_codecs/opus/BUILD.gn b/api/audio_codecs/opus/BUILD.gn
index 5fb626d990..586e9b3dd8 100644
--- a/api/audio_codecs/opus/BUILD.gn
+++ b/api/audio_codecs/opus/BUILD.gn
@@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") {
deps = [
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
if (rtc_opus_variable_complexity) {
defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ]
@@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"../opus:audio_encoder_opus_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_decoder_multiopus") {
@@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") {
"../../../modules/audio_coding:webrtc_multiopus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/api/audio_options.h b/api/audio_options.h
index b714998c6b..1b0d1ad0bd 100644
--- a/api/audio_options.h
+++ b/api/audio_options.h
@@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions {
// and check if any other AudioOptions members are unused.
absl::optional<bool> combined_audio_video_bwe;
// Enable audio network adaptor.
+ // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in
+ // RtpEncodingParameters.
absl::optional<bool> audio_network_adaptor;
// Config string for audio network adaptor.
absl::optional<std::string> audio_network_adaptor_config;
diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h
index e08830feaf..5b2b1263ab 100644
--- a/api/data_channel_interface.h
+++ b/api/data_channel_interface.h
@@ -20,6 +20,7 @@
#include <string>
#include "absl/types/optional.h"
+#include "api/priority.h"
#include "api/rtc_error.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
@@ -61,6 +62,9 @@ struct DataChannelInit {
// The stream id, or SID, for SCTP data channels. -1 if unset (see above).
int id = -1;
+
+ // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member
+ absl::optional<Priority> priority;
};
// At the JavaScript level, data can be passed in as a string or a blob, so
@@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface {
// If negotiated in-band, this ID will be populated once the DTLS role is
// determined, and until then this will return -1.
virtual int id() const = 0;
+ virtual Priority priority() const { return Priority::kLow; }
virtual DataState state() const = 0;
// When state is kClosed, and the DataChannel was not closed using
// the closing procedure, returns the error information about the closing.
diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h
index e712b3c190..2cfe6edb88 100644
--- a/api/frame_transformer_interface.h
+++ b/api/frame_transformer_interface.h
@@ -16,6 +16,7 @@
#include "api/scoped_refptr.h"
#include "api/video/encoded_frame.h"
+#include "api/video/video_frame_metadata.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
@@ -48,6 +49,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface {
// TODO(bugs.webrtc.org/11380) remove from interface once
// webrtc::RtpDescriptorAuthentication is exposed in api/.
virtual std::vector<uint8_t> GetAdditionalData() const = 0;
+
+ virtual const VideoFrameMetadata& GetMetadata() const = 0;
};
// Extends the TransformableFrameInterface to expose audio-specific information.
diff --git a/api/neteq/BUILD.gn b/api/neteq/BUILD.gn
index 1ab02ec92b..4e85c4d268 100644
--- a/api/neteq/BUILD.gn
+++ b/api/neteq/BUILD.gn
@@ -23,8 +23,8 @@ rtc_source_set("neteq_api") {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"../audio_codecs:audio_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("custom_neteq_factory") {
@@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") {
":tick_timer",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("default_neteq_controller_factory") {
diff --git a/api/neteq/neteq.cc b/api/neteq/neteq.cc
index 155ddf2cf3..e8ef4dbd39 100644
--- a/api/neteq/neteq.cc
+++ b/api/neteq/neteq.cc
@@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const {
<< ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate="
<< (enable_fast_accelerate ? "true" : "false")
<< ", enable_muted_state=" << (enable_muted_state ? "true" : "false")
- << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false");
+ << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false")
+ << ", extra_output_delay_ms=" << extra_output_delay_ms;
return ss.str();
}
diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h
index f62d3795f0..15ad3aac0e 100644
--- a/api/neteq/neteq.h
+++ b/api/neteq/neteq.h
@@ -138,6 +138,10 @@ class NetEq {
bool enable_rtx_handling = false;
absl::optional<AudioCodecPairId> codec_pair_id;
bool for_test_no_time_stretching = false; // Use only for testing.
+ // Adds extra delay to the output of NetEq, without affecting jitter or
+ // loss behavior. This is mainly for testing. Value must be a non-negative
+ // multiple of 10 ms.
+ int extra_output_delay_ms = 0;
};
enum ReturnCodes { kOK = 0, kFail = -1 };
diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc
index 0c25405784..f82e84b80f 100644
--- a/api/peer_connection_interface.cc
+++ b/api/peer_connection_interface.cc
@@ -53,27 +53,6 @@ RTCError PeerConnectionInterface::SetConfiguration(
return RTCError();
}
-RTCError PeerConnectionInterface::SetBitrate(const BitrateSettings& bitrate) {
- BitrateParameters bitrate_parameters;
- bitrate_parameters.min_bitrate_bps = bitrate.min_bitrate_bps;
- bitrate_parameters.current_bitrate_bps = bitrate.start_bitrate_bps;
- bitrate_parameters.max_bitrate_bps = bitrate.max_bitrate_bps;
- return SetBitrate(bitrate_parameters);
-}
-
-RTCError PeerConnectionInterface::SetBitrate(
- const BitrateParameters& bitrate_parameters) {
- BitrateSettings bitrate;
- bitrate.min_bitrate_bps = bitrate_parameters.min_bitrate_bps;
- bitrate.start_bitrate_bps = bitrate_parameters.current_bitrate_bps;
- bitrate.max_bitrate_bps = bitrate_parameters.max_bitrate_bps;
- return SetBitrate(bitrate);
-}
-
-PeerConnectionInterface::BitrateParameters::BitrateParameters() = default;
-
-PeerConnectionInterface::BitrateParameters::~BitrateParameters() = default;
-
PeerConnectionDependencies::PeerConnectionDependencies(
PeerConnectionObserver* observer_in)
: observer(observer_in) {}
diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h
index 1d81de74d8..fd4d2df6a7 100644
--- a/api/peer_connection_interface.h
+++ b/api/peer_connection_interface.h
@@ -73,6 +73,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/async_resolver_factory.h"
#include "api/audio/audio_mixer.h"
#include "api/audio_codecs/audio_decoder_factory.h"
@@ -102,7 +103,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/enums.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/turn_customizer.h"
@@ -613,34 +613,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// correctly. This flag will be deprecated soon. Do not rely on it.
bool active_reset_srtp_params = false;
- // DEPRECATED. Do not use. This option is ignored by peer connection.
- // TODO(webrtc:9719): Delete this option.
- bool use_media_transport = false;
-
- // DEPRECATED. Do not use. This option is ignored by peer connection.
- // TODO(webrtc:9719): Delete this option.
- bool use_media_transport_for_data_channels = false;
-
- // If MediaTransportFactory is provided in PeerConnectionFactory, this flag
- // informs PeerConnection that it should use the DatagramTransportInterface
- // for packets instead DTLS. It's invalid to set it to |true| if the
- // MediaTransportFactory wasn't provided.
- absl::optional<bool> use_datagram_transport;
-
- // If MediaTransportFactory is provided in PeerConnectionFactory, this flag
- // informs PeerConnection that it should use the DatagramTransport's
- // implementation of DataChannelTransportInterface for data channels instead
- // of SCTP-DTLS.
- absl::optional<bool> use_datagram_transport_for_data_channels;
-
- // If true, this PeerConnection will only use datagram transport for data
- // channels when receiving an incoming offer that includes datagram
- // transport parameters. It will not request use of a datagram transport
- // when it creates the initial, outgoing offer.
- // This setting only applies when |use_datagram_transport_for_data_channels|
- // is true.
- absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
-
// Defines advanced optional cryptographic settings related to SRTP and
// frame encryption for native WebRTC. Setting this will overwrite any
// settings set in PeerConnectionFactory (which is deprecated).
@@ -666,8 +638,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// Whether network condition based codec switching is allowed.
absl::optional<bool> allow_codec_switching;
- bool enable_simulcast_stats = true;
-
//
// Don't forget to update operator== if adding something.
//
@@ -1045,28 +1015,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
virtual bool RemoveIceCandidates(
const std::vector<cricket::Candidate>& candidates) = 0;
- // 0 <= min <= current <= max should hold for set parameters.
- struct BitrateParameters {
- BitrateParameters();
- ~BitrateParameters();
-
- absl::optional<int> min_bitrate_bps;
- absl::optional<int> current_bitrate_bps;
- absl::optional<int> max_bitrate_bps;
- };
-
// SetBitrate limits the bandwidth allocated for all RTP streams sent by
// this PeerConnection. Other limitations might affect these limits and
// are respected (for example "b=AS" in SDP).
//
// Setting |current_bitrate_bps| will reset the current bitrate estimate
// to the provided value.
- virtual RTCError SetBitrate(const BitrateSettings& bitrate);
-
- // TODO(nisse): Deprecated - use version above. These two default
- // implementations require subclasses to implement one or the other
- // of the methods.
- virtual RTCError SetBitrate(const BitrateParameters& bitrate_parameters);
+ virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0;
// Enable/disable playout of received audio streams. Enabled by default. Note
// that even if playout is enabled, streams will only be played out if the
@@ -1118,6 +1073,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
return absl::nullopt;
}
+ // When a resource is overused, the PeerConnection will try to reduce the load
+ // on the sysem, for example by reducing the resolution or frame rate of
+ // encoded streams. The Resource API allows injecting platform-specific usage
+ // measurements. The conditions to trigger kOveruse or kUnderuse are up to the
+ // implementation.
+ // TODO(hbos): Make pure virtual when implemented by downstream projects.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {}
+
// Start RtcEventLog using an existing output-sink. Takes ownership of
// |output| and passes it on to Call, which will take the ownership. If the
// operation fails the output will be closed and deallocated. The event log
@@ -1332,7 +1295,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final {
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
- std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
std::unique_ptr<WebRtcKeyValueConfig> trials;
};
diff --git a/api/peer_connection_proxy.h b/api/peer_connection_proxy.h
index c278308ccb..23887e53da 100644
--- a/api/peer_connection_proxy.h
+++ b/api/peer_connection_proxy.h
@@ -132,6 +132,7 @@ PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state)
PROXY_METHOD0(PeerConnectionState, peer_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD0(absl::optional<bool>, can_trickle_ice_candidates)
+PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr<Resource>)
PROXY_METHOD2(bool,
StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>,
diff --git a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc b/api/priority.h
index f7403b9567..4953e453a3 100644
--- a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc
+++ b/api/priority.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,16 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <stddef.h>
-#include <stdint.h>
-
-#include "api/array_view.h"
-#include "call/rtp_rtcp_demuxer_helper.h"
+#ifndef API_PRIORITY_H_
+#define API_PRIORITY_H_
namespace webrtc {
-void FuzzOneInput(const uint8_t* data, size_t size) {
- ParseRtcpPacketSenderSsrc(rtc::MakeArrayView(data, size));
-}
+// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
+enum class Priority {
+ kVeryLow,
+ kLow,
+ kMedium,
+ kHigh,
+};
} // namespace webrtc
+
+#endif // API_PRIORITY_H_
diff --git a/api/proxy.h b/api/proxy.h
index 385992e659..b1ebe31acd 100644
--- a/api/proxy.h
+++ b/api/proxy.h
@@ -55,6 +55,7 @@
#include <memory>
#include <string>
#include <tuple>
+#include <type_traits>
#include <utility>
#include "api/scoped_refptr.h"
@@ -396,6 +397,16 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
return call.Marshal(RTC_FROM_HERE, worker_thread_); \
}
+// For use when returning purely const state (set during construction).
+// Use with caution. This method should only be used when the return value will
+// always be the same.
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ static_assert(!std::is_pointer<r>::value, "Type is a pointer"); \
+ static_assert(!std::is_reference<r>::value, "Type is a reference"); \
+ return c_->method(); \
+ }
+
} // namespace webrtc
#endif // API_PROXY_H_
diff --git a/api/rtc_event_log_output_file_unittest.cc b/api/rtc_event_log_output_file_unittest.cc
index 071909b2c5..4274215491 100644
--- a/api/rtc_event_log_output_file_unittest.cc
+++ b/api/rtc_event_log_output_file_unittest.cc
@@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) {
+class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {};
+
+TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) {
RtcEventLogOutputFile output_file(output_file_name_, 2);
ASSERT_FALSE(output_file.Write("abc"));
ASSERT_FALSE(output_file.IsActive());
EXPECT_DEATH(output_file.Write("abc"), "");
}
-TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) {
+TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) {
// Keeping in a temporary unique_ptr to make it clearer that the death is
// triggered by construction, not destruction.
std::unique_ptr<RtcEventLogOutputFile> output_file;
diff --git a/api/rtp_headers.cc b/api/rtp_headers.cc
index bf973b6fe5..e0ad9eb26e 100644
--- a/api/rtp_headers.cc
+++ b/api/rtp_headers.cc
@@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension()
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
- has_video_timing(false),
- has_frame_marking(false),
- frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {}
+ has_video_timing(false) {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;
diff --git a/api/rtp_headers.h b/api/rtp_headers.h
index 163347f675..454149ca6e 100644
--- a/api/rtp_headers.h
+++ b/api/rtp_headers.h
@@ -21,10 +21,9 @@
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT (build/include)
namespace webrtc {
@@ -143,9 +142,6 @@ struct RTPHeaderExtension {
bool has_video_timing;
VideoSendTiming video_timing;
- bool has_frame_marking;
- FrameMarking frame_marking;
-
PlayoutDelay playout_delay = {-1, -1};
// For identification of a stream when ssrc is not signaled. See
diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc
index a05b2bfa7b..28acb68be6 100644
--- a/api/rtp_parameters.cc
+++ b/api/rtp_parameters.cc
@@ -18,6 +18,20 @@
namespace webrtc {
+const char* DegradationPreferenceToString(
+ DegradationPreference degradation_preference) {
+ switch (degradation_preference) {
+ case DegradationPreference::DISABLED:
+ return "disabled";
+ case DegradationPreference::MAINTAIN_FRAMERATE:
+ return "maintain-framerate";
+ case DegradationPreference::MAINTAIN_RESOLUTION:
+ return "maintain-resolution";
+ case DegradationPreference::BALANCED:
+ return "balanced";
+ }
+}
+
const double kDefaultBitratePriority = 1.0;
RtcpFeedback::RtcpFeedback() = default;
@@ -105,7 +119,6 @@ constexpr char RtpExtension::kAbsoluteCaptureTimeUri[];
constexpr char RtpExtension::kVideoRotationUri[];
constexpr char RtpExtension::kVideoContentTypeUri[];
constexpr char RtpExtension::kVideoTimingUri[];
-constexpr char RtpExtension::kFrameMarkingUri[];
constexpr char RtpExtension::kGenericFrameDescriptorUri00[];
constexpr char RtpExtension::kDependencyDescriptorUri[];
constexpr char RtpExtension::kTransportSequenceNumberUri[];
@@ -144,7 +157,6 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) {
uri == webrtc::RtpExtension::kVideoContentTypeUri ||
uri == webrtc::RtpExtension::kVideoTimingUri ||
uri == webrtc::RtpExtension::kMidUri ||
- uri == webrtc::RtpExtension::kFrameMarkingUri ||
uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 ||
uri == webrtc::RtpExtension::kDependencyDescriptorUri ||
uri == webrtc::RtpExtension::kColorSpaceUri ||
diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h
index 49c1e0c885..b667bf812c 100644
--- a/api/rtp_parameters.h
+++ b/api/rtp_parameters.h
@@ -20,6 +20,7 @@
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/media_types.h"
+#include "api/priority.h"
#include "api/rtp_transceiver_direction.h"
#include "rtc_base/system/rtc_export.h"
@@ -91,15 +92,10 @@ enum class DegradationPreference {
BALANCED,
};
-RTC_EXPORT extern const double kDefaultBitratePriority;
+RTC_EXPORT const char* DegradationPreferenceToString(
+ DegradationPreference degradation_preference);
-// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
-enum class Priority {
- kVeryLow,
- kLow,
- kMedium,
- kHigh,
-};
+RTC_EXPORT extern const double kDefaultBitratePriority;
struct RTC_EXPORT RtcpFeedback {
RtcpFeedbackType type = RtcpFeedbackType::CCM;
@@ -226,7 +222,7 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
bool preferred_encrypt = false;
// The direction of the extension. The kStopped value is only used with
- // RtpTransceiverInterface::header_extensions_offered() and
+ // RtpTransceiverInterface::HeaderExtensionsToOffer() and
// SetOfferedRtpHeaderExtensions().
RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv;
@@ -314,10 +310,6 @@ struct RTC_EXPORT RtpExtension {
static constexpr char kVideoTimingUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
- // Header extension for video frame marking.
- static constexpr char kFrameMarkingUri[] =
- "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
-
// Experimental codec agnostic frame descriptor.
static constexpr char kGenericFrameDescriptorUri00[] =
"http://www.webrtc.org/experiments/rtp-hdrext/"
@@ -481,6 +473,10 @@ struct RTC_EXPORT RtpEncodingParameters {
// Called "encodingId" in ORTC.
std::string rid;
+ // Allow dynamic frame length changes for audio:
+ // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime
+ bool adaptive_ptime = false;
+
bool operator==(const RtpEncodingParameters& o) const {
return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority &&
network_priority == o.network_priority &&
@@ -489,7 +485,8 @@ struct RTC_EXPORT RtpEncodingParameters {
max_framerate == o.max_framerate &&
num_temporal_layers == o.num_temporal_layers &&
scale_resolution_down_by == o.scale_resolution_down_by &&
- active == o.active && rid == o.rid;
+ active == o.active && rid == o.rid &&
+ adaptive_ptime == o.adaptive_ptime;
}
bool operator!=(const RtpEncodingParameters& o) const {
return !(*this == o);
diff --git a/api/rtp_transceiver_interface.cc b/api/rtp_transceiver_interface.cc
index d4e2b26e33..e795e51dfb 100644
--- a/api/rtp_transceiver_interface.cc
+++ b/api/rtp_transceiver_interface.cc
@@ -41,4 +41,10 @@ RtpTransceiverInterface::HeaderExtensionsToOffer() const {
return {};
}
+webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) {
+ return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
} // namespace webrtc
diff --git a/api/rtp_transceiver_interface.h b/api/rtp_transceiver_interface.h
index 9dbafd46ec..13277d9a50 100644
--- a/api/rtp_transceiver_interface.h
+++ b/api/rtp_transceiver_interface.h
@@ -133,6 +133,13 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface {
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const;
+ // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation
+ // so that it negotiates use of header extensions which are not kStopped.
+ // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
+ virtual webrtc::RTCError SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer);
+
protected:
~RtpTransceiverInterface() override = default;
};
diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h
index d45902e0a5..5de5b7fbb0 100644
--- a/api/stats/rtc_stats.h
+++ b/api/stats/rtc_stats.h
@@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface {
std::string ValueToString() const override;
std::string ValueToJson() const override;
+ template <typename U>
+ inline T ValueOrDefault(U default_value) const {
+ if (is_defined()) {
+ return *(*this);
+ }
+ return default_value;
+ }
+
// Assignment operators.
T& operator=(const T& value) {
value_ = value;
diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h
index 28d841db09..7d8f5f5f9a 100644
--- a/api/stats/rtcstats_objects.h
+++ b/api/stats/rtcstats_objects.h
@@ -134,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats {
RTCStatsMember<std::string> label;
RTCStatsMember<std::string> protocol;
- RTCStatsMember<int32_t> datachannelid;
+ RTCStatsMember<int32_t> data_channel_identifier;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDataChannelState>"?
RTCStatsMember<std::string> state;
RTCStatsMember<uint32_t> messages_sent;
@@ -419,6 +419,18 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats {
// TODO(hbos): Collect and populate this value for both "audio" and "video",
// currently not collected for "video". https://bugs.webrtc.org/7065
RTCStatsMember<double> jitter;
+ RTCStatsMember<double> jitter_buffer_delay;
+ RTCStatsMember<uint64_t> jitter_buffer_emitted_count;
+ RTCStatsMember<uint64_t> total_samples_received;
+ RTCStatsMember<uint64_t> concealed_samples;
+ RTCStatsMember<uint64_t> silent_concealed_samples;
+ RTCStatsMember<uint64_t> concealment_events;
+ RTCStatsMember<uint64_t> inserted_samples_for_deceleration;
+ RTCStatsMember<uint64_t> removed_samples_for_acceleration;
+ RTCStatsMember<double> audio_level;
+ RTCStatsMember<double> total_audio_energy;
+ RTCStatsMember<double> total_samples_duration;
+ RTCStatsMember<int32_t> frames_received;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
RTCStatsMember<double> round_trip_time;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
@@ -441,8 +453,13 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats {
RTCStatsMember<double> gap_loss_rate;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
RTCStatsMember<double> gap_discard_rate;
+ RTCStatsMember<uint32_t> frame_width;
+ RTCStatsMember<uint32_t> frame_height;
+ RTCStatsMember<uint32_t> frame_bit_depth;
+ RTCStatsMember<double> frames_per_second;
RTCStatsMember<uint32_t> frames_decoded;
RTCStatsMember<uint32_t> key_frames_decoded;
+ RTCStatsMember<uint32_t> frames_dropped;
RTCStatsMember<double> total_decode_time;
RTCStatsMember<double> total_inter_frame_delay;
RTCStatsMember<double> total_squared_inter_frame_delay;
@@ -602,7 +619,9 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats {
~RTCTransportStats() override;
RTCStatsMember<uint64_t> bytes_sent;
+ RTCStatsMember<uint64_t> packets_sent;
RTCStatsMember<uint64_t> bytes_received;
+ RTCStatsMember<uint64_t> packets_received;
RTCStatsMember<std::string> rtcp_transport_stats_id;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDtlsTransportState>"?
RTCStatsMember<std::string> dtls_state;
diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn
index 4c9f591ec1..1072057e3f 100644
--- a/api/task_queue/BUILD.gn
+++ b/api/task_queue/BUILD.gn
@@ -21,6 +21,8 @@ rtc_library("task_queue") {
"../../rtc_base:checks",
"../../rtc_base:macromagic",
"../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
@@ -51,6 +53,8 @@ rtc_library("task_queue_test") {
deps = [
"../../../webrtc_overrides:webrtc_component",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
@@ -62,6 +66,8 @@ rtc_library("task_queue_test") {
"../../rtc_base:timeutils",
"../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
diff --git a/api/task_queue/task_queue_test.cc b/api/task_queue/task_queue_test.cc
index a8a799f11b..0d411d2d9c 100644
--- a/api/task_queue/task_queue_test.cc
+++ b/api/task_queue/task_queue_test.cc
@@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) {
rtc::Event event;
auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent");
- // We're not running a task, so there shouldn't be a current queue.
+ // We're not running a task, so |queue| shouldn't be current.
+ // Note that because rtc::Thread also supports the TQ interface and
+ // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that
+ // means that TaskQueueBase::Current() will still return a valid value.
EXPECT_FALSE(queue->IsCurrent());
- EXPECT_FALSE(TaskQueueBase::Current());
queue->PostTask(ToQueuedTask([&event, &queue] {
EXPECT_TRUE(queue->IsCurrent());
@@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) {
EXPECT_TRUE(done.Wait(1000));
}
+// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase.
+// Tests are instantiated next to the concrete implementation(s).
+// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest);
+
} // namespace
} // namespace webrtc
diff --git a/api/test/DEPS b/api/test/DEPS
index 1a02bf16e9..4f985965fc 100644
--- a/api/test/DEPS
+++ b/api/test/DEPS
@@ -13,7 +13,6 @@ specific_include_rules = {
],
"loopback_media_transport\.h": [
"+rtc_base/async_invoker.h",
- "+rtc_base/critical_section.h",
"+rtc_base/thread.h",
"+rtc_base/thread_checker.h",
],
diff --git a/api/test/audio_quality_analyzer_interface.h b/api/test/audio_quality_analyzer_interface.h
index 88392d7fd2..c1044795d1 100644
--- a/api/test/audio_quality_analyzer_interface.h
+++ b/api/test/audio_quality_analyzer_interface.h
@@ -14,7 +14,7 @@
#include <string>
#include "api/test/stats_observer_interface.h"
-#include "api/test/track_id_stream_label_map.h"
+#include "api/test/track_id_stream_info_map.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@@ -31,7 +31,7 @@ class AudioQualityAnalyzerInterface : public StatsObserverInterface {
// stream_id matching. The caller is responsible for ensuring the
// AnalyzerHelper outlives the instance of the AudioQualityAnalyzerInterface.
virtual void Start(std::string test_case_name,
- TrackIdStreamLabelMap* analyzer_helper) = 0;
+ TrackIdStreamInfoMap* analyzer_helper) = 0;
// Will be called by the framework at the end of the test. The analyzer
// has to finalize all its stats and it should report them.
diff --git a/api/test/audioproc_float.cc b/api/test/audioproc_float.cc
index bba9c622a1..c8d7ff7193 100644
--- a/api/test/audioproc_float.cc
+++ b/api/test/audioproc_float.cc
@@ -17,6 +17,12 @@
namespace webrtc {
namespace test {
+int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]) {
+ return AudioprocFloatImpl(std::move(audio_processing), argc, argv);
+}
+
int AudioprocFloat(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[]) {
diff --git a/api/test/audioproc_float.h b/api/test/audioproc_float.h
index 2625e6ad9a..fec2ad11fa 100644
--- a/api/test/audioproc_float.h
+++ b/api/test/audioproc_float.h
@@ -22,6 +22,22 @@ namespace test {
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.
+// Any audio_processing object specified in the input is used for the
+// simulation. The optional |audio_processing| object provides the
+// AudioProcessing instance that is used during the simulation. Note that when
+// the audio_processing object is specified all functionality that relies on
+// using the AudioProcessingBuilder is deactivated, since the AudioProcessing
+// object is already created and the builder is not used in the simulation. It
+// is needed to pass the command line flags as |argc| and |argv|, so these can
+// be interpreted properly by the utility. To see a list of all supported
+// command line flags, run the executable with the '--help' flag.
+int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]);
+
+// This is an interface for the audio processing simulation utility. This
+// utility can be used to simulate the audioprocessing module using a recording
+// (either an AEC dump or wav files), and generate the output as a wav file.
// The |ap_builder| object will be used to create the AudioProcessing instance
// that is used during the simulation. The |ap_builder| supports setting of
// injectable components, which will be passed on to the created AudioProcessing
diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc
index 47c5c6ec84..4cece5b286 100644
--- a/api/test/compile_all_headers.cc
+++ b/api/test/compile_all_headers.cc
@@ -27,16 +27,17 @@
// "api/test/videocodec_test_fixture.h"
// "api/test/videocodec_test_stats.h"
+#include "api/test/dummy_peer_connection.h"
#include "api/test/fake_frame_decryptor.h"
#include "api/test/fake_frame_encryptor.h"
-#include "api/test/fake_media_transport.h"
-#include "api/test/loopback_media_transport.h"
#include "api/test/mock_audio_mixer.h"
#include "api/test/mock_frame_decryptor.h"
#include "api/test/mock_frame_encryptor.h"
+#include "api/test/mock_peer_connection_factory_interface.h"
#include "api/test/mock_peerconnectioninterface.h"
#include "api/test/mock_rtpreceiver.h"
#include "api/test/mock_rtpsender.h"
+#include "api/test/mock_transformable_video_frame.h"
#include "api/test/mock_video_bitrate_allocator.h"
#include "api/test/mock_video_bitrate_allocator_factory.h"
#include "api/test/mock_video_decoder.h"
diff --git a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h
index c57c34874c..f444743786 100644
--- a/api/test/create_network_emulation_manager.h
+++ b/api/test/create_network_emulation_manager.h
@@ -1,4 +1,3 @@
-
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
@@ -18,6 +17,7 @@
namespace webrtc {
+// Returns a non-null NetworkEmulationManager instance.
std::unique_ptr<NetworkEmulationManager> CreateNetworkEmulationManager(
TimeMode mode = TimeMode::kRealTime);
diff --git a/api/test/create_peerconnection_quality_test_fixture.cc b/api/test/create_peerconnection_quality_test_fixture.cc
index 1e027bf31a..2d9d0821fc 100644
--- a/api/test/create_peerconnection_quality_test_fixture.cc
+++ b/api/test/create_peerconnection_quality_test_fixture.cc
@@ -13,6 +13,7 @@
#include <memory>
#include <utility>
+#include "api/test/time_controller.h"
#include "test/pc/e2e/peer_connection_quality_test.h"
namespace webrtc {
@@ -21,11 +22,12 @@ namespace webrtc_pc_e2e {
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,
+ TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer) {
return std::make_unique<PeerConnectionE2EQualityTest>(
- std::move(test_case_name), std::move(audio_quality_analyzer),
- std::move(video_quality_analyzer));
+ std::move(test_case_name), time_controller,
+ std::move(audio_quality_analyzer), std::move(video_quality_analyzer));
}
} // namespace webrtc_pc_e2e
diff --git a/api/test/create_peerconnection_quality_test_fixture.h b/api/test/create_peerconnection_quality_test_fixture.h
index 330d86de02..95b9ced5d2 100644
--- a/api/test/create_peerconnection_quality_test_fixture.h
+++ b/api/test/create_peerconnection_quality_test_fixture.h
@@ -15,19 +15,25 @@
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/time_controller.h"
#include "api/test/video_quality_analyzer_interface.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// API is in development. Can be changed/removed without notice.
+
// Create test fixture to establish test call between Alice and Bob.
// During the test Alice will be caller and Bob will answer the call.
// |test_case_name| is a name of test case, that will be used for all metrics
// reporting.
+// |time_controller| is used to manage all rtc::Thread's and TaskQueue
+// instances. Instance of |time_controller| have to outlive created fixture.
+// Returns a non-null PeerConnectionE2EQualityTestFixture instance.
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,
+ TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer);
diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc
index d3b046bd61..a2c0cb713f 100644
--- a/api/test/create_time_controller.cc
+++ b/api/test/create_time_controller.cc
@@ -35,13 +35,18 @@ std::unique_ptr<CallFactoryInterface> CreateTimeControllerBasedCallFactory(
explicit TimeControllerBasedCallFactory(TimeController* time_controller)
: time_controller_(time_controller) {}
Call* CreateCall(const Call::Config& config) override {
- return Call::Create(config, time_controller_->GetClock(),
- time_controller_->CreateProcessThread("CallModules"),
+ if (!module_thread_) {
+ module_thread_ = SharedModuleThread::Create(
+ time_controller_->CreateProcessThread("CallModules"),
+ [this]() { module_thread_ = nullptr; });
+ }
+ return Call::Create(config, time_controller_->GetClock(), module_thread_,
time_controller_->CreateProcessThread("Pacer"));
}
private:
TimeController* time_controller_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_;
};
return std::make_unique<TimeControllerBasedCallFactory>(time_controller);
}
diff --git a/api/test/dummy_peer_connection.h b/api/test/dummy_peer_connection.h
index 102b0684c0..97a97d0c81 100644
--- a/api/test/dummy_peer_connection.h
+++ b/api/test/dummy_peer_connection.h
@@ -194,10 +194,6 @@ class DummyPeerConnection : public PeerConnectionInterface {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
}
- RTCError SetBitrate(const BitrateParameters& bitrate_parameters) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
- }
-
void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; }
void SetAudioRecording(bool recording) override {
FATAL() << "Not implemented";
diff --git a/api/test/fake_datagram_transport.h b/api/test/fake_datagram_transport.h
deleted file mode 100644
index 847b4d842a..0000000000
--- a/api/test/fake_datagram_transport.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
-#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
-
-#include <cstddef>
-#include <string>
-
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-
-namespace webrtc {
-
-// Maxmum size of datagrams sent by |FakeDatagramTransport|.
-constexpr size_t kMaxFakeDatagramSize = 1000;
-
-// Fake datagram transport. Does not support making an actual connection
-// or sending data. Only used for tests that need to stub out a transport.
-class FakeDatagramTransport : public DatagramTransportInterface {
- public:
- FakeDatagramTransport(
- const MediaTransportSettings& settings,
- std::string transport_parameters,
- const std::function<bool(absl::string_view, absl::string_view)>&
- are_parameters_compatible)
- : settings_(settings),
- transport_parameters_(transport_parameters),
- are_parameters_compatible_(are_parameters_compatible) {}
-
- ~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); }
-
- void Connect(rtc::PacketTransportInternal* packet_transport) override {
- packet_transport_ = packet_transport;
- }
-
- CongestionControlInterface* congestion_control() override {
- return nullptr; // Datagram interface doesn't provide this yet.
- }
-
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override {
- state_callback_ = callback;
- }
-
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override {
- return RTCError::OK();
- }
-
- size_t GetLargestDatagramSize() const override {
- return kMaxFakeDatagramSize;
- }
-
- void SetDatagramSink(DatagramSinkInterface* sink) override {}
-
- std::string GetTransportParameters() const override {
- if (settings_.remote_transport_parameters) {
- return *settings_.remote_transport_parameters;
- }
- return transport_parameters_;
- }
-
- RTCError SetRemoteTransportParameters(
- absl::string_view remote_parameters) override {
- if (are_parameters_compatible_(GetTransportParameters(),
- remote_parameters)) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Incompatible remote transport parameters");
- }
-
- RTCError OpenChannel(int channel_id) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCError CloseChannel(int channel_id) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- void SetDataSink(DataChannelSink* /*sink*/) override {}
-
- bool IsReadyToSend() const override { return false; }
-
- rtc::PacketTransportInternal* packet_transport() { return packet_transport_; }
-
- void set_state(webrtc::MediaTransportState state) {
- if (state_callback_) {
- state_callback_->OnStateChanged(state);
- }
- }
-
- const MediaTransportSettings& settings() { return settings_; }
-
- private:
- const MediaTransportSettings settings_;
- const std::string transport_parameters_;
- const std::function<bool(absl::string_view, absl::string_view)>
- are_parameters_compatible_;
-
- rtc::PacketTransportInternal* packet_transport_ = nullptr;
- MediaTransportStateCallback* state_callback_ = nullptr;
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
diff --git a/api/test/fake_media_transport.h b/api/test/fake_media_transport.h
deleted file mode 100644
index 530394710a..0000000000
--- a/api/test/fake_media_transport.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_
-#define API_TEST_FAKE_MEDIA_TRANSPORT_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "absl/algorithm/container.h"
-#include "api/test/fake_datagram_transport.h"
-#include "api/transport/media/media_transport_interface.h"
-
-namespace webrtc {
-
-// Fake media transport factory creates fake media transport.
-// Also creates fake datagram transport, since both media and datagram
-// transports are created by |MediaTransportFactory|.
-class FakeMediaTransportFactory : public MediaTransportFactory {
- public:
- explicit FakeMediaTransportFactory(
- const absl::optional<std::string>& transport_offer = "")
- : transport_offer_(transport_offer) {}
- ~FakeMediaTransportFactory() = default;
-
- std::string GetTransportName() const override { return "fake"; }
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return std::unique_ptr<DatagramTransportInterface>(
- new FakeDatagramTransport(settings, transport_offer_.value_or(""),
- transport_parameters_comparison_));
- }
-
- void set_transport_parameters_comparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- transport_parameters_comparison_ = std::move(comparison);
- }
-
- private:
- const absl::optional<std::string> transport_offer_;
- std::function<bool(absl::string_view, absl::string_view)>
- transport_parameters_comparison_ =
- [](absl::string_view local, absl::string_view remote) {
- return local == remote;
- };
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_
diff --git a/api/test/loopback_media_transport.cc b/api/test/loopback_media_transport.cc
deleted file mode 100644
index 18ce93cd7e..0000000000
--- a/api/test/loopback_media_transport.cc
+++ /dev/null
@@ -1,373 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/test/loopback_media_transport.h"
-
-#include <memory>
-
-#include "absl/algorithm/container.h"
-#include "rtc_base/time_utils.h"
-
-namespace webrtc {
-
-namespace {
-
-constexpr size_t kLoopbackMaxDatagramSize = 1200;
-
-class WrapperDatagramTransport : public DatagramTransportInterface {
- public:
- explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped)
- : wrapped_(wrapped) {}
-
- // Datagram transport overrides.
- void Connect(rtc::PacketTransportInternal* packet_transport) override {
- return wrapped_->Connect(packet_transport);
- }
-
- CongestionControlInterface* congestion_control() override {
- return wrapped_->congestion_control();
- }
-
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override {
- return wrapped_->SetTransportStateCallback(callback);
- }
-
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override {
- return wrapped_->SendDatagram(data, datagram_id);
- }
-
- size_t GetLargestDatagramSize() const override {
- return wrapped_->GetLargestDatagramSize();
- }
-
- void SetDatagramSink(DatagramSinkInterface* sink) override {
- return wrapped_->SetDatagramSink(sink);
- }
-
- std::string GetTransportParameters() const override {
- return wrapped_->GetTransportParameters();
- }
-
- RTCError SetRemoteTransportParameters(absl::string_view parameters) override {
- return wrapped_->SetRemoteTransportParameters(parameters);
- }
-
- // Data channel overrides.
- RTCError OpenChannel(int channel_id) override {
- return wrapped_->OpenChannel(channel_id);
- }
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override {
- return wrapped_->SendData(channel_id, params, buffer);
- }
-
- RTCError CloseChannel(int channel_id) override {
- return wrapped_->CloseChannel(channel_id);
- }
-
- void SetDataSink(DataChannelSink* sink) override {
- wrapped_->SetDataSink(sink);
- }
-
- bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); }
-
- private:
- DatagramTransportInterface* wrapped_;
-};
-
-} // namespace
-
-WrapperMediaTransportFactory::WrapperMediaTransportFactory(
- DatagramTransportInterface* wrapped_datagram_transport)
- : wrapped_datagram_transport_(wrapped_datagram_transport) {}
-
-WrapperMediaTransportFactory::WrapperMediaTransportFactory(
- MediaTransportFactory* wrapped)
- : wrapped_factory_(wrapped) {}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-WrapperMediaTransportFactory::CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
-}
-
-RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
-WrapperMediaTransportFactory::CreateDatagramTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- created_transport_count_++;
- if (wrapped_factory_) {
- return wrapped_factory_->CreateDatagramTransport(network_thread, settings);
- }
- return {
- std::make_unique<WrapperDatagramTransport>(wrapped_datagram_transport_)};
-}
-
-std::string WrapperMediaTransportFactory::GetTransportName() const {
- if (wrapped_factory_) {
- return wrapped_factory_->GetTransportName();
- }
- return "wrapped-transport";
-}
-
-int WrapperMediaTransportFactory::created_transport_count() const {
- return created_transport_count_;
-}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-WrapperMediaTransportFactory::CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
-}
-
-MediaTransportPair::MediaTransportPair(rtc::Thread* thread)
- : first_datagram_transport_(thread),
- second_datagram_transport_(thread),
- first_factory_(&first_datagram_transport_),
- second_factory_(&second_datagram_transport_) {
- first_datagram_transport_.Connect(&second_datagram_transport_);
- second_datagram_transport_.Connect(&first_datagram_transport_);
-}
-
-MediaTransportPair::~MediaTransportPair() = default;
-
-MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport(
- rtc::Thread* thread)
- : thread_(thread) {}
-
-MediaTransportPair::LoopbackDataChannelTransport::
- ~LoopbackDataChannelTransport() {
- RTC_CHECK(data_sink_ == nullptr);
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::Connect(
- LoopbackDataChannelTransport* other) {
- other_ = other;
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel(
- int channel_id) {
- // No-op. No need to open channels for the loopback.
- return RTCError::OK();
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData(
- int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_,
- [this, channel_id, params, buffer] {
- other_->OnData(channel_id, params.type, buffer);
- });
- return RTCError::OK();
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel(
- int channel_id) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, channel_id] {
- other_->OnRemoteCloseChannel(channel_id);
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnChannelClosed(channel_id);
- }
- });
- return RTCError::OK();
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink(
- DataChannelSink* sink) {
- rtc::CritScope lock(&sink_lock_);
- data_sink_ = sink;
- if (data_sink_ && ready_to_send_) {
- data_sink_->OnReadyToSend();
- }
-}
-
-bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const {
- rtc::CritScope lock(&sink_lock_);
- return ready_to_send_;
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() {
- invoker_.Flush(thread_);
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnData(
- int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer) {
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnDataReceived(channel_id, type, buffer);
- }
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel(
- int channel_id) {
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnChannelClosing(channel_id);
- data_sink_->OnChannelClosed(channel_id);
- }
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend(
- bool ready_to_send) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, ready_to_send] {
- rtc::CritScope lock(&sink_lock_);
- ready_to_send_ = ready_to_send;
- // Propagate state to data channel sink, if present.
- if (data_sink_ && ready_to_send_) {
- data_sink_->OnReadyToSend();
- }
- });
-}
-
-MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport(
- rtc::Thread* thread)
- : thread_(thread), dc_transport_(thread) {}
-
-void MediaTransportPair::LoopbackDatagramTransport::Connect(
- LoopbackDatagramTransport* other) {
- other_ = other;
- dc_transport_.Connect(&other->dc_transport_);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::Connect(
- rtc::PacketTransportInternal* packet_transport) {
- if (state_after_connect_) {
- SetState(*state_after_connect_);
- }
-}
-
-CongestionControlInterface*
-MediaTransportPair::LoopbackDatagramTransport::congestion_control() {
- return nullptr;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback(
- MediaTransportStateCallback* callback) {
- RTC_DCHECK_RUN_ON(thread_);
- state_callback_ = callback;
- if (state_callback_) {
- state_callback_->OnStateChanged(state_);
- }
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram(
- rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) {
- rtc::CopyOnWriteBuffer buffer;
- buffer.SetData(data.data(), data.size());
- invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] {
- RTC_DCHECK_RUN_ON(thread_);
- other_->DeliverDatagram(std::move(buffer));
- if (sink_) {
- DatagramAck ack;
- ack.datagram_id = datagram_id;
- ack.receive_timestamp = Timestamp::Micros(rtc::TimeMicros());
- sink_->OnDatagramAcked(ack);
- }
- });
- return RTCError::OK();
-}
-
-size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize()
- const {
- return kLoopbackMaxDatagramSize;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink(
- DatagramSinkInterface* sink) {
- RTC_DCHECK_RUN_ON(thread_);
- sink_ = sink;
-}
-
-std::string
-MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const {
- return transport_parameters_;
-}
-
-RTCError
-MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters(
- absl::string_view remote_parameters) {
- RTC_DCHECK_RUN_ON(thread_);
- if (transport_parameters_comparison_(GetTransportParameters(),
- remote_parameters)) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Incompatible remote transport parameters");
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel(
- int channel_id) {
- return dc_transport_.OpenChannel(channel_id);
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::SendData(
- int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) {
- return dc_transport_.SendData(channel_id, params, buffer);
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel(
- int channel_id) {
- return dc_transport_.CloseChannel(channel_id);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetDataSink(
- DataChannelSink* sink) {
- dc_transport_.SetDataSink(sink);
-}
-
-bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const {
- return dc_transport_.IsReadyToSend();
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetState(
- MediaTransportState state) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, state] {
- RTC_DCHECK_RUN_ON(thread_);
- state_ = state;
- if (state_callback_) {
- state_callback_->OnStateChanged(state_);
- }
- });
- dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect(
- MediaTransportState state) {
- state_after_connect_ = state;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() {
- dc_transport_.FlushAsyncInvokes();
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram(
- rtc::CopyOnWriteBuffer buffer) {
- RTC_DCHECK_RUN_ON(thread_);
- if (sink_) {
- sink_->OnDatagramReceived(buffer);
- }
-}
-
-} // namespace webrtc
diff --git a/api/test/loopback_media_transport.h b/api/test/loopback_media_transport.h
deleted file mode 100644
index 468965ba31..0000000000
--- a/api/test/loopback_media_transport.h
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
-#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-#include "rtc_base/async_invoker.h"
-#include "rtc_base/critical_section.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_checker.h"
-
-namespace webrtc {
-
-// Wrapper used to hand out unique_ptrs to loopback media
-// transport without ownership changes to the underlying
-// transport.
-// It works in two modes:
-// It can either wrap a factory, or it can wrap an existing interface.
-// In the former mode, it delegates the work to the wrapped factory.
-// In the latter mode, it always returns static instance of the transport
-// interface.
-//
-// Example use:
-// Factory wrap_static_interface = Wrapper(media_transport_interface);
-// Factory wrap_factory = Wrapper(wrap_static_interface);
-// The second factory may be created multiple times, and ownership may be passed
-// to the client. The first factory counts the number of invocations of
-// CreateMediaTransport();
-class WrapperMediaTransportFactory : public MediaTransportFactory {
- public:
- explicit WrapperMediaTransportFactory(
- DatagramTransportInterface* wrapped_datagram_transport);
- explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped);
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- std::string GetTransportName() const override;
-
- int created_transport_count() const;
-
- private:
- DatagramTransportInterface* wrapped_datagram_transport_ = nullptr;
- MediaTransportFactory* wrapped_factory_ = nullptr;
- int created_transport_count_ = 0;
-};
-
-// Contains two MediaTransportsInterfaces that are connected to each other.
-// Currently supports audio only.
-class MediaTransportPair {
- public:
- struct Stats {
- int sent_audio_frames = 0;
- int received_audio_frames = 0;
- int sent_video_frames = 0;
- int received_video_frames = 0;
- };
-
- explicit MediaTransportPair(rtc::Thread* thread);
- ~MediaTransportPair();
-
- DatagramTransportInterface* first_datagram_transport() {
- return &first_datagram_transport_;
- }
- DatagramTransportInterface* second_datagram_transport() {
- return &second_datagram_transport_;
- }
-
- std::unique_ptr<MediaTransportFactory> first_factory() {
- return std::make_unique<WrapperMediaTransportFactory>(&first_factory_);
- }
-
- std::unique_ptr<MediaTransportFactory> second_factory() {
- return std::make_unique<WrapperMediaTransportFactory>(&second_factory_);
- }
-
- void SetState(MediaTransportState state) {
- first_datagram_transport_.SetState(state);
- second_datagram_transport_.SetState(state);
- }
-
- void SetFirstState(MediaTransportState state) {
- first_datagram_transport_.SetState(state);
- }
-
- void SetSecondStateAfterConnect(MediaTransportState state) {
- second_datagram_transport_.SetState(state);
- }
-
- void SetFirstDatagramTransportParameters(const std::string& params) {
- first_datagram_transport_.set_transport_parameters(params);
- }
-
- void SetSecondDatagramTransportParameters(const std::string& params) {
- second_datagram_transport_.set_transport_parameters(params);
- }
-
- void SetFirstDatagramTransportParametersComparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- first_datagram_transport_.set_transport_parameters_comparison(
- std::move(comparison));
- }
-
- void SetSecondDatagramTransportParametersComparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- second_datagram_transport_.set_transport_parameters_comparison(
- std::move(comparison));
- }
-
- void FlushAsyncInvokes() {
- first_datagram_transport_.FlushAsyncInvokes();
- second_datagram_transport_.FlushAsyncInvokes();
- }
-
- int first_factory_transport_count() const {
- return first_factory_.created_transport_count();
- }
-
- int second_factory_transport_count() const {
- return second_factory_.created_transport_count();
- }
-
- private:
- class LoopbackDataChannelTransport : public DataChannelTransportInterface {
- public:
- explicit LoopbackDataChannelTransport(rtc::Thread* thread);
- ~LoopbackDataChannelTransport() override;
-
- void Connect(LoopbackDataChannelTransport* other);
-
- RTCError OpenChannel(int channel_id) override;
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override;
-
- RTCError CloseChannel(int channel_id) override;
-
- bool IsReadyToSend() const override;
-
- void SetDataSink(DataChannelSink* sink) override;
-
- void OnReadyToSend(bool ready_to_send);
-
- void FlushAsyncInvokes();
-
- private:
- void OnData(int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer);
-
- void OnRemoteCloseChannel(int channel_id);
-
- rtc::Thread* const thread_;
- rtc::CriticalSection sink_lock_;
- DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr;
-
- bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false;
-
- LoopbackDataChannelTransport* other_;
-
- rtc::AsyncInvoker invoker_;
- };
-
- class LoopbackDatagramTransport : public DatagramTransportInterface {
- public:
- explicit LoopbackDatagramTransport(rtc::Thread* thread);
-
- void Connect(LoopbackDatagramTransport* other);
-
- // Datagram transport overrides.
- void Connect(rtc::PacketTransportInternal* packet_transport) override;
- CongestionControlInterface* congestion_control() override;
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override;
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override;
- size_t GetLargestDatagramSize() const override;
- void SetDatagramSink(DatagramSinkInterface* sink) override;
- std::string GetTransportParameters() const override;
- RTCError SetRemoteTransportParameters(
- absl::string_view remote_parameters) override;
-
- // Data channel overrides.
- RTCError OpenChannel(int channel_id) override;
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override;
- RTCError CloseChannel(int channel_id) override;
- void SetDataSink(DataChannelSink* sink) override;
- bool IsReadyToSend() const override;
-
- // Loopback-specific functionality.
- void SetState(MediaTransportState state);
-
- // When Connect() is called, the datagram transport will enter this state.
- // This is useful for mimicking zero-RTT connectivity, for example.
- void SetStateAfterConnect(MediaTransportState state);
- void FlushAsyncInvokes();
-
- void set_transport_parameters(const std::string& value) {
- transport_parameters_ = value;
- }
-
- void set_transport_parameters_comparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- thread_->Invoke<void>(
- RTC_FROM_HERE, [this, comparison = std::move(comparison)] {
- RTC_DCHECK_RUN_ON(thread_);
- transport_parameters_comparison_ = std::move(comparison);
- });
- }
-
- private:
- void DeliverDatagram(rtc::CopyOnWriteBuffer buffer);
-
- rtc::Thread* thread_;
- LoopbackDataChannelTransport dc_transport_;
-
- MediaTransportState state_ RTC_GUARDED_BY(thread_) =
- MediaTransportState::kPending;
- DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr;
- MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) =
- nullptr;
- LoopbackDatagramTransport* other_;
-
- std::string transport_parameters_;
- std::function<bool(absl::string_view, absl::string_view)>
- transport_parameters_comparison_ RTC_GUARDED_BY(thread_) =
- [](absl::string_view a, absl::string_view b) { return a == b; };
-
- absl::optional<MediaTransportState> state_after_connect_;
-
- rtc::AsyncInvoker invoker_;
- };
-
- LoopbackDatagramTransport first_datagram_transport_;
- LoopbackDatagramTransport second_datagram_transport_;
- WrapperMediaTransportFactory first_factory_;
- WrapperMediaTransportFactory second_factory_;
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
diff --git a/api/test/loopback_media_transport_unittest.cc b/api/test/loopback_media_transport_unittest.cc
deleted file mode 100644
index f036de3eae..0000000000
--- a/api/test/loopback_media_transport_unittest.cc
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/test/loopback_media_transport.h"
-
-#include <algorithm>
-#include <memory>
-#include <vector>
-
-#include "test/gmock.h"
-
-namespace webrtc {
-
-namespace {
-
-class MockMediaTransportAudioSinkInterface
- : public MediaTransportAudioSinkInterface {
- public:
- MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame));
-};
-
-class MockMediaTransportVideoSinkInterface
- : public MediaTransportVideoSinkInterface {
- public:
- MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame));
-};
-
-class MockMediaTransportKeyFrameRequestCallback
- : public MediaTransportKeyFrameRequestCallback {
- public:
- MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t));
-};
-
-class MockDataChannelSink : public DataChannelSink {
- public:
- MOCK_METHOD3(OnDataReceived,
- void(int, DataMessageType, const rtc::CopyOnWriteBuffer&));
- MOCK_METHOD1(OnChannelClosing, void(int));
- MOCK_METHOD1(OnChannelClosed, void(int));
- MOCK_METHOD0(OnReadyToSend, void());
-};
-
-class MockStateCallback : public MediaTransportStateCallback {
- public:
- MOCK_METHOD1(OnStateChanged, void(MediaTransportState));
-};
-
-} // namespace
-
-TEST(LoopbackMediaTransport, DataDeliveredToSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink sink;
- transport_pair.first_datagram_transport()->SetDataSink(&sink);
-
- const int channel_id = 1;
- EXPECT_CALL(
- sink, OnDataReceived(
- channel_id, DataMessageType::kText,
- ::testing::Property<rtc::CopyOnWriteBuffer, const char*>(
- &rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo"))));
-
- SendDataParams params;
- params.type = DataMessageType::kText;
- rtc::CopyOnWriteBuffer buffer("foo");
- transport_pair.second_datagram_transport()->SendData(channel_id, params,
- buffer);
-
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, CloseDeliveredToSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink first_sink;
- transport_pair.first_datagram_transport()->SetDataSink(&first_sink);
-
- MockDataChannelSink second_sink;
- transport_pair.second_datagram_transport()->SetDataSink(&second_sink);
-
- const int channel_id = 1;
- {
- ::testing::InSequence s;
- EXPECT_CALL(second_sink, OnChannelClosing(channel_id));
- EXPECT_CALL(second_sink, OnChannelClosed(channel_id));
- EXPECT_CALL(first_sink, OnChannelClosed(channel_id));
- }
-
- transport_pair.first_datagram_transport()->CloseChannel(channel_id);
-
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
- transport_pair.second_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockStateCallback state_callback;
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
-
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, ChangedStateDeliveredWhenCallbackSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-
- MockStateCallback state_callback;
-
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockStateCallback state_callback;
-
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0);
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend());
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend());
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-} // namespace webrtc
diff --git a/api/test/mock_audio_mixer.h b/api/test/mock_audio_mixer.h
index aee717bebf..88dc108ca3 100644
--- a/api/test/mock_audio_mixer.h
+++ b/api/test/mock_audio_mixer.h
@@ -19,12 +19,9 @@ namespace test {
class MockAudioMixer : public AudioMixer {
public:
- MOCK_METHOD(bool, AddSource, (Source * audio_source), (override));
- MOCK_METHOD(void, RemoveSource, (Source * audio_source), (override));
- MOCK_METHOD(void,
- Mix,
- (size_t number_of_channels, AudioFrame* audio_frame_for_mixing),
- (override));
+ MOCK_METHOD(bool, AddSource, (Source*), (override));
+ MOCK_METHOD(void, RemoveSource, (Source*), (override));
+ MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override));
};
} // namespace test
} // namespace webrtc
diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h
new file mode 100644
index 0000000000..19c3f4063e
--- /dev/null
+++ b/api/test/mock_peer_connection_factory_interface.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
+#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockPeerConnectionFactoryInterface final
+ : public rtc::RefCountedObject<webrtc::PeerConnectionFactoryInterface> {
+ public:
+ rtc::scoped_refptr<MockPeerConnectionFactoryInterface> Create() {
+ return new MockPeerConnectionFactoryInterface();
+ }
+
+ MOCK_METHOD(void, SetOptions, (const Options&), (override));
+ MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
+ CreatePeerConnection,
+ (const PeerConnectionInterface::RTCConfiguration&,
+ PeerConnectionDependencies),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
+ CreatePeerConnection,
+ (const PeerConnectionInterface::RTCConfiguration&,
+ std::unique_ptr<cricket::PortAllocator>,
+ std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
+ PeerConnectionObserver*),
+ (override));
+ MOCK_METHOD(RtpCapabilities,
+ GetRtpSenderCapabilities,
+ (cricket::MediaType),
+ (const override));
+ MOCK_METHOD(RtpCapabilities,
+ GetRtpReceiverCapabilities,
+ (cricket::MediaType),
+ (const override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamInterface>,
+ CreateLocalMediaStream,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<AudioSourceInterface>,
+ CreateAudioSource,
+ (const cricket::AudioOptions&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<VideoTrackInterface>,
+ CreateVideoTrack,
+ (const std::string&, VideoTrackSourceInterface*),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<AudioTrackInterface>,
+ CreateAudioTrack,
+ (const std::string&, AudioSourceInterface*),
+ (override));
+ MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override));
+ MOCK_METHOD(void, StopAecDump, (), (override));
+
+ protected:
+ MockPeerConnectionFactoryInterface() = default;
+};
+
+} // namespace webrtc
+
+#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h
index 6b247b7cee..be34df0b32 100644
--- a/api/test/mock_peerconnectioninterface.h
+++ b/api/test/mock_peerconnectioninterface.h
@@ -167,7 +167,6 @@ class MockPeerConnectionInterface
(const std::vector<cricket::Candidate>&),
(override));
MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override));
- MOCK_METHOD(RTCError, SetBitrate, (const BitrateParameters&), (override));
MOCK_METHOD(void, SetAudioPlayout, (bool), (override));
MOCK_METHOD(void, SetAudioRecording, (bool), (override));
MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
diff --git a/api/test/mock_transformable_video_frame.h b/api/test/mock_transformable_video_frame.h
new file mode 100644
index 0000000000..36798b5d73
--- /dev/null
+++ b/api/test/mock_transformable_video_frame.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
+#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
+
+#include <vector>
+
+#include "api/frame_transformer_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockTransformableVideoFrame
+ : public webrtc::TransformableVideoFrameInterface {
+ public:
+ MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const override));
+ MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t> data), (override));
+ MOCK_METHOD(uint32_t, GetTimestamp, (), (const override));
+ MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
+ MOCK_METHOD(bool, IsKeyFrame, (), (const, override));
+ MOCK_METHOD(std::vector<uint8_t>, GetAdditionalData, (), (const, override));
+ MOCK_METHOD(const webrtc::VideoFrameMetadata&,
+ GetMetadata,
+ (),
+ (const, override));
+};
+
+} // namespace webrtc
+
+#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
diff --git a/api/test/mock_video_bitrate_allocator_factory.h b/api/test/mock_video_bitrate_allocator_factory.h
index c7d883a5b9..16af191970 100644
--- a/api/test/mock_video_bitrate_allocator_factory.h
+++ b/api/test/mock_video_bitrate_allocator_factory.h
@@ -26,7 +26,7 @@ class MockVideoBitrateAllocatorFactory
CreateVideoBitrateAllocator,
(const VideoCodec&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/api/test/mock_video_decoder_factory.h b/api/test/mock_video_decoder_factory.h
index 1f832a156b..98a5d40eb6 100644
--- a/api/test/mock_video_decoder_factory.h
+++ b/api/test/mock_video_decoder_factory.h
@@ -27,12 +27,12 @@ class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory {
MOCK_METHOD(std::vector<webrtc::SdpVideoFormat>,
GetSupportedFormats,
(),
- (const override));
+ (const, override));
MOCK_METHOD(std::unique_ptr<webrtc::VideoDecoder>,
CreateVideoDecoder,
(const webrtc::SdpVideoFormat&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h
index c4b6b3e50c..26d758fd6a 100644
--- a/api/test/mock_video_encoder.h
+++ b/api/test/mock_video_encoder.h
@@ -23,8 +23,8 @@ class MockEncodedImageCallback : public EncodedImageCallback {
MOCK_METHOD(Result,
OnEncodedImage,
(const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentation),
+ const CodecSpecificInfo*,
+ const RTPFragmentationHeader*),
(override));
MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override));
};
@@ -33,31 +33,41 @@ class MockVideoEncoder : public VideoEncoder {
public:
MOCK_METHOD(void,
SetFecControllerOverride,
- (FecControllerOverride * fec_controller_override),
+ (FecControllerOverride*),
(override));
MOCK_METHOD(int32_t,
InitEncode,
- (const VideoCodec* codecSettings,
- int32_t numberOfCores,
- size_t maxPayloadSize),
+ (const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize),
+ (override));
+ MOCK_METHOD(int32_t,
+ InitEncode,
+ (const VideoCodec*, const VideoEncoder::Settings& settings),
(override));
- MOCK_METHOD2(InitEncode,
- int32_t(const VideoCodec* codecSettings,
- const VideoEncoder::Settings& settings));
- MOCK_METHOD2(Encode,
- int32_t(const VideoFrame& inputImage,
- const std::vector<VideoFrameType>* frame_types));
- MOCK_METHOD1(RegisterEncodeCompleteCallback,
- int32_t(EncodedImageCallback* callback));
- MOCK_METHOD0(Release, int32_t());
- MOCK_METHOD0(Reset, int32_t());
- MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters));
- MOCK_METHOD1(OnPacketLossRateUpdate, void(float packet_loss_rate));
- MOCK_METHOD1(OnRttUpdate, void(int64_t rtt_ms));
- MOCK_METHOD1(OnLossNotification,
- void(const LossNotification& loss_notification));
- MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void));
+ MOCK_METHOD(int32_t,
+ Encode,
+ (const VideoFrame& inputImage,
+ const std::vector<VideoFrameType>*),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterEncodeCompleteCallback,
+ (EncodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
+ MOCK_METHOD(void,
+ SetRates,
+ (const RateControlParameters& parameters),
+ (override));
+ MOCK_METHOD(void,
+ OnPacketLossRateUpdate,
+ (float packet_loss_rate),
+ (override));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override));
+ MOCK_METHOD(void,
+ OnLossNotification,
+ (const LossNotification& loss_notification),
+ (override));
+ MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override));
};
} // namespace webrtc
diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h
index fa08dbd6bc..1aa14631be 100644
--- a/api/test/mock_video_encoder_factory.h
+++ b/api/test/mock_video_encoder_factory.h
@@ -27,17 +27,17 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
MOCK_METHOD(std::vector<SdpVideoFormat>,
GetSupportedFormats,
(),
- (const override));
+ (const, override));
MOCK_METHOD(CodecInfo,
QueryVideoEncoder,
(const SdpVideoFormat&),
- (const override));
+ (const, override));
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
CreateVideoEncoder,
(const SdpVideoFormat&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/api/test/network_emulation/BUILD.gn b/api/test/network_emulation/BUILD.gn
index 5fda1e288a..a3dd961c81 100644
--- a/api/test/network_emulation/BUILD.gn
+++ b/api/test/network_emulation/BUILD.gn
@@ -23,6 +23,6 @@ rtc_library("network_emulation") {
"../../units:data_rate",
"../../units:data_size",
"../../units:timestamp",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h
index 0986df4a08..260ab0e292 100644
--- a/api/test/network_emulation/network_emulation_interfaces.h
+++ b/api/test/network_emulation/network_emulation_interfaces.h
@@ -10,6 +10,9 @@
#ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_
#define API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_
+#include <map>
+#include <vector>
+
#include "absl/types/optional.h"
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
@@ -56,9 +59,7 @@ class EmulatedNetworkReceiverInterface {
virtual void OnPacketReceived(EmulatedIpPacket packet) = 0;
};
-struct EmulatedNetworkStats {
- int64_t packets_sent = 0;
- DataSize bytes_sent = DataSize::Zero();
+struct EmulatedNetworkIncomingStats {
// Total amount of packets received with or without destination.
int64_t packets_received = 0;
// Total amount of bytes in received packets.
@@ -69,22 +70,122 @@ struct EmulatedNetworkStats {
DataSize bytes_dropped = DataSize::Zero();
DataSize first_received_packet_size = DataSize::Zero();
- DataSize first_sent_packet_size = DataSize::Zero();
- Timestamp first_packet_sent_time = Timestamp::PlusInfinity();
- Timestamp last_packet_sent_time = Timestamp::PlusInfinity();
+ // Timestamps are initialized to different infinities for simplifying
+ // computations. Client have to assume that it is some infinite value
+ // if unset. Client mustn't consider sign of infinit value.
Timestamp first_packet_received_time = Timestamp::PlusInfinity();
- Timestamp last_packet_received_time = Timestamp::PlusInfinity();
+ Timestamp last_packet_received_time = Timestamp::MinusInfinity();
+
+ DataRate AverageReceiveRate() const {
+ RTC_DCHECK_GE(packets_received, 2);
+ RTC_DCHECK(first_packet_received_time.IsFinite());
+ RTC_DCHECK(last_packet_received_time.IsFinite());
+ return (bytes_received - first_received_packet_size) /
+ (last_packet_received_time - first_packet_received_time);
+ }
+};
+
+struct EmulatedNetworkStats {
+ int64_t packets_sent = 0;
+ DataSize bytes_sent = DataSize::Zero();
+
+ DataSize first_sent_packet_size = DataSize::Zero();
+ Timestamp first_packet_sent_time = Timestamp::PlusInfinity();
+ Timestamp last_packet_sent_time = Timestamp::MinusInfinity();
+
+ // List of IP addresses that were used to send data considered in this stats
+ // object.
+ std::vector<rtc::IPAddress> local_addresses;
+
+ std::map<rtc::IPAddress, EmulatedNetworkIncomingStats>
+ incoming_stats_per_source;
DataRate AverageSendRate() const {
RTC_DCHECK_GE(packets_sent, 2);
return (bytes_sent - first_sent_packet_size) /
(last_packet_sent_time - first_packet_sent_time);
}
+
+ // Total amount of packets received regardless of the destination address.
+ int64_t PacketsReceived() const {
+ int64_t packets_received = 0;
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ packets_received += incoming_stats.second.packets_received;
+ }
+ return packets_received;
+ }
+
+ // Total amount of bytes in received packets.
+ DataSize BytesReceived() const {
+ DataSize bytes_received = DataSize::Zero();
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ bytes_received += incoming_stats.second.bytes_received;
+ }
+ return bytes_received;
+ }
+
+ // Total amount of packets that were received, but no destination was found.
+ int64_t PacketsDropped() const {
+ int64_t packets_dropped = 0;
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ packets_dropped += incoming_stats.second.packets_dropped;
+ }
+ return packets_dropped;
+ }
+
+ // Total amount of bytes in dropped packets.
+ DataSize BytesDropped() const {
+ DataSize bytes_dropped = DataSize::Zero();
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ bytes_dropped += incoming_stats.second.bytes_dropped;
+ }
+ return bytes_dropped;
+ }
+
+ DataSize FirstReceivedPacketSize() const {
+ Timestamp first_packet_received_time = Timestamp::PlusInfinity();
+ DataSize first_received_packet_size = DataSize::Zero();
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ if (first_packet_received_time >
+ incoming_stats.second.first_packet_received_time) {
+ first_packet_received_time =
+ incoming_stats.second.first_packet_received_time;
+ first_received_packet_size =
+ incoming_stats.second.first_received_packet_size;
+ }
+ }
+ return first_received_packet_size;
+ }
+
+ Timestamp FirstPacketReceivedTime() const {
+ Timestamp first_packet_received_time = Timestamp::PlusInfinity();
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ if (first_packet_received_time >
+ incoming_stats.second.first_packet_received_time) {
+ first_packet_received_time =
+ incoming_stats.second.first_packet_received_time;
+ }
+ }
+ return first_packet_received_time;
+ }
+
+ Timestamp LastPacketReceivedTime() const {
+ Timestamp last_packet_received_time = Timestamp::MinusInfinity();
+ for (const auto& incoming_stats : incoming_stats_per_source) {
+ if (last_packet_received_time <
+ incoming_stats.second.last_packet_received_time) {
+ last_packet_received_time =
+ incoming_stats.second.last_packet_received_time;
+ }
+ }
+ return last_packet_received_time;
+ }
+
DataRate AverageReceiveRate() const {
- RTC_DCHECK_GE(packets_received, 2);
- return (bytes_received - first_received_packet_size) /
- (last_packet_received_time - first_packet_received_time);
+ RTC_DCHECK_GE(PacketsReceived(), 2);
+ return (BytesReceived() - FirstReceivedPacketSize()) /
+ (LastPacketReceivedTime() - FirstPacketReceivedTime());
}
};
diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h
index d55647a841..f370478956 100644
--- a/api/test/peerconnection_quality_test_fixture.h
+++ b/api/test/peerconnection_quality_test_fixture.h
@@ -26,13 +26,14 @@
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtp_parameters.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/frame_generator_interface.h"
#include "api/test/simulated_network.h"
#include "api/test/stats_observer_interface.h"
+#include "api/test/track_id_stream_info_map.h"
#include "api/test/video_quality_analyzer_interface.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/units/time_delta.h"
#include "api/video_codecs/video_decoder_factory.h"
@@ -53,6 +54,12 @@ constexpr size_t kDefaultSlidesHeight = 1110;
// API is in development. Can be changed/removed without notice.
class PeerConnectionE2EQualityTestFixture {
public:
+ // The index of required capturing device in OS provided list of video
+ // devices. On Linux and Windows the list will be obtained via
+ // webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via
+ // [RTCCameraVideoCapturer captureDevices].
+ enum class CapturingDeviceIndex : size_t {};
+
// Contains parameters for screen share scrolling.
//
// If scrolling is enabled, then it will be done by putting sliding window
@@ -116,8 +123,6 @@ class PeerConnectionE2EQualityTestFixture {
std::vector<std::string> slides_yuv_file_names;
};
- enum VideoGeneratorType { kDefault, kI420A, kI010 };
-
// Config for Vp8 simulcast or Vp9 SVC testing.
//
// SVC support is limited:
@@ -160,6 +165,14 @@ class PeerConnectionE2EQualityTestFixture {
// It requires Selective Forwarding Unit (SFU) to be configured in the
// network.
absl::optional<int> target_spatial_index;
+
+ // Encoding parameters per simulcast layer. If not empty, |encoding_params|
+ // size have to be equal to |simulcast_streams_count|. Will be used to set
+ // transceiver send encoding params for simulcast layers. Applicable only
+ // for codecs that support simulcast (ex. Vp8) and will be ignored
+ // otherwise. RtpEncodingParameters::rid may be changed by fixture
+ // implementation to ensure signaling correctness.
+ std::vector<RtpEncodingParameters> encoding_params;
};
// Contains properties of single video stream.
@@ -178,12 +191,6 @@ class PeerConnectionE2EQualityTestFixture {
// Will be set for current video track. If equals to kText or kDetailed -
// screencast in on.
absl::optional<VideoTrackInterface::ContentHint> content_hint;
- // If specified this capturing device will be used to get input video. The
- // |capturing_device_index| is the index of required capturing device in OS
- // provided list of video devices. On Linux and Windows the list will be
- // obtained via webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via
- // [RTCCameraVideoCapturer captureDevices].
- absl::optional<size_t> capturing_device_index;
// If presented video will be transfered in simulcast/SVC mode depending on
// which encoder is used.
//
@@ -222,8 +229,7 @@ class PeerConnectionE2EQualityTestFixture {
bool show_on_screen = false;
// If specified, determines a sync group to which this video stream belongs.
// According to bugs.webrtc.org/4762 WebRTC supports synchronization only
- // for pair of single audio and single video stream. Framework won't do any
- // enforcements on this field.
+ // for pair of single audio and single video stream.
absl::optional<std::string> sync_group;
};
@@ -250,8 +256,7 @@ class PeerConnectionE2EQualityTestFixture {
int sampling_frequency_in_hz = 48000;
// If specified, determines a sync group to which this audio stream belongs.
// According to bugs.webrtc.org/4762 WebRTC supports synchronization only
- // for pair of single audio and single video stream. Framework won't do any
- // enforcements on this field.
+ // for pair of single audio and single video stream.
absl::optional<std::string> sync_group;
};
@@ -280,8 +285,6 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* SetNetworkControllerFactory(
std::unique_ptr<NetworkControllerFactoryInterface>
network_controller_factory) = 0;
- virtual PeerConfigurer* SetMediaTransportFactory(
- std::unique_ptr<MediaTransportFactory> media_transport_factory) = 0;
virtual PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) = 0;
virtual PeerConfigurer* SetVideoDecoderFactory(
@@ -312,6 +315,11 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* AddVideoConfig(
VideoConfig config,
std::unique_ptr<test::FrameGeneratorInterface> generator) = 0;
+ // Add new video stream to the call that will be sent from this peer.
+ // Capturing device with specified index will be used to get input video.
+ virtual PeerConfigurer* AddVideoConfig(
+ VideoConfig config,
+ CapturingDeviceIndex capturing_device_index) = 0;
// Set the audio stream for the call from this peer. If this method won't
// be invoked, this peer will send no audio.
virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0;
@@ -325,8 +333,8 @@ class PeerConnectionE2EQualityTestFixture {
PeerConnectionInterface::RTCConfiguration configuration) = 0;
// Set bitrate parameters on PeerConnection. This constraints will be
// applied to all summed RTP streams for this peer.
- virtual PeerConfigurer* SetBitrateParameters(
- PeerConnectionInterface::BitrateParameters bitrate_params) = 0;
+ virtual PeerConfigurer* SetBitrateSettings(
+ BitrateSettings bitrate_settings) = 0;
};
// Contains configuration for echo emulator.
@@ -400,7 +408,14 @@ class PeerConnectionE2EQualityTestFixture {
// Invoked by framework after peer connection factory and peer connection
// itself will be created but before offer/answer exchange will be started.
- virtual void Start(absl::string_view test_case_name) = 0;
+ // |test_case_name| is name of test case, that should be used to report all
+ // metrics.
+ // |reporter_helper| is a pointer to a class that will allow track_id to
+ // stream_id matching. The caller is responsible for ensuring the
+ // TrackIdStreamInfoMap will be valid from Start() to
+ // StopAndReportResults().
+ virtual void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) = 0;
// Invoked by framework after call is ended and peer connection factory and
// peer connection are destroyed.
@@ -436,6 +451,12 @@ class PeerConnectionE2EQualityTestFixture {
virtual void AddPeer(rtc::Thread* network_thread,
rtc::NetworkManager* network_manager,
rtc::FunctionView<void(PeerConfigurer*)> configurer) = 0;
+ // Runs the media quality test, which includes setting up the call with
+ // configured participants, running it according to provided |run_params| and
+ // terminating it properly at the end. During call duration media quality
+ // metrics are gathered, which are then reported to stdout and (if configured)
+ // to the json/protobuf output file through the WebRTC perf test results
+ // reporting system.
virtual void Run(RunParams run_params) = 0;
// Returns real test duration - the time of test execution measured during
diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h
index 0d5c6613a6..3fba61f74d 100644
--- a/api/test/simulated_network.h
+++ b/api/test/simulated_network.h
@@ -19,7 +19,6 @@
#include <vector>
#include "absl/types/optional.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/random.h"
#include "rtc_base/thread_annotations.h"
diff --git a/api/test/stats_observer_interface.h b/api/test/stats_observer_interface.h
index 98c8dd937f..ea4d6c23db 100644
--- a/api/test/stats_observer_interface.h
+++ b/api/test/stats_observer_interface.h
@@ -11,9 +11,8 @@
#ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_
#define API_TEST_STATS_OBSERVER_INTERFACE_H_
-#include <string>
-
-#include "api/stats_types.h"
+#include "absl/strings/string_view.h"
+#include "api/stats/rtc_stats_report.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@@ -25,8 +24,9 @@ class StatsObserverInterface {
// Method called when stats reports are available for the PeerConnection
// identified by |pc_label|.
- virtual void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) = 0;
+ virtual void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) = 0;
};
} // namespace webrtc_pc_e2e
diff --git a/api/test/test_dependency_factory.cc b/api/test/test_dependency_factory.cc
index e72f55aab5..41ad70cc3f 100644
--- a/api/test/test_dependency_factory.cc
+++ b/api/test/test_dependency_factory.cc
@@ -14,22 +14,24 @@
#include <utility>
#include "rtc_base/checks.h"
-#include "rtc_base/thread_checker.h"
+#include "rtc_base/platform_thread_types.h"
namespace webrtc {
+namespace {
// This checks everything in this file gets called on the same thread. It's
// static because it needs to look at the static methods too.
-rtc::ThreadChecker* GetThreadChecker() {
- static rtc::ThreadChecker checker;
- return &checker;
+bool IsValidTestDependencyFactoryThread() {
+ const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef();
+ return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef());
}
+} // namespace
std::unique_ptr<TestDependencyFactory> TestDependencyFactory::instance_ =
nullptr;
const TestDependencyFactory& TestDependencyFactory::GetInstance() {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
if (instance_ == nullptr) {
instance_ = std::make_unique<TestDependencyFactory>();
}
@@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() {
void TestDependencyFactory::SetInstance(
std::unique_ptr<TestDependencyFactory> instance) {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
RTC_CHECK(instance_ == nullptr);
instance_ = std::move(instance);
}
std::unique_ptr<VideoQualityTestFixtureInterface::InjectionComponents>
TestDependencyFactory::CreateComponents() const {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
return nullptr;
}
diff --git a/api/test/time_controller.h b/api/test/time_controller.h
index 4d7f9e6c39..bd3192ddf2 100644
--- a/api/test/time_controller.h
+++ b/api/test/time_controller.h
@@ -46,6 +46,7 @@ class TimeController {
const char* thread_name) = 0;
// Creates an rtc::Thread instance. If |socket_server| is nullptr, a default
// noop socket server is created.
+ // Returned thread is not null and started.
virtual std::unique_ptr<rtc::Thread> CreateThread(
const std::string& name,
std::unique_ptr<rtc::SocketServer> socket_server = nullptr) = 0;
@@ -59,6 +60,8 @@ class TimeController {
// Waits until condition() == true, polling condition() in small time
// intervals.
+ // Returns true if condition() was evaluated to true before |max_duration|
+ // elapsed and false otherwise.
bool Wait(const std::function<bool()>& condition,
TimeDelta max_duration = TimeDelta::Seconds(5));
};
diff --git a/api/test/track_id_stream_info_map.h b/api/test/track_id_stream_info_map.h
new file mode 100644
index 0000000000..bb73cfd997
--- /dev/null
+++ b/api/test/track_id_stream_info_map.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_
+#define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Instances of |TrackIdStreamInfoMap| provide bookkeeping capabilities that
+// are useful to associate stats reports track_ids to the remote stream info.
+class TrackIdStreamInfoMap {
+ public:
+ virtual ~TrackIdStreamInfoMap() = default;
+
+ // These methods must be called on the same thread where
+ // StatsObserverInterface::OnStatsReports is invoked.
+
+ // Returns a reference to a stream label owned by the TrackIdStreamInfoMap.
+ // Precondition: |track_id| must be already mapped to stream label.
+ virtual absl::string_view GetStreamLabelFromTrackId(
+ absl::string_view track_id) const = 0;
+
+ // Returns a reference to a sync group name owned by the TrackIdStreamInfoMap.
+ // Precondition: |track_id| must be already mapped to sync group.
+ virtual absl::string_view GetSyncGroupLabelFromTrackId(
+ absl::string_view track_id) const = 0;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // API_TEST_TRACK_ID_STREAM_INFO_MAP_H_
diff --git a/api/test/track_id_stream_label_map.h b/api/test/track_id_stream_label_map.h
deleted file mode 100644
index e8dc947ab1..0000000000
--- a/api/test/track_id_stream_label_map.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_
-#define API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_
-
-#include <string>
-
-namespace webrtc {
-namespace webrtc_pc_e2e {
-
-// Instances of |TrackIdStreamLabelMap| provide bookkeeping capabilities that
-// are useful to associate stats reports track_ids to the remote stream_id.
-class TrackIdStreamLabelMap {
- public:
- virtual ~TrackIdStreamLabelMap() = default;
-
- // This method must be called on the same thread where
- // StatsObserverInterface::OnStatsReports is invoked.
- // Returns a reference to a stream label owned by the TrackIdStreamLabelMap.
- // Precondition: |track_id| must be already mapped to a stream_label.
- virtual const std::string& GetStreamLabelFromTrackId(
- const std::string& track_id) const = 0;
-};
-
-} // namespace webrtc_pc_e2e
-} // namespace webrtc
-
-#endif // API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_
diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h
index 0d3f441534..c5370a7089 100644
--- a/api/test/video_quality_analyzer_interface.h
+++ b/api/test/video_quality_analyzer_interface.h
@@ -14,7 +14,9 @@
#include <memory>
#include <string>
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
+#include "api/array_view.h"
#include "api/test/stats_observer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@@ -76,42 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
// calculations. Analyzer can perform simple calculations on the calling
// thread in each method, but should remember, that it is the same thread,
// that is used in video pipeline.
- virtual void Start(std::string test_case_name, int max_threads_count) {}
+ virtual void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {}
// Will be called when frame was generated from the input stream.
+ // |peer_name| is name of the peer on which side frame was captured.
// Returns frame id, that will be set by framework to the frame.
- virtual uint16_t OnFrameCaptured(const std::string& stream_label,
+ virtual uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) = 0;
// Will be called before calling the encoder.
- virtual void OnFramePreEncode(const VideoFrame& frame) {}
+ // |peer_name| is name of the peer on which side frame came to encoder.
+ virtual void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) {}
// Will be called for each EncodedImage received from encoder. Single
// VideoFrame can produce multiple EncodedImages. Each encoded image will
// have id from VideoFrame.
- virtual void OnFrameEncoded(uint16_t frame_id,
+ // |peer_name| is name of the peer on which side frame was encoded.
+ virtual void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) {}
// Will be called for each frame dropped by encoder.
- virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {}
+ // |peer_name| is name of the peer on which side frame drop was detected.
+ virtual void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) {}
// Will be called before calling the decoder.
- virtual void OnFramePreDecode(uint16_t frame_id,
+ // |peer_name| is name of the peer on which side frame was received.
+ virtual void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image) {}
// Will be called after decoding the frame.
- virtual void OnFrameDecoded(const VideoFrame& frame,
+ // |peer_name| is name of the peer on which side frame was decoded.
+ virtual void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) {}
// Will be called when frame will be obtained from PeerConnection stack.
- virtual void OnFrameRendered(const VideoFrame& frame) {}
+ // |peer_name| is name of the peer on which side frame was rendered.
+ virtual void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) {}
// Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
- virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {}
+ // |peer_name| is name of the peer on which side error acquired.
+ virtual void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) {}
// Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
- virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {}
+ // |peer_name| is name of the peer on which side error acquired.
+ virtual void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) {}
// Will be called every time new stats reports are available for the
// Peer Connection identified by |pc_label|.
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override {}
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Tells analyzer that analysis complete and it should calculate final
// statistics.
diff --git a/api/test/video_quality_test_fixture.h b/api/test/video_quality_test_fixture.h
index ec07c23cd4..92c398aa54 100644
--- a/api/test/video_quality_test_fixture.h
+++ b/api/test/video_quality_test_fixture.h
@@ -22,6 +22,7 @@
#include "api/test/simulated_network.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/network_control.h"
+#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_config.h"
#include "api/video_codecs/video_encoder_factory.h"
@@ -31,60 +32,56 @@ namespace webrtc {
class VideoQualityTestFixtureInterface {
public:
// Parameters are grouped into smaller structs to make it easier to set
- // the desired elements and skip unused, using aggregate initialization.
- // Unfortunately, C++11 (as opposed to C11) doesn't support unnamed structs,
- // which makes the implementation of VideoQualityTest a bit uglier.
+ // the desired elements and skip unused.
struct Params {
- Params();
- ~Params();
struct CallConfig {
- bool send_side_bwe;
- bool generic_descriptor;
+ bool send_side_bwe = false;
+ bool generic_descriptor = false;
BitrateConstraints call_bitrate_config;
- int num_thumbnails;
+ int num_thumbnails = 0;
// Indicates if secondary_(video|ss|screenshare) structures are used.
- bool dual_video;
+ bool dual_video = false;
} call;
struct Video {
- bool enabled;
- size_t width;
- size_t height;
- int32_t fps;
- int min_bitrate_bps;
- int target_bitrate_bps;
- int max_bitrate_bps;
- bool suspend_below_min_bitrate;
- std::string codec;
- int num_temporal_layers;
- int selected_tl;
- int min_transmit_bps;
- bool ulpfec;
- bool flexfec;
- bool automatic_scaling;
+ bool enabled = false;
+ size_t width = 640;
+ size_t height = 480;
+ int32_t fps = 30;
+ int min_bitrate_bps = 50;
+ int target_bitrate_bps = 800;
+ int max_bitrate_bps = 800;
+ bool suspend_below_min_bitrate = false;
+ std::string codec = "VP8";
+ int num_temporal_layers = 1;
+ int selected_tl = -1;
+ int min_transmit_bps = 0;
+ bool ulpfec = false;
+ bool flexfec = false;
+ bool automatic_scaling = false;
std::string clip_path; // "Generator" to generate frames instead.
- size_t capture_device_index;
+ size_t capture_device_index = 0;
SdpVideoFormat::Parameters sdp_params;
- double encoder_overshoot_factor;
+ double encoder_overshoot_factor = 0.0;
} video[2];
struct Audio {
- bool enabled;
- bool sync_video;
- bool dtx;
- bool use_real_adm;
+ bool enabled = false;
+ bool sync_video = false;
+ bool dtx = false;
+ bool use_real_adm = false;
absl::optional<std::string> ana_config;
} audio;
struct Screenshare {
- bool enabled;
- bool generate_slides;
- int32_t slide_change_interval;
- int32_t scroll_duration;
+ bool enabled = false;
+ bool generate_slides = false;
+ int32_t slide_change_interval = 10;
+ int32_t scroll_duration = 0;
std::vector<std::string> slides;
} screenshare[2];
struct Analyzer {
std::string test_label;
- double avg_psnr_threshold; // (*)
- double avg_ssim_threshold; // (*)
- int test_durations_secs;
+ double avg_psnr_threshold = 0.0; // (*)
+ double avg_ssim_threshold = 0.0; // (*)
+ int test_durations_secs = 0;
std::string graph_data_output_filename;
std::string graph_title;
} analyzer;
@@ -95,14 +92,14 @@ class VideoQualityTestFixtureInterface {
absl::optional<BuiltInNetworkBehaviorConfig> config;
struct SS { // Spatial scalability.
std::vector<VideoStream> streams; // If empty, one stream is assumed.
- size_t selected_stream;
- int num_spatial_layers;
- int selected_sl;
- InterLayerPredMode inter_layer_pred;
+ size_t selected_stream = 0;
+ int num_spatial_layers = 0;
+ int selected_sl = -1;
+ InterLayerPredMode inter_layer_pred = InterLayerPredMode::kOn;
// If empty, bitrates are generated in VP9Impl automatically.
std::vector<SpatialLayer> spatial_layers;
// If set, default parameters will be used instead of |streams|.
- bool infer_streams;
+ bool infer_streams = false;
} ss[2];
struct Logging {
std::string rtc_event_log_name;
diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn
index 0f07301fe4..a4ada07108 100644
--- a/api/transport/BUILD.gn
+++ b/api/transport/BUILD.gn
@@ -14,10 +14,8 @@ rtc_library("bitrate_settings") {
"bitrate_settings.cc",
"bitrate_settings.h",
]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("enums") {
@@ -41,6 +39,8 @@ rtc_library("network_control") {
"../units:data_size",
"../units:time_delta",
"../units:timestamp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -49,10 +49,8 @@ rtc_library("network_control") {
rtc_source_set("webrtc_key_value_config") {
visibility = [ "*" ]
sources = [ "webrtc_key_value_config.h" ]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("field_trial_based_config") {
@@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") {
deps = [
":webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
+# TODO(nisse): Rename?
rtc_source_set("datagram_transport_interface") {
visibility = [ "*" ]
- sources = [
- "congestion_control_interface.h",
- "data_channel_transport_interface.h",
- "datagram_transport_interface.h",
- ]
+ sources = [ "data_channel_transport_interface.h" ]
deps = [
- ":network_control",
"..:array_view",
"..:rtc_error",
"../../rtc_base:rtc_base_approved",
- "../units:data_rate",
- "../units:timestamp",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc") {
diff --git a/api/transport/congestion_control_interface.h b/api/transport/congestion_control_interface.h
deleted file mode 100644
index 40552cb4ff..0000000000
--- a/api/transport/congestion_control_interface.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media and datagram transports.
-
-#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
-#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "api/transport/network_control.h"
-#include "api/units/data_rate.h"
-
-namespace webrtc {
-
-// TODO(nisse): Defined together with MediaTransportInterface. But we should use
-// types that aren't tied to media, so that MediaTransportInterface can depend
-// on CongestionControlInterface, but not the other way around.
-// api/transport/network_control.h may be a reasonable place.
-class MediaTransportRttObserver;
-struct MediaTransportAllocatedBitrateLimits;
-struct MediaTransportTargetRateConstraints;
-
-// Defines congestion control feedback interface for media and datagram
-// transports.
-class CongestionControlInterface {
- public:
- virtual ~CongestionControlInterface() = default;
-
- // Updates allocation limits.
- virtual void SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits) = 0;
-
- // Sets starting rate.
- virtual void SetTargetBitrateLimits(
- const MediaTransportTargetRateConstraints& target_rate_constraints) = 0;
-
- // Intended for receive side. AddRttObserver registers an observer to be
- // called for each RTT measurement, typically once per ACK. Before media
- // transport is destructed the observer must be unregistered.
- //
- // TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were
- // never implemented for media transport, so keeping noop implementation.
- virtual void AddRttObserver(MediaTransportRttObserver* observer) {}
- virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {}
-
- // Adds a target bitrate observer. Before media transport is destructed
- // the observer must be unregistered (by calling
- // RemoveTargetTransferRateObserver).
- // A newly registered observer will be called back with the latest recorded
- // target rate, if available.
- virtual void AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer) = 0;
-
- // Removes an existing |observer| from observers. If observer was never
- // registered, an error is logged and method does nothing.
- virtual void RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer) = 0;
-
- // Returns the last known target transfer rate as reported to the above
- // observers.
- virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate() = 0;
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h
index 671deffc6e..7b8c653c39 100644
--- a/api/transport/data_channel_transport_interface.h
+++ b/api/transport/data_channel_transport_interface.h
@@ -35,8 +35,8 @@ enum class DataMessageType {
// sent reliably and in-order, even if the data channel is configured for
// unreliable delivery.
struct SendDataParams {
- SendDataParams();
- SendDataParams(const SendDataParams&);
+ SendDataParams() = default;
+ SendDataParams(const SendDataParams&) = default;
DataMessageType type = DataMessageType::kText;
diff --git a/api/transport/datagram_transport_interface.h b/api/transport/datagram_transport_interface.h
deleted file mode 100644
index 01736b978d..0000000000
--- a/api/transport/datagram_transport_interface.h
+++ /dev/null
@@ -1,151 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media and datagram transports.
-
-#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
-#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "api/transport/congestion_control_interface.h"
-#include "api/transport/data_channel_transport_interface.h"
-#include "api/units/data_rate.h"
-#include "api/units/timestamp.h"
-
-namespace rtc {
-class PacketTransportInternal;
-} // namespace rtc
-
-namespace webrtc {
-
-class MediaTransportStateCallback;
-
-typedef int64_t DatagramId;
-
-struct DatagramAck {
- // |datagram_id| is same as passed in
- // DatagramTransportInterface::SendDatagram.
- DatagramId datagram_id;
-
- // The timestamp at which the remote peer received the identified datagram,
- // according to that peer's clock.
- Timestamp receive_timestamp = Timestamp::MinusInfinity();
-};
-
-// All sink methods are called on network thread.
-class DatagramSinkInterface {
- public:
- virtual ~DatagramSinkInterface() {}
-
- // Called when new packet is received.
- virtual void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) = 0;
-
- // Called when datagram is actually sent (datragram can be delayed due
- // to congestion control or fusing). |datagram_id| is same as passed in
- // DatagramTransportInterface::SendDatagram.
- virtual void OnDatagramSent(DatagramId datagram_id) = 0;
-
- // Called when datagram is ACKed.
- virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0;
-
- // Called when a datagram is lost.
- virtual void OnDatagramLost(DatagramId datagram_id) = 0;
-};
-
-// Datagram transport allows to send and receive unreliable packets (datagrams)
-// and receive feedback from congestion control (via
-// CongestionControlInterface). The idea is to send RTP packets as datagrams and
-// have underlying implementation of datagram transport to use QUIC datagram
-// protocol.
-class DatagramTransportInterface : public DataChannelTransportInterface {
- public:
- virtual ~DatagramTransportInterface() = default;
-
- // Connect the datagram transport to the ICE transport.
- // The implementation must be able to ignore incoming packets that don't
- // belong to it.
- virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0;
-
- // Returns congestion control feedback interface or nullptr if datagram
- // transport does not implement congestion control.
- //
- // Note that right now datagram transport is used without congestion control,
- // but we plan to use it in the future.
- virtual CongestionControlInterface* congestion_control() = 0;
-
- // Sets a state observer callback. Before datagram transport is destroyed, the
- // callback must be unregistered by setting it to nullptr.
- // A newly registered callback will be called with the current state.
- // Datagram transport does not invoke this callback concurrently.
- virtual void SetTransportStateCallback(
- MediaTransportStateCallback* callback) = 0;
-
- // Start asynchronous send of datagram. The status returned by this method
- // only pertains to the synchronous operations (e.g. serialization /
- // packetization), not to the asynchronous operation.
- //
- // Datagrams larger than GetLargestDatagramSize() will fail and return error.
- //
- // Datagrams are sent in FIFO order.
- //
- // |datagram_id| is only used in ACK/LOST notifications in
- // DatagramSinkInterface and does not need to be unique.
- virtual RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) = 0;
-
- // Returns maximum size of datagram message, does not change.
- // TODO(sukhanov): Because value may be undefined before connection setup
- // is complete, consider returning error when called before connection is
- // established. Currently returns hardcoded const, because integration
- // prototype may call before connection is established.
- virtual size_t GetLargestDatagramSize() const = 0;
-
- // Sets packet sink. Sink must be unset by calling
- // SetDataTransportSink(nullptr) before the data transport is destroyed or
- // before new sink is set.
- virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0;
-
- // Retrieves transport parameters for this datagram transport. May be called
- // on either client- or server-perspective transports.
- //
- // For servers, the parameters represent what kind of connections and data the
- // server is prepared to accept. This is generally a superset of acceptable
- // parameters.
- //
- // For clients, the parameters echo the server configuration used to create
- // the client, possibly removing any fields or parameters which the client
- // does not understand.
- virtual std::string GetTransportParameters() const = 0;
-
- // Sets remote transport parameters. |remote_params| is a serialized string
- // of opaque parameters, understood by the datagram transport implementation.
- // Returns an error if |remote_params| are not compatible with this transport.
- //
- // TODO(mellem): Make pure virtual. The default implementation maintains
- // original negotiation behavior (negotiation falls back to RTP if the
- // remote datagram transport fails to echo exactly the local parameters).
- virtual RTCError SetRemoteTransportParameters(
- absl::string_view remote_params) {
- if (remote_params == GetTransportParameters()) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Local and remote transport parameters do not match");
- }
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
diff --git a/api/transport/media/BUILD.gn b/api/transport/media/BUILD.gn
deleted file mode 100644
index 24a364c2e5..0000000000
--- a/api/transport/media/BUILD.gn
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../../../webrtc.gni")
-
-rtc_library("media_transport_interface") {
- visibility = [ "*" ]
- sources = [
- "media_transport_config.cc",
- "media_transport_config.h",
- "media_transport_interface.cc",
- "media_transport_interface.h",
- ]
- deps = [
- ":audio_interfaces",
- ":video_interfaces",
- "..:datagram_transport_interface",
- "..:network_control",
- "../..:array_view",
- "../..:rtc_error",
- "../../..:webrtc_common",
- "../../../rtc_base",
- "../../../rtc_base:checks",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:stringutils",
- "../../units:data_rate",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
-
-rtc_library("audio_interfaces") {
- visibility = [ "*" ]
- sources = [
- "audio_transport.cc",
- "audio_transport.h",
- ]
- deps = [ "../..:array_view" ]
-}
-
-rtc_library("video_interfaces") {
- visibility = [ "*" ]
- sources = [
- "video_transport.cc",
- "video_transport.h",
- ]
- deps = [ "../../video:encoded_image" ]
-}
diff --git a/api/transport/media/audio_transport.cc b/api/transport/media/audio_transport.cc
deleted file mode 100644
index 0f5fe8bcf2..0000000000
--- a/api/transport/media/audio_transport.cc
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/audio_transport.h"
-
-#include <utility>
-
-namespace webrtc {
-
-MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {}
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- int sampling_rate_hz,
- int starting_sample_index,
- int samples_per_channel,
- int sequence_number,
- FrameType frame_type,
- int payload_type,
- std::vector<uint8_t> encoded_data)
- : sampling_rate_hz_(sampling_rate_hz),
- starting_sample_index_(starting_sample_index),
- samples_per_channel_(samples_per_channel),
- sequence_number_(sequence_number),
- frame_type_(frame_type),
- payload_type_(payload_type),
- encoded_data_(std::move(encoded_data)) {}
-
-MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
- const MediaTransportEncodedAudioFrame&) = default;
-
-MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
- MediaTransportEncodedAudioFrame&&) = default;
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- const MediaTransportEncodedAudioFrame&) = default;
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- MediaTransportEncodedAudioFrame&&) = default;
-
-} // namespace webrtc
diff --git a/api/transport/media/audio_transport.h b/api/transport/media/audio_transport.h
deleted file mode 100644
index dcbdcd7afe..0000000000
--- a/api/transport/media/audio_transport.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
-#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
-
-#include <vector>
-
-#include "api/array_view.h"
-
-namespace webrtc {
-
-// Represents encoded audio frame in any encoding (type of encoding is opaque).
-// To avoid copying of encoded data use move semantics when passing by value.
-class MediaTransportEncodedAudioFrame final {
- public:
- enum class FrameType {
- // Normal audio frame (equivalent to webrtc::kAudioFrameSpeech).
- kSpeech,
-
- // DTX frame (equivalent to webrtc::kAudioFrameCN).
- kDiscontinuousTransmission,
- // TODO(nisse): Mis-spelled version, update users, then delete.
- kDiscountinuousTransmission = kDiscontinuousTransmission,
- };
-
- MediaTransportEncodedAudioFrame(
- // Audio sampling rate, for example 48000.
- int sampling_rate_hz,
-
- // Starting sample index of the frame, i.e. how many audio samples were
- // before this frame since the beginning of the call or beginning of time
- // in one channel (the starting point should not matter for NetEq). In
- // WebRTC it is used as a timestamp of the frame.
- // TODO(sukhanov): Starting_sample_index is currently adjusted on the
- // receiver side in RTP path. Non-RTP implementations should preserve it.
- // For NetEq initial offset should not matter so we should consider fixing
- // RTP path.
- int starting_sample_index,
-
- // Number of audio samples in audio frame in 1 channel.
- int samples_per_channel,
-
- // Sequence number of the frame in the order sent, it is currently
- // required by NetEq, but we can fix NetEq, because starting_sample_index
- // should be enough.
- int sequence_number,
-
- // If audio frame is a speech or discontinued transmission.
- FrameType frame_type,
-
- // Opaque payload type. In RTP codepath payload type is stored in RTP
- // header. In other implementations it should be simply passed through the
- // wire -- it's needed for decoder.
- int payload_type,
-
- // Vector with opaque encoded data.
- std::vector<uint8_t> encoded_data);
-
- ~MediaTransportEncodedAudioFrame();
- MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&);
- MediaTransportEncodedAudioFrame& operator=(
- const MediaTransportEncodedAudioFrame& other);
- MediaTransportEncodedAudioFrame& operator=(
- MediaTransportEncodedAudioFrame&& other);
- MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&);
-
- // Getters.
- int sampling_rate_hz() const { return sampling_rate_hz_; }
- int starting_sample_index() const { return starting_sample_index_; }
- int samples_per_channel() const { return samples_per_channel_; }
- int sequence_number() const { return sequence_number_; }
-
- int payload_type() const { return payload_type_; }
- FrameType frame_type() const { return frame_type_; }
-
- rtc::ArrayView<const uint8_t> encoded_data() const { return encoded_data_; }
-
- private:
- int sampling_rate_hz_;
- int starting_sample_index_;
- int samples_per_channel_;
-
- // TODO(sukhanov): Refactor NetEq so we don't need sequence number.
- // Having sample_index and samples_per_channel should be enough.
- int sequence_number_;
-
- FrameType frame_type_;
-
- int payload_type_;
-
- std::vector<uint8_t> encoded_data_;
-};
-
-// Interface for receiving encoded audio frames from MediaTransportInterface
-// implementations.
-class MediaTransportAudioSinkInterface {
- public:
- virtual ~MediaTransportAudioSinkInterface() = default;
-
- // Called when new encoded audio frame is received.
- virtual void OnData(uint64_t channel_id,
- MediaTransportEncodedAudioFrame frame) = 0;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
diff --git a/api/transport/media/media_transport_config.cc b/api/transport/media/media_transport_config.cc
deleted file mode 100644
index b9b19cb6f0..0000000000
--- a/api/transport/media/media_transport_config.cc
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/transport/media/media_transport_config.h"
-
-#include "rtc_base/checks.h"
-#include "rtc_base/strings/string_builder.h"
-
-namespace webrtc {
-
-MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size)
- : rtp_max_packet_size(rtp_max_packet_size) {
- RTC_DCHECK_GT(rtp_max_packet_size, 0);
-}
-
-std::string MediaTransportConfig::DebugString() const {
- rtc::StringBuilder result;
- result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}";
- return result.Release();
-}
-
-} // namespace webrtc
diff --git a/api/transport/media/media_transport_config.h b/api/transport/media/media_transport_config.h
deleted file mode 100644
index 7ef65453ae..0000000000
--- a/api/transport/media/media_transport_config.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
-#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-
-namespace webrtc {
-
-// Media transport config is made available to both transport and audio / video
-// layers, but access to individual interfaces should not be open without
-// necessity.
-struct MediaTransportConfig {
- // Default constructor for no-media transport scenarios.
- MediaTransportConfig() = default;
-
- // Constructor for datagram transport scenarios.
- explicit MediaTransportConfig(size_t rtp_max_packet_size);
-
- std::string DebugString() const;
-
- // If provided, limits RTP packet size (excludes ICE, IP or network overhead).
- absl::optional<size_t> rtp_max_packet_size;
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
diff --git a/api/transport/media/media_transport_interface.cc b/api/transport/media/media_transport_interface.cc
deleted file mode 100644
index 323ddca689..0000000000
--- a/api/transport/media/media_transport_interface.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/media_transport_interface.h"
-
-#include <cstdint>
-#include <utility>
-
-#include "api/transport/datagram_transport_interface.h"
-
-namespace webrtc {
-
-MediaTransportSettings::MediaTransportSettings() = default;
-MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) =
- default;
-MediaTransportSettings& MediaTransportSettings::operator=(
- const MediaTransportSettings&) = default;
-MediaTransportSettings::~MediaTransportSettings() = default;
-
-SendDataParams::SendDataParams() = default;
-SendDataParams::SendDataParams(const SendDataParams&) = default;
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-MediaTransportFactory::CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<MediaTransportInterface>(nullptr);
-}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-MediaTransportFactory::CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<MediaTransportInterface>(nullptr);
-}
-
-RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
-MediaTransportFactory::CreateDatagramTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<DatagramTransportInterface>(nullptr);
-}
-
-std::string MediaTransportFactory::GetTransportName() const {
- return "";
-}
-
-MediaTransportInterface::MediaTransportInterface() = default;
-MediaTransportInterface::~MediaTransportInterface() = default;
-
-absl::optional<std::string>
-MediaTransportInterface::GetTransportParametersOffer() const {
- return absl::nullopt;
-}
-
-void MediaTransportInterface::Connect(
- rtc::PacketTransportInternal* packet_transport) {}
-
-void MediaTransportInterface::SetKeyFrameRequestCallback(
- MediaTransportKeyFrameRequestCallback* callback) {}
-
-absl::optional<TargetTransferRate>
-MediaTransportInterface::GetLatestTargetTransferRate() {
- return absl::nullopt;
-}
-
-void MediaTransportInterface::AddNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback) {}
-
-void MediaTransportInterface::RemoveNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback) {}
-
-void MediaTransportInterface::SetFirstAudioPacketReceivedObserver(
- AudioPacketReceivedObserver* observer) {}
-
-void MediaTransportInterface::AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer) {}
-void MediaTransportInterface::RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer) {}
-
-void MediaTransportInterface::AddRttObserver(
- MediaTransportRttObserver* observer) {}
-void MediaTransportInterface::RemoveRttObserver(
- MediaTransportRttObserver* observer) {}
-
-size_t MediaTransportInterface::GetAudioPacketOverhead() const {
- return 0;
-}
-
-void MediaTransportInterface::SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits) {}
-
-} // namespace webrtc
diff --git a/api/transport/media/media_transport_interface.h b/api/transport/media/media_transport_interface.h
deleted file mode 100644
index dbe68d344b..0000000000
--- a/api/transport/media/media_transport_interface.h
+++ /dev/null
@@ -1,320 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
-#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "api/transport/data_channel_transport_interface.h"
-#include "api/transport/media/audio_transport.h"
-#include "api/transport/media/video_transport.h"
-#include "api/transport/network_control.h"
-#include "api/units/data_rate.h"
-#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/network_route.h"
-
-namespace rtc {
-class PacketTransportInternal;
-class Thread;
-} // namespace rtc
-
-namespace webrtc {
-
-class DatagramTransportInterface;
-class RtcEventLog;
-
-class AudioPacketReceivedObserver {
- public:
- virtual ~AudioPacketReceivedObserver() = default;
-
- // Invoked for the first received audio packet on a given channel id.
- // It will be invoked once for each channel id.
- virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0;
-};
-
-// Used to configure stream allocations.
-struct MediaTransportAllocatedBitrateLimits {
- DataRate min_pacing_rate = DataRate::Zero();
- DataRate max_padding_bitrate = DataRate::Zero();
- DataRate max_total_allocated_bitrate = DataRate::Zero();
-};
-
-// Used to configure target bitrate constraints.
-// If the value is provided, the constraint is updated.
-// If the value is omitted, the value is left unchanged.
-struct MediaTransportTargetRateConstraints {
- absl::optional<DataRate> min_bitrate;
- absl::optional<DataRate> max_bitrate;
- absl::optional<DataRate> starting_bitrate;
-};
-
-// A collection of settings for creation of media transport.
-struct MediaTransportSettings final {
- MediaTransportSettings();
- MediaTransportSettings(const MediaTransportSettings&);
- MediaTransportSettings& operator=(const MediaTransportSettings&);
- ~MediaTransportSettings();
-
- // Group calls are not currently supported, in 1:1 call one side must set
- // is_caller = true and another is_caller = false.
- bool is_caller;
-
- // Must be set if a pre-shared key is used for the call.
- // TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant
- // future.
- absl::optional<std::string> pre_shared_key;
-
- // If present, this is a config passed from the caller to the answerer in the
- // offer. Each media transport knows how to understand its own parameters.
- absl::optional<std::string> remote_transport_parameters;
-
- // If present, provides the event log that media transport should use.
- // Media transport does not own it. The lifetime of |event_log| will exceed
- // the lifetime of the instance of MediaTransportInterface instance.
- RtcEventLog* event_log = nullptr;
-};
-
-// Callback to notify about network route changes.
-class MediaTransportNetworkChangeCallback {
- public:
- virtual ~MediaTransportNetworkChangeCallback() = default;
-
- // Called when the network route is changed, with the new network route.
- virtual void OnNetworkRouteChanged(
- const rtc::NetworkRoute& new_network_route) = 0;
-};
-
-// State of the media transport. Media transport begins in the pending state.
-// It transitions to writable when it is ready to send media. It may transition
-// back to pending if the connection is blocked. It may transition to closed at
-// any time. Closed is terminal: a transport will never re-open once closed.
-enum class MediaTransportState {
- kPending,
- kWritable,
- kClosed,
-};
-
-// Callback invoked whenever the state of the media transport changes.
-class MediaTransportStateCallback {
- public:
- virtual ~MediaTransportStateCallback() = default;
-
- // Invoked whenever the state of the media transport changes.
- virtual void OnStateChanged(MediaTransportState state) = 0;
-};
-
-// Callback for RTT measurements on the receive side.
-// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's
-// somewhat unclear what type of measurement is needed. It's used to configure
-// NACK generation and playout buffer. Either raw measurement values or recent
-// maximum would make sense for this use. Need consolidation of RTT signalling.
-class MediaTransportRttObserver {
- public:
- virtual ~MediaTransportRttObserver() = default;
-
- // Invoked when a new RTT measurement is available, typically once per ACK.
- virtual void OnRttUpdated(int64_t rtt_ms) = 0;
-};
-
-// Media transport interface for sending / receiving encoded audio/video frames
-// and receiving bandwidth estimate update from congestion control.
-class MediaTransportInterface : public DataChannelTransportInterface {
- public:
- MediaTransportInterface();
- virtual ~MediaTransportInterface();
-
- // Retrieves callers config (i.e. media transport offer) that should be passed
- // to the callee, before the call is connected. Such config is opaque to SDP
- // (sdp just passes it through). The config is a binary blob, so SDP may
- // choose to use base64 to serialize it (or any other approach that guarantees
- // that the binary blob goes through). This should only be called for the
- // caller's perspective.
- //
- // This may return an unset optional, which means that the given media
- // transport is not supported / disabled and shouldn't be reported in SDP.
- //
- // It may also return an empty string, in which case the media transport is
- // supported, but without any extra settings.
- // TODO(psla): Make abstract.
- virtual absl::optional<std::string> GetTransportParametersOffer() const;
-
- // Connect the media transport to the ICE transport.
- // The implementation must be able to ignore incoming packets that don't
- // belong to it.
- // TODO(psla): Make abstract.
- virtual void Connect(rtc::PacketTransportInternal* packet_transport);
-
- // Start asynchronous send of audio frame. The status returned by this method
- // only pertains to the synchronous operations (e.g.
- // serialization/packetization), not to the asynchronous operation.
-
- virtual RTCError SendAudioFrame(uint64_t channel_id,
- MediaTransportEncodedAudioFrame frame) = 0;
-
- // Start asynchronous send of video frame. The status returned by this method
- // only pertains to the synchronous operations (e.g.
- // serialization/packetization), not to the asynchronous operation.
- virtual RTCError SendVideoFrame(
- uint64_t channel_id,
- const MediaTransportEncodedVideoFrame& frame) = 0;
-
- // Used by video sender to be notified on key frame requests.
- virtual void SetKeyFrameRequestCallback(
- MediaTransportKeyFrameRequestCallback* callback);
-
- // Requests a keyframe for the particular channel (stream). The caller should
- // check that the keyframe is not present in a jitter buffer already (i.e.
- // don't request a keyframe if there is one that you will get from the jitter
- // buffer in a moment).
- virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0;
-
- // Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr)
- // before the media transport is destroyed or before new sink is set.
- virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0;
-
- // Registers a video sink. Before destruction of media transport, you must
- // pass a nullptr.
- virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0;
-
- // Adds a target bitrate observer. Before media transport is destructed
- // the observer must be unregistered (by calling
- // RemoveTargetTransferRateObserver).
- // A newly registered observer will be called back with the latest recorded
- // target rate, if available.
- virtual void AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer);
-
- // Removes an existing |observer| from observers. If observer was never
- // registered, an error is logged and method does nothing.
- virtual void RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer);
-
- // Sets audio packets observer, which gets informed about incoming audio
- // packets. Before destruction, the observer must be unregistered by setting
- // nullptr.
- //
- // This method may be temporary, when the multiplexer is implemented (or
- // multiplexer may use it to demultiplex channel ids).
- virtual void SetFirstAudioPacketReceivedObserver(
- AudioPacketReceivedObserver* observer);
-
- // Intended for receive side. AddRttObserver registers an observer to be
- // called for each RTT measurement, typically once per ACK. Before media
- // transport is destructed the observer must be unregistered.
- virtual void AddRttObserver(MediaTransportRttObserver* observer);
- virtual void RemoveRttObserver(MediaTransportRttObserver* observer);
-
- // Returns the last known target transfer rate as reported to the above
- // observers.
- virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate();
-
- // Gets the audio packet overhead in bytes. Returned overhead does not include
- // transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.).
- // If the transport is capable of fusing packets together, this overhead
- // might not be a very accurate number.
- // TODO(nisse): Deprecated.
- virtual size_t GetAudioPacketOverhead() const;
-
- // Corresponding observers for audio and video overhead. Before destruction,
- // the observers must be unregistered by setting nullptr.
-
- // Registers an observer for network change events. If the network route is
- // already established when the callback is added, |callback| will be called
- // immediately with the current network route. Before media transport is
- // destroyed, the callback must be removed.
- virtual void AddNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback);
- virtual void RemoveNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback);
-
- // Sets a state observer callback. Before media transport is destroyed, the
- // callback must be unregistered by setting it to nullptr.
- // A newly registered callback will be called with the current state.
- // Media transport does not invoke this callback concurrently.
- virtual void SetMediaTransportStateCallback(
- MediaTransportStateCallback* callback) = 0;
-
- // Updates allocation limits.
- // TODO(psla): Make abstract when downstream implementation implement it.
- virtual void SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits);
-
- // Sets starting rate.
- // TODO(psla): Make abstract when downstream implementation implement it.
- virtual void SetTargetBitrateLimits(
- const MediaTransportTargetRateConstraints& target_rate_constraints) {}
-
- // TODO(sukhanov): RtcEventLogs.
-};
-
-// If media transport factory is set in peer connection factory, it will be
-// used to create media transport for sending/receiving encoded frames and
-// this transport will be used instead of default RTP/SRTP transport.
-//
-// Currently Media Transport negotiation is not supported in SDP.
-// If application is using media transport, it must negotiate it before
-// setting media transport factory in peer connection.
-class MediaTransportFactory {
- public:
- virtual ~MediaTransportFactory() = default;
-
- // Creates media transport.
- // - Does not take ownership of packet_transport or network_thread.
- // - Does not support group calls, in 1:1 call one side must set
- // is_caller = true and another is_caller = false.
- virtual RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
- CreateMediaTransport(rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Creates a new Media Transport in a disconnected state. If the media
- // transport for the caller is created, one can then call
- // MediaTransportInterface::GetTransportParametersOffer on that new instance.
- // TODO(psla): Make abstract.
- virtual RTCErrorOr<std::unique_ptr<webrtc::MediaTransportInterface>>
- CreateMediaTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Creates a new Datagram Transport in a disconnected state. If the datagram
- // transport for the caller is created, one can then call
- // DatagramTransportInterface::GetTransportParametersOffer on that new
- // instance.
- //
- // TODO(sukhanov): Consider separating media and datagram transport factories.
- // TODO(sukhanov): Move factory to a separate .h file.
- virtual RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Gets a transport name which is supported by the implementation.
- // Different factories should return different transport names, and at runtime
- // it will be checked that different names were used.
- // For example, "rtp" or "generic" may be returned by two different
- // implementations.
- // The value returned by this method must never change in the lifetime of the
- // factory.
- // TODO(psla): Make abstract.
- virtual std::string GetTransportName() const;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
diff --git a/api/transport/media/video_transport.cc b/api/transport/media/video_transport.cc
deleted file mode 100644
index a6f5304048..0000000000
--- a/api/transport/media/video_transport.cc
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/video_transport.h"
-
-#include <utility>
-
-namespace webrtc {
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default;
-
-MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default;
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- int64_t frame_id,
- std::vector<int64_t> referenced_frame_ids,
- int payload_type,
- const webrtc::EncodedImage& encoded_image)
- : payload_type_(payload_type),
- encoded_image_(encoded_image),
- frame_id_(frame_id),
- referenced_frame_ids_(std::move(referenced_frame_ids)) {}
-
-MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
- const MediaTransportEncodedVideoFrame&) = default;
-
-MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
- MediaTransportEncodedVideoFrame&&) = default;
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- const MediaTransportEncodedVideoFrame& o)
- : MediaTransportEncodedVideoFrame() {
- *this = o;
-}
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- MediaTransportEncodedVideoFrame&& o)
- : MediaTransportEncodedVideoFrame() {
- *this = std::move(o);
-}
-
-} // namespace webrtc
diff --git a/api/transport/media/video_transport.h b/api/transport/media/video_transport.h
deleted file mode 100644
index affd2e0d38..0000000000
--- a/api/transport/media/video_transport.h
+++ /dev/null
@@ -1,101 +0,0 @@
-/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
-#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
-
-#include <vector>
-
-#include "api/video/encoded_image.h"
-
-namespace webrtc {
-
-// Represents encoded video frame, along with the codec information.
-class MediaTransportEncodedVideoFrame final {
- public:
- MediaTransportEncodedVideoFrame(int64_t frame_id,
- std::vector<int64_t> referenced_frame_ids,
- int payload_type,
- const webrtc::EncodedImage& encoded_image);
- ~MediaTransportEncodedVideoFrame();
- MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&);
- MediaTransportEncodedVideoFrame& operator=(
- const MediaTransportEncodedVideoFrame& other);
- MediaTransportEncodedVideoFrame& operator=(
- MediaTransportEncodedVideoFrame&& other);
- MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&);
-
- int payload_type() const { return payload_type_; }
- const webrtc::EncodedImage& encoded_image() const { return encoded_image_; }
-
- int64_t frame_id() const { return frame_id_; }
- const std::vector<int64_t>& referenced_frame_ids() const {
- return referenced_frame_ids_;
- }
-
- // Hack to workaround lack of ownership of the EncodedImage buffer. If we
- // don't already own the underlying data, make a copy.
- void Retain() { encoded_image_.Retain(); }
-
- private:
- MediaTransportEncodedVideoFrame();
-
- int payload_type_;
-
- // The buffer is not always owned by the encoded image. On the sender it means
- // that it will need to make a copy using the Retain() method, if it wants to
- // deliver it asynchronously.
- webrtc::EncodedImage encoded_image_;
-
- // Frame id uniquely identifies a frame in a stream. It needs to be unique in
- // a given time window (i.e. technically unique identifier for the lifetime of
- // the connection is not needed, but you need to guarantee that remote side
- // got rid of the previous frame_id if you plan to reuse it).
- //
- // It is required by a remote jitter buffer, and is the same as
- // EncodedFrame::id::picture_id.
- //
- // This data must be opaque to the media transport, and media transport should
- // itself not make any assumptions about what it is and its uniqueness.
- int64_t frame_id_;
-
- // A single frame might depend on other frames. This is set of identifiers on
- // which the current frame depends.
- std::vector<int64_t> referenced_frame_ids_;
-};
-
-// Interface for receiving encoded video frames from MediaTransportInterface
-// implementations.
-class MediaTransportVideoSinkInterface {
- public:
- virtual ~MediaTransportVideoSinkInterface() = default;
-
- // Called when new encoded video frame is received.
- virtual void OnData(uint64_t channel_id,
- MediaTransportEncodedVideoFrame frame) = 0;
-};
-
-// Interface for video sender to be notified of received key frame request.
-class MediaTransportKeyFrameRequestCallback {
- public:
- virtual ~MediaTransportKeyFrameRequestCallback() = default;
-
- // Called when a key frame request is received on the transport.
- virtual void OnKeyFrameRequested(uint64_t channel_id) = 0;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
diff --git a/api/transport/rtp/BUILD.gn b/api/transport/rtp/BUILD.gn
index b0849502c8..7b01169360 100644
--- a/api/transport/rtp/BUILD.gn
+++ b/api/transport/rtp/BUILD.gn
@@ -14,15 +14,20 @@ rtc_source_set("rtp_source") {
deps = [
"../../../api:rtp_headers",
"../../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("dependency_descriptor") {
visibility = [ "*" ]
- sources = [ "dependency_descriptor.h" ]
- deps = [
+ sources = [
+ "dependency_descriptor.cc",
+ "dependency_descriptor.h",
+ ]
+ deps = [ "../../../rtc_base:checks" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
diff --git a/api/transport/rtp/dependency_descriptor.cc b/api/transport/rtp/dependency_descriptor.cc
new file mode 100644
index 0000000000..2a9b6d9a71
--- /dev/null
+++ b/api/transport/rtp/dependency_descriptor.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/transport/rtp/dependency_descriptor.h"
+
+#include "absl/container/inlined_vector.h"
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+constexpr int DependencyDescriptor::kMaxSpatialIds;
+constexpr int DependencyDescriptor::kMaxTemporalIds;
+constexpr int DependencyDescriptor::kMaxTemplates;
+constexpr int DependencyDescriptor::kMaxDecodeTargets;
+
+namespace webrtc_impl {
+
+absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
+ absl::string_view symbols) {
+ absl::InlinedVector<DecodeTargetIndication, 10> dtis;
+ dtis.reserve(symbols.size());
+ for (char symbol : symbols) {
+ DecodeTargetIndication indication;
+ switch (symbol) {
+ case '-':
+ indication = DecodeTargetIndication::kNotPresent;
+ break;
+ case 'D':
+ indication = DecodeTargetIndication::kDiscardable;
+ break;
+ case 'R':
+ indication = DecodeTargetIndication::kRequired;
+ break;
+ case 'S':
+ indication = DecodeTargetIndication::kSwitch;
+ break;
+ default:
+ RTC_NOTREACHED();
+ }
+ dtis.push_back(indication);
+ }
+ return dtis;
+}
+
+} // namespace webrtc_impl
+} // namespace webrtc
diff --git a/api/transport/rtp/dependency_descriptor.h b/api/transport/rtp/dependency_descriptor.h
index a488f56dfd..6967c83517 100644
--- a/api/transport/rtp/dependency_descriptor.h
+++ b/api/transport/rtp/dependency_descriptor.h
@@ -13,10 +13,12 @@
#include <stdint.h>
+#include <initializer_list>
#include <memory>
#include <vector>
#include "absl/container/inlined_vector.h"
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
namespace webrtc {
@@ -52,6 +54,13 @@ enum class DecodeTargetIndication {
};
struct FrameDependencyTemplate {
+ // Setters are named briefly to chain them when building the template.
+ FrameDependencyTemplate& S(int spatial_layer);
+ FrameDependencyTemplate& T(int temporal_layer);
+ FrameDependencyTemplate& Dtis(absl::string_view dtis);
+ FrameDependencyTemplate& FrameDiffs(std::initializer_list<int> diffs);
+ FrameDependencyTemplate& ChainDiffs(std::initializer_list<int> diffs);
+
friend bool operator==(const FrameDependencyTemplate& lhs,
const FrameDependencyTemplate& rhs) {
return lhs.spatial_id == rhs.spatial_id &&
@@ -82,14 +91,18 @@ struct FrameDependencyStructure {
int num_decode_targets = 0;
int num_chains = 0;
// If chains are used (num_chains > 0), maps decode target index into index of
- // the chain protecting that target or |num_chains| value if decode target is
- // not protected by a chain.
+ // the chain protecting that target.
absl::InlinedVector<int, 10> decode_target_protected_by_chain;
absl::InlinedVector<RenderResolution, 4> resolutions;
std::vector<FrameDependencyTemplate> templates;
};
struct DependencyDescriptor {
+ static constexpr int kMaxSpatialIds = 4;
+ static constexpr int kMaxTemporalIds = 8;
+ static constexpr int kMaxDecodeTargets = 32;
+ static constexpr int kMaxTemplates = 64;
+
bool first_packet_in_frame = true;
bool last_packet_in_frame = true;
int frame_number = 0;
@@ -99,6 +112,37 @@ struct DependencyDescriptor {
std::unique_ptr<FrameDependencyStructure> attached_structure;
};
+// Below are implementation details.
+namespace webrtc_impl {
+absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
+ absl::string_view indication_symbols);
+} // namespace webrtc_impl
+
+inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) {
+ this->spatial_id = spatial_layer;
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) {
+ this->temporal_id = temporal_layer;
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis(
+ absl::string_view dtis) {
+ this->decode_target_indications =
+ webrtc_impl::StringToDecodeTargetIndications(dtis);
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs(
+ std::initializer_list<int> diffs) {
+ this->frame_diffs.assign(diffs.begin(), diffs.end());
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs(
+ std::initializer_list<int> diffs) {
+ this->chain_diffs.assign(diffs.begin(), diffs.end());
+ return *this;
+}
+
} // namespace webrtc
#endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_
diff --git a/api/transport/test/mock_network_control.h b/api/transport/test/mock_network_control.h
index 54a416cb77..f613004fb7 100644
--- a/api/transport/test/mock_network_control.h
+++ b/api/transport/test/mock_network_control.h
@@ -18,11 +18,16 @@ namespace webrtc {
class MockNetworkStateEstimator : public NetworkStateEstimator {
public:
- MOCK_METHOD0(GetCurrentEstimate, absl::optional<NetworkStateEstimate>());
- MOCK_METHOD1(OnTransportPacketsFeedback,
- void(const TransportPacketsFeedback&));
- MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&));
- MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&));
+ MOCK_METHOD(absl::optional<NetworkStateEstimate>,
+ GetCurrentEstimate,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ OnTransportPacketsFeedback,
+ (const TransportPacketsFeedback&),
+ (override));
+ MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override));
+ MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override));
};
} // namespace webrtc
diff --git a/api/units/data_rate_unittest.cc b/api/units/data_rate_unittest.cc
index 4a6dd21af3..f77b3702d4 100644
--- a/api/units/data_rate_unittest.cc
+++ b/api/units/data_rate_unittest.cc
@@ -175,7 +175,7 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) {
EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8);
}
-TEST(UnitConversionTest, DivisionFailsOnLargeSize) {
+TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) {
// Note that the failure is expected since the current implementation is
// implementated in a way that does not support division of large sizes. If
// the implementation is changed, this test can safely be removed.
diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn
index 290c2f2abb..ee7e51d4c4 100644
--- a/api/video/BUILD.gn
+++ b/api/video/BUILD.gn
@@ -21,7 +21,6 @@ rtc_library("video_rtp_headers") {
"hdr_metadata.h",
"video_content_type.cc",
"video_content_type.h",
- "video_frame_marking.h",
"video_rotation.h",
"video_timing.cc",
"video_timing.h",
@@ -31,8 +30,8 @@ rtc_library("video_rtp_headers") {
"..:array_view",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_frame") {
@@ -56,8 +55,8 @@ rtc_library("video_frame") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (is_android) {
@@ -141,8 +140,8 @@ rtc_library("encoded_image") {
"../../rtc_base:deprecation",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("encoded_frame") {
@@ -173,8 +172,8 @@ rtc_library("video_bitrate_allocation") {
"../../rtc_base:safe_conversions",
"../../rtc_base:stringutils",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_bitrate_allocator") {
@@ -209,8 +208,8 @@ rtc_source_set("video_stream_decoder") {
"../task_queue",
"../units:time_delta",
"../video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_stream_decoder_create") {
@@ -237,7 +236,10 @@ rtc_library("video_adaptation") {
"video_adaptation_reason.h",
]
- deps = [ "../../rtc_base:checks" ]
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base:stringutils",
+ ]
}
rtc_source_set("video_stream_encoder") {
@@ -256,10 +258,29 @@ rtc_source_set("video_stream_encoder") {
":video_codec_constants",
":video_frame",
"..:rtp_parameters",
+ "..:scoped_refptr",
"../:fec_controller_api",
"../:rtp_parameters",
+ "../adaptation:resource_adaptation_api",
"../units:data_rate",
"../video_codecs:video_codecs_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("video_frame_metadata") {
+ visibility = [ "*" ]
+ sources = [
+ "video_frame_metadata.cc",
+ "video_frame_metadata.h",
+ ]
+ deps = [
+ "..:array_view",
+ "../../modules/rtp_rtcp:rtp_video_header",
+ "../transport/rtp:dependency_descriptor",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -299,16 +320,21 @@ rtc_library("builtin_video_bitrate_allocator_factory") {
"../../modules/video_coding:video_coding_utility",
"../../modules/video_coding:webrtc_vp9_helpers",
"../video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/base:core_headers",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
if (rtc_include_tests) {
rtc_library("video_unittests") {
testonly = true
- sources = [ "video_stream_decoder_create_unittest.cc" ]
+ sources = [
+ "video_frame_metadata_unittest.cc",
+ "video_stream_decoder_create_unittest.cc",
+ ]
deps = [
+ ":video_frame_metadata",
":video_stream_decoder_create",
+ "../../modules/rtp_rtcp:rtp_video_header",
"../../test:test_support",
"../task_queue:default_task_queue_factory",
"../video_codecs:builtin_video_decoder_factory",
diff --git a/api/video/OWNERS.webrtc b/api/video/OWNERS.webrtc
index 315f85e7d0..e4a16c360a 100644
--- a/api/video/OWNERS.webrtc
+++ b/api/video/OWNERS.webrtc
@@ -1,3 +1,4 @@
+brandtr@webrtc.org
magjed@webrtc.org
nisse@webrtc.org
diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h
index d89095f467..35c2584dfa 100644
--- a/api/video/encoded_image.h
+++ b/api/video/encoded_image.h
@@ -21,7 +21,6 @@
#include "api/scoped_refptr.h"
#include "api/video/color_space.h"
#include "api/video/video_codec_constants.h"
-#include "api/video/video_codec_type.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame_type.h"
#include "api/video/video_rotation.h"
diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc
index 7286676ded..df43d78f2a 100644
--- a/api/video/i010_buffer.cc
+++ b/api/video/i010_buffer.cc
@@ -66,12 +66,16 @@ rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
const int width = source.width();
const int height = source.height();
rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+#if 0
RTC_CHECK_EQ(
0, libyuv::I010Copy(
source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
source.DataV(), source.StrideV(), buffer->MutableDataY(),
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), width, height));
+#else
+ abort();
+#endif
return buffer;
}
@@ -81,12 +85,16 @@ rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
const int width = source.width();
const int height = source.height();
rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+#if 0
RTC_CHECK_EQ(
0, libyuv::I420ToI010(
source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
source.DataV(), source.StrideV(), buffer->MutableDataY(),
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), width, height));
+#else
+ abort();
+#endif
return buffer;
}
@@ -150,11 +158,15 @@ rtc::scoped_refptr<I010Buffer> I010Buffer::Rotate(
rtc::scoped_refptr<I420BufferInterface> I010Buffer::ToI420() {
rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(width(), height());
+#if 0
libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
i420_buffer->MutableDataY(), i420_buffer->StrideY(),
i420_buffer->MutableDataU(), i420_buffer->StrideU(),
i420_buffer->MutableDataV(), i420_buffer->StrideV(),
width(), height());
+#else
+ abort();
+#endif
return i420_buffer;
}
diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn
index 5f697a081c..5633371102 100644
--- a/api/video/test/BUILD.gn
+++ b/api/video/test/BUILD.gn
@@ -21,8 +21,8 @@ rtc_library("rtc_api_video_unittests") {
"..:video_frame",
"..:video_rtp_headers",
"../../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("mock_recordable_encoded_frame") {
diff --git a/api/video/test/mock_recordable_encoded_frame.h b/api/video/test/mock_recordable_encoded_frame.h
index 1788a493c6..2178932d2a 100644
--- a/api/video/test/mock_recordable_encoded_frame.h
+++ b/api/video/test/mock_recordable_encoded_frame.h
@@ -17,13 +17,18 @@
namespace webrtc {
class MockRecordableEncodedFrame : public RecordableEncodedFrame {
public:
- MOCK_CONST_METHOD0(encoded_buffer,
- rtc::scoped_refptr<const EncodedImageBufferInterface>());
- MOCK_CONST_METHOD0(color_space, absl::optional<webrtc::ColorSpace>());
- MOCK_CONST_METHOD0(codec, VideoCodecType());
- MOCK_CONST_METHOD0(is_key_frame, bool());
- MOCK_CONST_METHOD0(resolution, EncodedResolution());
- MOCK_CONST_METHOD0(render_time, Timestamp());
+ MOCK_METHOD(rtc::scoped_refptr<const EncodedImageBufferInterface>,
+ encoded_buffer,
+ (),
+ (const, override));
+ MOCK_METHOD(absl::optional<webrtc::ColorSpace>,
+ color_space,
+ (),
+ (const, override));
+ MOCK_METHOD(VideoCodecType, codec, (), (const, override));
+ MOCK_METHOD(bool, is_key_frame, (), (const, override));
+ MOCK_METHOD(EncodedResolution, resolution, (), (const, override));
+ MOCK_METHOD(Timestamp, render_time, (), (const, override));
};
} // namespace webrtc
#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
diff --git a/api/video/video_adaptation_counters.cc b/api/video/video_adaptation_counters.cc
index 25e0bee1ff..df1769d5d4 100644
--- a/api/video/video_adaptation_counters.cc
+++ b/api/video/video_adaptation_counters.cc
@@ -10,6 +10,8 @@
#include "api/video/video_adaptation_counters.h"
+#include "rtc_base/strings/string_builder.h"
+
namespace webrtc {
bool VideoAdaptationCounters::operator==(
@@ -30,4 +32,11 @@ VideoAdaptationCounters VideoAdaptationCounters::operator+(
fps_adaptations + other.fps_adaptations);
}
+std::string VideoAdaptationCounters::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations
+ << " }";
+ return ss.Release();
+}
+
} // namespace webrtc
diff --git a/api/video/video_adaptation_counters.h b/api/video/video_adaptation_counters.h
index eff0baaa21..2dea902f2f 100644
--- a/api/video/video_adaptation_counters.h
+++ b/api/video/video_adaptation_counters.h
@@ -11,6 +11,8 @@
#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+#include <string>
+
#include "rtc_base/checks.h"
namespace webrtc {
@@ -33,6 +35,8 @@ struct VideoAdaptationCounters {
VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;
+ std::string ToString() const;
+
int resolution_adaptations;
int fps_adaptations;
};
diff --git a/api/video/video_frame_marking.h b/api/video/video_frame_marking.h
deleted file mode 100644
index 2a34852f1d..0000000000
--- a/api/video/video_frame_marking.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_VIDEO_VIDEO_FRAME_MARKING_H_
-#define API_VIDEO_VIDEO_FRAME_MARKING_H_
-
-namespace webrtc {
-
-struct FrameMarking {
- bool start_of_frame;
- bool end_of_frame;
- bool independent_frame;
- bool discardable_frame;
- bool base_layer_sync;
- uint8_t temporal_id;
- uint8_t layer_id;
- uint8_t tl0_pic_idx;
-};
-
-} // namespace webrtc
-
-#endif // API_VIDEO_VIDEO_FRAME_MARKING_H_
diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc
new file mode 100644
index 0000000000..df82875eb9
--- /dev/null
+++ b/api/video/video_frame_metadata.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_metadata.h"
+
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+
+namespace webrtc {
+
+VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header)
+ : width_(header.width), height_(header.height) {
+ if (header.generic) {
+ frame_id_ = header.generic->frame_id;
+ spatial_index_ = header.generic->spatial_index;
+ temporal_index_ = header.generic->temporal_index;
+ frame_dependencies_ = header.generic->dependencies;
+ decode_target_indications_ = header.generic->decode_target_indications;
+ }
+}
+
+} // namespace webrtc
diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h
new file mode 100644
index 0000000000..2e9309841b
--- /dev/null
+++ b/api/video/video_frame_metadata.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_
+#define API_VIDEO_VIDEO_FRAME_METADATA_H_
+
+#include <cstdint>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+
+namespace webrtc {
+
+struct RTPVideoHeader;
+
+// A subset of metadata from the RTP video header, exposed in insertable streams
+// API.
+class VideoFrameMetadata {
+ public:
+ explicit VideoFrameMetadata(const RTPVideoHeader& header);
+ VideoFrameMetadata(const VideoFrameMetadata&) = default;
+ VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default;
+
+ uint16_t GetWidth() const { return width_; }
+ uint16_t GetHeight() const { return height_; }
+ absl::optional<int64_t> GetFrameId() const { return frame_id_; }
+ int GetSpatialIndex() const { return spatial_index_; }
+ int GetTemporalIndex() const { return temporal_index_; }
+
+ rtc::ArrayView<const int64_t> GetFrameDependencies() const {
+ return frame_dependencies_;
+ }
+
+ rtc::ArrayView<const DecodeTargetIndication> GetDecodeTargetIndications()
+ const {
+ return decode_target_indications_;
+ }
+
+ private:
+ int16_t width_;
+ int16_t height_;
+ absl::optional<int64_t> frame_id_;
+ int spatial_index_ = 0;
+ int temporal_index_ = 0;
+ absl::InlinedVector<int64_t, 5> frame_dependencies_;
+ absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications_;
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_
diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc
new file mode 100644
index 0000000000..7a808e1ea9
--- /dev/null
+++ b/api/video/video_frame_metadata_unittest.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_metadata.h"
+
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::IsEmpty;
+
+TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetWidth(), video_header.width);
+}
+
+TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ video_header.height = 720u;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetHeight(), video_header.height);
+}
+
+TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetFrameId().value(), 10);
+}
+
+TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetFrameId(), absl::nullopt);
+}
+
+TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.spatial_index = 2;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+}
+
+TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 0);
+}
+
+TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.temporal_index = 3;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+}
+
+TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 0);
+}
+
+TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.dependencies = {5, 6, 7};
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7));
+}
+
+TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty());
+}
+
+TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+}
+
+TEST(VideoFrameMetadata,
+ DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/api/video/video_stream_encoder_interface.h b/api/video/video_stream_encoder_interface.h
index 8e1df0f858..d8dd8e1599 100644
--- a/api/video/video_stream_encoder_interface.h
+++ b/api/video/video_stream_encoder_interface.h
@@ -13,8 +13,10 @@
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/fec_controller_override.h"
#include "api/rtp_parameters.h" // For DegradationPreference.
+#include "api/scoped_refptr.h"
#include "api/units/data_rate.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video/video_sink_interface.h"
@@ -49,6 +51,15 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface<VideoFrame> {
int min_transmit_bitrate_bps) = 0;
};
+ // If the resource is overusing, the VideoStreamEncoder will try to reduce
+ // resolution or frame rate until no resource is overusing.
+ // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor
+ // is moved to Call this method could be deleted altogether in favor of
+ // Call-level APIs only.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>>
+ GetAdaptationResources() = 0;
+
// Sets the source that will provide video frames to the VideoStreamEncoder's
// OnFrame method. |degradation_preference| control whether or not resolution
// or frame rate may be reduced. The VideoStreamEncoder registers itself with
diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn
index 21a5f6faa0..597478ba0a 100644
--- a/api/video_codecs/BUILD.gn
+++ b/api/video_codecs/BUILD.gn
@@ -49,6 +49,8 @@ rtc_library("video_codecs_api") {
"../video:video_codec_constants",
"../video:video_frame",
"../video:video_rtp_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
@@ -100,8 +102,8 @@ rtc_library("builtin_video_encoder_factory") {
"../../media:rtc_media_base",
"../../rtc_base:checks",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("vp8_temporal_layers_factory") {
@@ -148,6 +150,8 @@ rtc_library("rtc_software_fallback_wrappers") {
"../video:video_bitrate_allocation",
"../video:video_frame",
"../video:video_rtp_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn
index 243b78267f..10b18b6e5b 100644
--- a/api/video_codecs/test/BUILD.gn
+++ b/api/video_codecs/test/BUILD.gn
@@ -40,5 +40,6 @@ if (rtc_include_tests) {
"../../video:video_rtp_headers",
"//testing/gtest",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc
index ee61893563..30d5287c94 100644
--- a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc
+++ b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc
@@ -218,6 +218,68 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
fallback_wrapper_->Release();
}
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, FallbacksOnTooManyErrors) {
+ VideoCodec codec = {};
+ fallback_wrapper_->InitDecode(&codec, 2);
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ // Doesn't fallback from a single error.
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+
+ // However, many frames with the same error, fallback should happen.
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ // Hard coded expected value since libvpx is the software implementation name
+ // for VP8. Change accordingly if the underlying implementation does.
+ EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
+ fallback_wrapper_->ImplementationName());
+ fallback_wrapper_->Release();
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ DoesNotFallbackOnDeltaFramesErrors) {
+ VideoCodec codec = {};
+ fallback_wrapper_->InitDecode(&codec, 2);
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
+
+ // Many decoded frames with the same error
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+
+ fallback_wrapper_->Release();
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ DoesNotFallbacksOnNonConsequtiveErrors) {
+ VideoCodec codec = {};
+ fallback_wrapper_->InitDecode(&codec, 2);
+
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ // Interleaved errors and successful decodes.
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+ fallback_wrapper_->Release();
+}
+
class ForcedSoftwareDecoderFallbackTest
: public VideoDecoderSoftwareFallbackWrapperTest {
public:
diff --git a/api/video_codecs/video_codec.h b/api/video_codecs/video_codec.h
index 330bbbce19..c07fae9b8b 100644
--- a/api/video_codecs/video_codec.h
+++ b/api/video_codecs/video_codec.h
@@ -19,7 +19,7 @@
#include "absl/types/optional.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_codec_type.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT(build/include_directory)
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
diff --git a/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/api/video_codecs/video_decoder_software_fallback_wrapper.cc
index f78d9b885f..128087f207 100644
--- a/api/video_codecs/video_decoder_software_fallback_wrapper.cc
+++ b/api/video_codecs/video_decoder_software_fallback_wrapper.cc
@@ -30,6 +30,8 @@ namespace webrtc {
namespace {
+constexpr size_t kMaxConsequtiveHwErrors = 4;
+
class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder {
public:
VideoDecoderSoftwareFallbackWrapper(
@@ -74,6 +76,7 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder {
const std::string fallback_implementation_name_;
DecodedImageCallback* callback_;
int32_t hw_decoded_frames_since_last_fallback_;
+ size_t hw_consequtive_generic_errors_;
};
VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper(
@@ -86,7 +89,8 @@ VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper(
std::string(fallback_decoder_->ImplementationName()) +
" (fallback from: " + hw_decoder_->ImplementationName() + ")"),
callback_(nullptr),
- hw_decoded_frames_since_last_fallback_(0) {}
+ hw_decoded_frames_since_last_fallback_(0),
+ hw_consequtive_generic_errors_(0) {}
VideoDecoderSoftwareFallbackWrapper::~VideoDecoderSoftwareFallbackWrapper() =
default;
@@ -196,14 +200,24 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode(
int32_t ret = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
ret = hw_decoder_->Decode(input_image, missing_frames, render_time_ms);
if (ret != WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE) {
- if (ret == WEBRTC_VIDEO_CODEC_OK) {
+ if (ret != WEBRTC_VIDEO_CODEC_ERROR) {
++hw_decoded_frames_since_last_fallback_;
+ hw_consequtive_generic_errors_ = 0;
+ return ret;
+ }
+ if (input_image._frameType == VideoFrameType::kVideoFrameKey) {
+ // Only count errors on key-frames, since generic errors can happen
+ // with hw decoder due to many arbitrary reasons.
+ // However, requesting a key-frame is supposed to fix the issue.
+ ++hw_consequtive_generic_errors_;
+ }
+ if (hw_consequtive_generic_errors_ < kMaxConsequtiveHwErrors) {
+ return ret;
}
- return ret;
}
// HW decoder returned WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE or
- // initialization failed, fallback to software.
+ // too many generic errors on key-frames encountered.
if (!InitFallbackDecoder()) {
return ret;
}
diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h
index 630b7aa70c..c396090ea6 100644
--- a/api/video_codecs/video_encoder_factory.h
+++ b/api/video_codecs/video_encoder_factory.h
@@ -30,13 +30,13 @@ class VideoEncoderFactory {
struct CodecInfo {
// |is_hardware_accelerated| is true if the encoders created by this factory
// of the given codec will use hardware support.
- bool is_hardware_accelerated;
+ bool is_hardware_accelerated = false;
// |has_internal_source| is true if encoders created by this factory of the
// given codec will use internal camera sources, meaning that they don't
// require/expect frames to be delivered via webrtc::VideoEncoder::Encode.
// This flag is used as the internal_source parameter to
// webrtc::ViEExternalCodec::RegisterExternalSendCodec.
- bool has_internal_source;
+ bool has_internal_source = false;
};
// An injectable class that is continuously updated with encoding conditions
diff --git a/api/voip/BUILD.gn b/api/voip/BUILD.gn
index 2c5f71c988..6f92ed67f4 100644
--- a/api/voip/BUILD.gn
+++ b/api/voip/BUILD.gn
@@ -19,8 +19,8 @@ rtc_source_set("voip_api") {
deps = [
"..:array_view",
"../audio_codecs:audio_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("voip_engine_factory") {
diff --git a/audio/BUILD.gn b/audio/BUILD.gn
index 725128bb1a..78f6affe84 100644
--- a/audio/BUILD.gn
+++ b/audio/BUILD.gn
@@ -71,6 +71,7 @@ rtc_library("audio") {
"../modules/audio_coding:audio_coding_module_typedefs",
"../modules/audio_coding:audio_encoder_cng",
"../modules/audio_coding:audio_network_adaptor_config",
+ "../modules/audio_coding:red",
"../modules/audio_device",
"../modules/audio_processing",
"../modules/audio_processing:api",
@@ -89,12 +90,15 @@ rtc_library("audio") {
"../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax",
"../rtc_base/experiments:field_trial_parser",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
"utility:audio_frame_operations",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -202,6 +206,7 @@ if (rtc_include_tests) {
"../api:network_emulation_manager_api",
"../api:peer_connection_quality_test_fixture_api",
"../api:simulated_network_api",
+ "../api:time_controller",
"../call:simulated_network",
"../common_audio",
"../system_wrappers",
diff --git a/audio/audio_level.cc b/audio/audio_level.cc
index 06702b4c0d..7874b73f1c 100644
--- a/audio/audio_level.cc
+++ b/audio/audio_level.cc
@@ -22,7 +22,7 @@ AudioLevel::AudioLevel()
AudioLevel::~AudioLevel() {}
void AudioLevel::Reset() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
abs_max_ = 0;
count_ = 0;
current_level_full_range_ = 0;
@@ -31,24 +31,24 @@ void AudioLevel::Reset() {
}
int16_t AudioLevel::LevelFullRange() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return current_level_full_range_;
}
void AudioLevel::ResetLevelFullRange() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
abs_max_ = 0;
count_ = 0;
current_level_full_range_ = 0;
}
double AudioLevel::TotalEnergy() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return total_energy_;
}
double AudioLevel::TotalDuration() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return total_duration_;
}
@@ -63,7 +63,7 @@ void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) {
// Protect member access using a lock since this method is called on a
// dedicated audio thread in the RecordedDataIsAvailable() callback.
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
if (abs_value > abs_max_)
abs_max_ = abs_value;
diff --git a/audio/audio_level.h b/audio/audio_level.h
index 430edb1703..acd1231fe2 100644
--- a/audio/audio_level.h
+++ b/audio/audio_level.h
@@ -11,7 +11,7 @@
#ifndef AUDIO_AUDIO_LEVEL_H_
#define AUDIO_AUDIO_LEVEL_H_
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -59,14 +59,14 @@ class AudioLevel {
private:
enum { kUpdateFrequency = 10 };
- rtc::CriticalSection crit_sect_;
+ mutable Mutex mutex_;
- int16_t abs_max_ RTC_GUARDED_BY(crit_sect_);
- int16_t count_ RTC_GUARDED_BY(crit_sect_);
- int16_t current_level_full_range_ RTC_GUARDED_BY(crit_sect_);
+ int16_t abs_max_ RTC_GUARDED_BY(mutex_);
+ int16_t count_ RTC_GUARDED_BY(mutex_);
+ int16_t current_level_full_range_ RTC_GUARDED_BY(mutex_);
- double total_energy_ RTC_GUARDED_BY(crit_sect_) = 0.0;
- double total_duration_ RTC_GUARDED_BY(crit_sect_) = 0.0;
+ double total_energy_ RTC_GUARDED_BY(mutex_) = 0.0;
+ double total_duration_ RTC_GUARDED_BY(mutex_) = 0.0;
};
} // namespace voe
diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc
index 8730c45258..1856902d5e 100644
--- a/audio/audio_send_stream.cc
+++ b/audio/audio_send_stream.cc
@@ -31,6 +31,7 @@
#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
#include "modules/audio_coding/codecs/cng/audio_encoder_cng.h"
+#include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "rtc_base/checks.h"
@@ -115,18 +116,20 @@ AudioSendStream::AudioSendStream(
bitrate_allocator,
event_log,
suspended_rtp_state,
- voe::CreateChannelSend(clock,
- task_queue_factory,
- module_process_thread,
- config.send_transport,
- rtcp_rtt_stats,
- event_log,
- config.frame_encryptor,
- config.crypto_options,
- config.rtp.extmap_allow_mixed,
- config.rtcp_report_interval_ms,
- config.rtp.ssrc,
- config.frame_transformer)) {}
+ voe::CreateChannelSend(
+ clock,
+ task_queue_factory,
+ module_process_thread,
+ config.send_transport,
+ rtcp_rtt_stats,
+ event_log,
+ config.frame_encryptor,
+ config.crypto_options,
+ config.rtp.extmap_allow_mixed,
+ config.rtcp_report_interval_ms,
+ config.rtp.ssrc,
+ config.frame_transformer,
+ rtp_transport->transport_feedback_observer())) {}
AudioSendStream::AudioSendStream(
Clock* clock,
@@ -344,7 +347,7 @@ void AudioSendStream::ConfigureStream(
// Set currently known overhead (used in ANA, opus only).
{
- rtc::CritScope cs(&overhead_per_packet_lock_);
+ MutexLock lock(&overhead_per_packet_lock_);
UpdateOverheadForEncoder();
}
@@ -419,7 +422,7 @@ void AudioSendStream::SendAudioData(std::unique_ptr<AudioFrame> audio_frame) {
// TODO(https://crbug.com/webrtc/10771): All "media-source" related stats
// should move from send-streams to the local audio sources or tracks; a
// send-stream should not be required to read the microphone audio levels.
- rtc::CritScope cs(&audio_level_lock_);
+ MutexLock lock(&audio_level_lock_);
audio_level_.ComputeLevel(*audio_frame, duration);
}
channel_send_->ProcessAndEncodeAudio(std::move(audio_frame));
@@ -485,7 +488,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats(
}
{
- rtc::CritScope cs(&audio_level_lock_);
+ MutexLock lock(&audio_level_lock_);
stats.audio_level = audio_level_.LevelFullRange();
stats.total_input_energy = audio_level_.TotalEnergy();
stats.total_input_duration = audio_level_.TotalDuration();
@@ -505,15 +508,12 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats(
}
void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- // TODO(solenberg): Tests call this function on a network thread, libjingle
- // calls on the worker thread. We should move towards always using a network
- // thread. Then this check can be enabled.
- // RTC_DCHECK(!worker_thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
channel_send_->ReceivedRTCPPacket(packet, length);
worker_queue_->PostTask([&]() {
// Poll if overhead has changed, which it can do if ack triggers us to stop
// sending mid/rid.
- rtc::CritScope cs(&overhead_per_packet_lock_);
+ MutexLock lock(&overhead_per_packet_lock_);
UpdateOverheadForEncoder();
});
}
@@ -538,16 +538,18 @@ uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) {
void AudioSendStream::SetTransportOverhead(
int transport_overhead_per_packet_bytes) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope cs(&overhead_per_packet_lock_);
+ MutexLock lock(&overhead_per_packet_lock_);
transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes;
UpdateOverheadForEncoder();
}
void AudioSendStream::UpdateOverheadForEncoder() {
- const size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes();
- if (overhead_per_packet_bytes == 0) {
- return; // Overhead is not known yet, do not tell the encoder.
+ size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes();
+ if (overhead_per_packet_ == overhead_per_packet_bytes) {
+ return;
}
+ overhead_per_packet_ = overhead_per_packet_bytes;
+
channel_send_->CallEncoder([&](AudioEncoder* encoder) {
encoder->OnReceivedOverhead(overhead_per_packet_bytes);
});
@@ -568,7 +570,7 @@ void AudioSendStream::UpdateOverheadForEncoder() {
}
size_t AudioSendStream::TestOnlyGetPerPacketOverheadBytes() const {
- rtc::CritScope cs(&overhead_per_packet_lock_);
+ MutexLock lock(&overhead_per_packet_lock_);
return GetPerPacketOverheadBytes();
}
@@ -644,7 +646,7 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) {
}
}
- // Wrap the encoder in a an AudioEncoderCNG, if VAD is enabled.
+ // Wrap the encoder in an AudioEncoderCNG, if VAD is enabled.
if (spec.cng_payload_type) {
AudioEncoderCngConfig cng_config;
cng_config.num_channels = encoder->NumChannels();
@@ -657,10 +659,18 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) {
new_config.send_codec_spec->format.clockrate_hz);
}
+ // Wrap the encoder in a RED encoder, if RED is enabled.
+ if (spec.red_payload_type) {
+ AudioEncoderCopyRed::Config red_config;
+ red_config.payload_type = *spec.red_payload_type;
+ red_config.speech_encoder = std::move(encoder);
+ encoder = std::make_unique<AudioEncoderCopyRed>(std::move(red_config));
+ }
+
// Set currently known overhead (used in ANA, opus only).
// If overhead changes later, it will be updated in UpdateOverheadForEncoder.
{
- rtc::CritScope cs(&overhead_per_packet_lock_);
+ MutexLock lock(&overhead_per_packet_lock_);
size_t overhead = GetPerPacketOverheadBytes();
if (overhead > 0) {
encoder->OnReceivedOverhead(overhead);
diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h
index 92e9a7fb16..7bc3183123 100644
--- a/audio/audio_send_stream.h
+++ b/audio/audio_send_stream.h
@@ -20,10 +20,11 @@
#include "call/audio_send_stream.h"
#include "call/audio_state.h"
#include "call/bitrate_allocator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/experiments/struct_parameters_parser.h"
#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
@@ -166,16 +167,16 @@ class AudioSendStream final : public webrtc::AudioSendStream,
int encoder_sample_rate_hz_ = 0;
size_t encoder_num_channels_ = 0;
bool sending_ = false;
- rtc::CriticalSection audio_level_lock_;
+ mutable Mutex audio_level_lock_;
// Keeps track of audio level, total audio energy and total samples duration.
// https://w3c.github.io/webrtc-stats/#dom-rtcaudiohandlerstats-totalaudioenergy
- webrtc::voe::AudioLevel audio_level_;
+ webrtc::voe::AudioLevel audio_level_ RTC_GUARDED_BY(audio_level_lock_);
BitrateAllocatorInterface* const bitrate_allocator_
RTC_GUARDED_BY(worker_queue_);
RtpTransportControllerSendInterface* const rtp_transport_;
- RtpRtcp* const rtp_rtcp_module_;
+ RtpRtcpInterface* const rtp_rtcp_module_;
absl::optional<RtpState> const suspended_rtp_state_;
// RFC 5285: Each distinct extension MUST have a unique ID. The value 0 is
@@ -194,7 +195,8 @@ class AudioSendStream final : public webrtc::AudioSendStream,
const std::vector<RtpExtension>& extensions);
static int TransportSeqNumId(const Config& config);
- rtc::CriticalSection overhead_per_packet_lock_;
+ mutable Mutex overhead_per_packet_lock_;
+ size_t overhead_per_packet_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0;
// Current transport overhead (ICE, TURN, etc.)
size_t transport_overhead_per_packet_bytes_
diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc
index 334fdf50f7..d094198721 100644
--- a/audio/audio_send_stream_unittest.cc
+++ b/audio/audio_send_stream_unittest.cc
@@ -89,7 +89,10 @@ const DataRate kMaxOverheadRate = kOverheadPerPacket / kMinFrameLength;
class MockLimitObserver : public BitrateAllocator::LimitObserver {
public:
- MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits));
+ MOCK_METHOD(void,
+ OnAllocationLimitsChanged,
+ (BitrateAllocationLimits),
+ (override));
};
std::unique_ptr<MockAudioEncoder> SetupAudioEncoderMock(
@@ -200,7 +203,7 @@ struct ConfigHelper {
return *static_cast<MockAudioEncoderFactory*>(
stream_config_.encoder_factory.get());
}
- MockRtpRtcp* rtp_rtcp() { return &rtp_rtcp_; }
+ MockRtpRtcpInterface* rtp_rtcp() { return &rtp_rtcp_; }
MockChannelSend* channel_send() { return channel_send_; }
RtpTransportControllerSendInterface* transport() { return &rtp_transport_; }
@@ -247,12 +250,12 @@ struct ConfigHelper {
void SetupMockForSetupSendCodec(bool expect_set_encoder_call) {
if (expect_set_encoder_call) {
- EXPECT_CALL(*channel_send_, SetEncoderForMock(_, _))
- .WillOnce(Invoke(
- [this](int payload_type, std::unique_ptr<AudioEncoder>* encoder) {
- this->audio_encoder_ = std::move(*encoder);
+ EXPECT_CALL(*channel_send_, SetEncoder)
+ .WillOnce(
+ [this](int payload_type, std::unique_ptr<AudioEncoder> encoder) {
+ this->audio_encoder_ = std::move(encoder);
return true;
- }));
+ });
}
}
@@ -329,7 +332,7 @@ struct ConfigHelper {
::testing::StrictMock<MockRtcpBandwidthObserver> bandwidth_observer_;
::testing::NiceMock<MockRtcEventLog> event_log_;
::testing::NiceMock<MockRtpTransportControllerSend> rtp_transport_;
- ::testing::NiceMock<MockRtpRtcp> rtp_rtcp_;
+ ::testing::NiceMock<MockRtpRtcpInterface> rtp_rtcp_;
::testing::NiceMock<MockLimitObserver> limit_observer_;
BitrateAllocator bitrate_allocator_;
// |worker_queue| is defined last to ensure all pending tasks are cancelled
@@ -368,6 +371,7 @@ TEST(AudioSendStreamTest, ConfigToString) {
config.send_codec_spec->nack_enabled = true;
config.send_codec_spec->transport_cc_enabled = false;
config.send_codec_spec->cng_payload_type = 42;
+ config.send_codec_spec->red_payload_type = 43;
config.encoder_factory = MockAudioEncoderFactory::CreateUnusedFactory();
config.rtp.extmap_allow_mixed = true;
config.rtp.extensions.push_back(
@@ -380,7 +384,7 @@ TEST(AudioSendStreamTest, ConfigToString) {
"send_transport: null, "
"min_bitrate_bps: 12000, max_bitrate_bps: 34000, "
"send_codec_spec: {nack_enabled: true, transport_cc_enabled: false, "
- "cng_payload_type: 42, payload_type: 103, "
+ "cng_payload_type: 42, red_payload_type: 43, payload_type: 103, "
"format: {name: isac, clockrate_hz: 16000, num_channels: 1, "
"parameters: {}}}}",
config.ToString());
@@ -473,7 +477,7 @@ TEST(AudioSendStreamTest, GetStatsAudioLevel) {
ConfigHelper helper(false, true, use_null_audio_processing);
auto send_stream = helper.CreateAudioSendStream();
helper.SetupMockForGetStats(use_null_audio_processing);
- EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudioForMock(_))
+ EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudio)
.Times(AnyNumber());
constexpr int kSampleRateHz = 48000;
@@ -558,15 +562,13 @@ TEST(AudioSendStreamTest, SendCodecCanApplyVad) {
helper.config().send_codec_spec =
AudioSendStream::Config::SendCodecSpec(9, kG722Format);
helper.config().send_codec_spec->cng_payload_type = 105;
- using ::testing::Invoke;
std::unique_ptr<AudioEncoder> stolen_encoder;
- EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _))
- .WillOnce(
- Invoke([&stolen_encoder](int payload_type,
- std::unique_ptr<AudioEncoder>* encoder) {
- stolen_encoder = std::move(*encoder);
- return true;
- }));
+ EXPECT_CALL(*helper.channel_send(), SetEncoder)
+ .WillOnce([&stolen_encoder](int payload_type,
+ std::unique_ptr<AudioEncoder> encoder) {
+ stolen_encoder = std::move(encoder);
+ return true;
+ });
EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000));
auto send_stream = helper.CreateAudioSendStream();
@@ -748,8 +750,7 @@ TEST(AudioSendStreamTest, DontRecreateEncoder) {
// test to be correct, it's instead set-up manually here. Otherwise a simple
// change to ConfigHelper (say to WillRepeatedly) would silently make this
// test useless.
- EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _))
- .WillOnce(Return());
+ EXPECT_CALL(*helper.channel_send(), SetEncoder).WillOnce(Return());
EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000));
@@ -794,7 +795,7 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) {
auto new_config = helper.config();
// CallEncoder will be called on overhead change.
- EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(1);
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
const size_t transport_overhead_per_packet_bytes = 333;
send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
@@ -804,6 +805,27 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) {
}
}
+TEST(AudioSendStreamTest, DoesntCallEncoderWhenOverheadUnchanged) {
+ for (bool use_null_audio_processing : {false, true}) {
+ ConfigHelper helper(false, true, use_null_audio_processing);
+ auto send_stream = helper.CreateAudioSendStream();
+ auto new_config = helper.config();
+
+ // CallEncoder will be called on overhead change.
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
+ const size_t transport_overhead_per_packet_bytes = 333;
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
+
+ // Set the same overhead again, CallEncoder should not be called again.
+ EXPECT_CALL(*helper.channel_send(), CallEncoder).Times(0);
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
+
+ // New overhead, call CallEncoder again
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes + 1);
+ }
+}
+
TEST(AudioSendStreamTest, AudioOverheadChanged) {
for (bool use_null_audio_processing : {false, true}) {
ConfigHelper helper(false, true, use_null_audio_processing);
diff --git a/audio/audio_state.h b/audio/audio_state.h
index f696d5a8fe..70c7208320 100644
--- a/audio/audio_state.h
+++ b/audio/audio_state.h
@@ -19,7 +19,6 @@
#include "audio/null_audio_poller.h"
#include "call/audio_state.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/thread_checker.h"
diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc
index 76e08c549c..2bbe0fb0b7 100644
--- a/audio/audio_state_unittest.cc
+++ b/audio/audio_state_unittest.cc
@@ -60,8 +60,10 @@ class FakeAudioSource : public AudioMixer::Source {
int PreferredSampleRate() const /*override*/ { return kSampleRate; }
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame));
+ MOCK_METHOD(AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame*),
+ (override));
};
std::vector<int16_t> Create10msTestData(int sample_rate_hz,
diff --git a/audio/audio_transport_impl.cc b/audio/audio_transport_impl.cc
index 7648fb948f..11b37ffcf1 100644
--- a/audio/audio_transport_impl.cc
+++ b/audio/audio_transport_impl.cc
@@ -118,7 +118,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
size_t send_num_channels = 0;
bool swap_stereo_channels = false;
{
- rtc::CritScope lock(&capture_lock_);
+ MutexLock lock(&capture_lock_);
send_sample_rate_hz = send_sample_rate_hz_;
send_num_channels = send_num_channels_;
swap_stereo_channels = swap_stereo_channels_;
@@ -149,7 +149,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
// Copy frame and push to each sending stream. The copy is required since an
// encoding task will be posted internally to each stream.
{
- rtc::CritScope lock(&capture_lock_);
+ MutexLock lock(&capture_lock_);
typing_noise_detected_ = typing_detected;
RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0);
@@ -237,19 +237,19 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample,
void AudioTransportImpl::UpdateAudioSenders(std::vector<AudioSender*> senders,
int send_sample_rate_hz,
size_t send_num_channels) {
- rtc::CritScope lock(&capture_lock_);
+ MutexLock lock(&capture_lock_);
audio_senders_ = std::move(senders);
send_sample_rate_hz_ = send_sample_rate_hz;
send_num_channels_ = send_num_channels;
}
void AudioTransportImpl::SetStereoChannelSwapping(bool enable) {
- rtc::CritScope lock(&capture_lock_);
+ MutexLock lock(&capture_lock_);
swap_stereo_channels_ = enable;
}
bool AudioTransportImpl::typing_noise_detected() const {
- rtc::CritScope lock(&capture_lock_);
+ MutexLock lock(&capture_lock_);
return typing_noise_detected_;
}
} // namespace webrtc
diff --git a/audio/audio_transport_impl.h b/audio/audio_transport_impl.h
index 2d9b4cf3a1..1643a29970 100644
--- a/audio/audio_transport_impl.h
+++ b/audio/audio_transport_impl.h
@@ -20,7 +20,7 @@
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/audio_processing/typing_detection.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -71,7 +71,7 @@ class AudioTransportImpl : public AudioTransport {
AudioProcessing* audio_processing_ = nullptr;
// Capture side.
- rtc::CriticalSection capture_lock_;
+ mutable Mutex capture_lock_;
std::vector<AudioSender*> audio_senders_ RTC_GUARDED_BY(capture_lock_);
int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000;
size_t send_num_channels_ RTC_GUARDED_BY(capture_lock_) = 1;
diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc
index 66b4bb11f5..9cbaabbbb0 100644
--- a/audio/channel_receive.cc
+++ b/audio/channel_receive.cc
@@ -33,19 +33,19 @@
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_minmax.h"
#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/metrics.h"
@@ -188,7 +188,7 @@ class ChannelReceive : public ChannelReceiveInterface {
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer);
bool Playing() const {
- rtc::CritScope lock(&playing_lock_);
+ MutexLock lock(&playing_lock_);
return playing_;
}
@@ -204,10 +204,10 @@ class ChannelReceive : public ChannelReceiveInterface {
// audio thread to another, but access is still sequential.
rtc::RaceChecker audio_thread_race_checker_;
rtc::RaceChecker video_capture_thread_race_checker_;
- rtc::CriticalSection _callbackCritSect;
- rtc::CriticalSection volume_settings_critsect_;
+ Mutex callback_mutex_;
+ Mutex volume_settings_mutex_;
- rtc::CriticalSection playing_lock_;
+ mutable Mutex playing_lock_;
bool playing_ RTC_GUARDED_BY(&playing_lock_) = false;
RtcEventLog* const event_log_;
@@ -216,12 +216,12 @@ class ChannelReceive : public ChannelReceiveInterface {
std::map<uint8_t, int> payload_type_frequencies_;
std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
- std::unique_ptr<RtpRtcp> _rtpRtcpModule;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
const uint32_t remote_ssrc_;
// Info for GetSyncInfo is updated on network or worker thread, and queried on
// the worker thread.
- rtc::CriticalSection sync_info_lock_;
+ mutable Mutex sync_info_lock_;
absl::optional<uint32_t> last_received_rtp_timestamp_
RTC_GUARDED_BY(&sync_info_lock_);
absl::optional<int64_t> last_received_rtp_system_time_ms_
@@ -237,7 +237,7 @@ class ChannelReceive : public ChannelReceiveInterface {
// Timestamp of the audio pulled from NetEq.
absl::optional<uint32_t> jitter_buffer_playout_timestamp_;
- rtc::CriticalSection video_sync_lock_;
+ mutable Mutex video_sync_lock_;
uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_);
absl::optional<int64_t> playout_timestamp_rtp_time_ms_
RTC_GUARDED_BY(video_sync_lock_);
@@ -247,7 +247,7 @@ class ChannelReceive : public ChannelReceiveInterface {
absl::optional<int64_t> playout_timestamp_ntp_time_ms_
RTC_GUARDED_BY(video_sync_lock_);
- rtc::CriticalSection ts_stats_lock_;
+ mutable Mutex ts_stats_lock_;
std::unique_ptr<rtc::TimestampWrapAroundHandler> rtp_ts_wraparound_handler_;
// The rtp timestamp of the first played out audio frame.
@@ -259,10 +259,10 @@ class ChannelReceive : public ChannelReceiveInterface {
// uses
ProcessThread* _moduleProcessThreadPtr;
AudioDeviceModule* _audioDeviceModulePtr;
- float _outputGain RTC_GUARDED_BY(volume_settings_critsect_);
+ float _outputGain RTC_GUARDED_BY(volume_settings_mutex_);
// An associated send channel.
- rtc::CriticalSection assoc_send_channel_lock_;
+ mutable Mutex assoc_send_channel_lock_;
const ChannelSendInterface* associated_send_channel_
RTC_GUARDED_BY(assoc_send_channel_lock_);
@@ -297,7 +297,7 @@ void ChannelReceive::OnReceivedPayloadData(
}
int64_t round_trip_time = 0;
- _rtpRtcpModule->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL);
+ rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL);
std::vector<uint16_t> nack_list = acm_receiver_.GetNackList(round_trip_time);
if (!nack_list.empty()) {
@@ -359,7 +359,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo(
// scaling/panning, as that applies to the mix operation.
// External recipients of the audio (e.g. via AudioTrack), will do their
// own mixing/dynamic processing.
- rtc::CritScope cs(&_callbackCritSect);
+ MutexLock lock(&callback_mutex_);
if (audio_sink_) {
AudioSinkInterface::Data data(
audio_frame->data(), audio_frame->samples_per_channel_,
@@ -371,7 +371,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo(
float output_gain = 1.0f;
{
- rtc::CritScope cs(&volume_settings_critsect_);
+ MutexLock lock(&volume_settings_mutex_);
output_gain = _outputGain;
}
@@ -403,7 +403,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo(
(GetRtpTimestampRateHz() / 1000);
{
- rtc::CritScope lock(&ts_stats_lock_);
+ MutexLock lock(&ts_stats_lock_);
// Compute ntp time.
audio_frame->ntp_time_ms_ =
ntp_estimator_.Estimate(audio_frame->timestamp_);
@@ -421,7 +421,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo(
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs",
acm_receiver_.TargetDelayMs());
const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs();
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs",
jitter_buffer_delay + playout_delay_ms_);
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs",
@@ -495,7 +495,7 @@ ChannelReceive::ChannelReceive(
_outputAudioLevel.ResetLevelFullRange();
rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc_, true);
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = true;
configuration.receiver_only = true;
@@ -507,14 +507,14 @@ ChannelReceive::ChannelReceive(
if (frame_transformer)
InitFrameTransformerDelegate(std::move(frame_transformer));
- _rtpRtcpModule = RtpRtcp::Create(configuration);
- _rtpRtcpModule->SetSendingMediaStatus(false);
- _rtpRtcpModule->SetRemoteSSRC(remote_ssrc_);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp_->SetSendingMediaStatus(false);
+ rtp_rtcp_->SetRemoteSSRC(remote_ssrc_);
- _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE);
+ _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
// Ensure that RTCP is enabled for the created channel.
- _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
+ rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
}
ChannelReceive::~ChannelReceive() {
@@ -527,24 +527,24 @@ ChannelReceive::~ChannelReceive() {
StopPlayout();
if (_moduleProcessThreadPtr)
- _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
+ _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get());
}
void ChannelReceive::SetSink(AudioSinkInterface* sink) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope cs(&_callbackCritSect);
+ MutexLock lock(&callback_mutex_);
audio_sink_ = sink;
}
void ChannelReceive::StartPlayout() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope lock(&playing_lock_);
+ MutexLock lock(&playing_lock_);
playing_ = true;
}
void ChannelReceive::StopPlayout() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope lock(&playing_lock_);
+ MutexLock lock(&playing_lock_);
playing_ = false;
_outputAudioLevel.ResetLevelFullRange();
}
@@ -570,7 +570,7 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) {
int64_t now_ms = rtc::TimeMillis();
{
- rtc::CritScope cs(&sync_info_lock_);
+ MutexLock lock(&sync_info_lock_);
last_received_rtp_timestamp_ = packet.Timestamp();
last_received_rtp_system_time_ms_ = now_ms;
}
@@ -659,7 +659,7 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
UpdatePlayoutTimestamp(true, rtc::TimeMillis());
// Deliver RTCP packet to RTP/RTCP module for parsing
- _rtpRtcpModule->IncomingRtcpPacket(data, length);
+ rtp_rtcp_->IncomingRtcpPacket(data, length);
int64_t rtt = GetRTT();
if (rtt == 0) {
@@ -670,14 +670,14 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0;
- if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
- &rtp_timestamp)) {
+ if (0 !=
+ rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) {
// Waiting for RTCP.
return;
}
{
- rtc::CritScope lock(&ts_stats_lock_);
+ MutexLock lock(&ts_stats_lock_);
ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
}
}
@@ -699,7 +699,7 @@ double ChannelReceive::GetTotalOutputDuration() const {
void ChannelReceive::SetChannelOutputVolumeScaling(float scaling) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope cs(&volume_settings_critsect_);
+ MutexLock lock(&volume_settings_mutex_);
_outputGain = scaling;
}
@@ -709,14 +709,14 @@ void ChannelReceive::RegisterReceiverCongestionControlObjects(
RTC_DCHECK(packet_router);
RTC_DCHECK(!packet_router_);
constexpr bool remb_candidate = false;
- packet_router->AddReceiveRtpModule(_rtpRtcpModule.get(), remb_candidate);
+ packet_router->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
packet_router_ = packet_router;
}
void ChannelReceive::ResetReceiverCongestionControlObjects() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
RTC_DCHECK(packet_router_);
- packet_router_->RemoveReceiveRtpModule(_rtpRtcpModule.get());
+ packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get());
packet_router_ = nullptr;
}
@@ -759,7 +759,7 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const {
// --- Timestamps
{
- rtc::CritScope lock(&ts_stats_lock_);
+ MutexLock lock(&ts_stats_lock_);
stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
}
return stats;
@@ -781,13 +781,13 @@ void ChannelReceive::SetNACKStatus(bool enable, int max_packets) {
// Called when we are missing one or more packets.
int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers,
int length) {
- return _rtpRtcpModule->SendNACK(sequence_numbers, length);
+ return rtp_rtcp_->SendNACK(sequence_numbers, length);
}
void ChannelReceive::SetAssociatedSendChannel(
const ChannelSendInterface* channel) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope lock(&assoc_send_channel_lock_);
+ MutexLock lock(&assoc_send_channel_lock_);
associated_send_channel_ = channel;
}
@@ -818,7 +818,7 @@ AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const {
uint32_t ChannelReceive::GetDelayEstimate() const {
RTC_DCHECK(worker_thread_checker_.IsCurrent() ||
module_process_thread_checker_.IsCurrent());
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_;
}
@@ -838,7 +838,7 @@ bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
int64_t* time_ms) const {
RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
{
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
if (!playout_timestamp_rtp_time_ms_)
return false;
*rtp_timestamp = playout_timestamp_rtp_;
@@ -850,7 +850,7 @@ bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
int64_t time_ms) {
RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
playout_timestamp_ntp_ = ntp_timestamp_ms;
playout_timestamp_ntp_time_ms_ = time_ms;
}
@@ -858,7 +858,7 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
absl::optional<int64_t>
ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_)
return absl::nullopt;
@@ -877,13 +877,13 @@ int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const {
absl::optional<Syncable::Info> ChannelReceive::GetSyncInfo() const {
RTC_DCHECK(module_process_thread_checker_.IsCurrent());
Syncable::Info info;
- if (_rtpRtcpModule->RemoteNTP(&info.capture_time_ntp_secs,
- &info.capture_time_ntp_frac, nullptr, nullptr,
- &info.capture_time_source_clock) != 0) {
+ if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
+ &info.capture_time_ntp_frac, nullptr, nullptr,
+ &info.capture_time_source_clock) != 0) {
return absl::nullopt;
}
{
- rtc::CritScope cs(&sync_info_lock_);
+ MutexLock lock(&sync_info_lock_);
if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
return absl::nullopt;
}
@@ -917,7 +917,7 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000));
{
- rtc::CritScope lock(&video_sync_lock_);
+ MutexLock lock(&video_sync_lock_);
if (!rtcp && playout_timestamp != playout_timestamp_rtp_) {
playout_timestamp_rtp_ = playout_timestamp;
playout_timestamp_rtp_time_ms_ = now_ms;
@@ -942,12 +942,12 @@ int ChannelReceive::GetRtpTimestampRateHz() const {
int64_t ChannelReceive::GetRTT() const {
std::vector<RTCPReportBlock> report_blocks;
- _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
+ rtp_rtcp_->RemoteRTCPStat(&report_blocks);
// TODO(nisse): Could we check the return value from the ->RTT() call below,
// instead of checking if we have any report blocks?
if (report_blocks.empty()) {
- rtc::CritScope lock(&assoc_send_channel_lock_);
+ MutexLock lock(&assoc_send_channel_lock_);
// Tries to get RTT from an associated channel.
if (!associated_send_channel_) {
return 0;
@@ -961,8 +961,7 @@ int64_t ChannelReceive::GetRTT() const {
int64_t min_rtt = 0;
// TODO(nisse): This method computes RTT based on sender reports, even though
// a receive stream is not supposed to do that.
- if (_rtpRtcpModule->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
- 0) {
+ if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != 0) {
return 0;
}
return rtt;
diff --git a/audio/channel_send.cc b/audio/channel_send.cc
index 3387f271ba..80e7ab2f47 100644
--- a/audio/channel_send.cc
+++ b/audio/channel_send.cc
@@ -29,6 +29,7 @@
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/audio_processing/rms_level.h"
#include "modules/pacing/packet_router.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
@@ -38,6 +39,7 @@
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/rate_limiter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/time_utils.h"
@@ -54,7 +56,6 @@ constexpr int64_t kMaxRetransmissionWindowMs = 1000;
constexpr int64_t kMinRetransmissionWindowMs = 30;
class RtpPacketSenderProxy;
-class TransportFeedbackProxy;
class TransportSequenceNumberProxy;
class VoERtcpObserver;
@@ -77,7 +78,8 @@ class ChannelSend : public ChannelSendInterface,
bool extmap_allow_mixed,
int rtcp_report_interval_ms,
uint32_t ssrc,
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+ TransportFeedbackObserver* feedback_observer);
~ChannelSend() override;
@@ -106,7 +108,7 @@ class ChannelSend : public ChannelSendInterface,
ANAStats GetANAStatistics() const override;
// Used by AudioSendStream.
- RtpRtcp* GetRtpRtcp() const override;
+ RtpRtcpInterface* GetRtpRtcp() const override;
void RegisterCngPayloadType(int payload_type, int payload_frequency) override;
@@ -185,13 +187,13 @@ class ChannelSend : public ChannelSendInterface,
// audio thread to another, but access is still sequential.
rtc::RaceChecker audio_thread_race_checker_;
- rtc::CriticalSection volume_settings_critsect_;
+ mutable Mutex volume_settings_mutex_;
bool sending_ RTC_GUARDED_BY(&worker_thread_checker_) = false;
RtcEventLog* const event_log_;
- std::unique_ptr<RtpRtcp> _rtpRtcpModule;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<RTPSenderAudio> rtp_sender_audio_;
std::unique_ptr<AudioCodingModule> audio_coding_;
@@ -200,7 +202,7 @@ class ChannelSend : public ChannelSendInterface,
// uses
ProcessThread* const _moduleProcessThreadPtr;
RmsLevel rms_level_ RTC_GUARDED_BY(encoder_queue_);
- bool input_mute_ RTC_GUARDED_BY(volume_settings_critsect_);
+ bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_);
bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_);
// VoeRTP_RTCP
// TODO(henrika): can today be accessed on the main thread and on the
@@ -212,7 +214,7 @@ class ChannelSend : public ChannelSendInterface,
PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) =
nullptr;
- const std::unique_ptr<TransportFeedbackProxy> feedback_observer_proxy_;
+ TransportFeedbackObserver* const feedback_observer_;
const std::unique_ptr<RtpPacketSenderProxy> rtp_packet_pacer_proxy_;
const std::unique_ptr<RateLimiter> retransmission_rate_limiter_;
@@ -233,8 +235,8 @@ class ChannelSend : public ChannelSendInterface,
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate>
frame_transformer_delegate_ RTC_GUARDED_BY(encoder_queue_);
- rtc::CriticalSection bitrate_crit_section_;
- int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_crit_section_) = 0;
+ mutable Mutex bitrate_mutex_;
+ int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_mutex_) = 0;
// Defined last to ensure that there are no running tasks when the other
// members are destroyed.
@@ -243,63 +245,26 @@ class ChannelSend : public ChannelSendInterface,
const int kTelephoneEventAttenuationdB = 10;
-class TransportFeedbackProxy : public TransportFeedbackObserver {
- public:
- TransportFeedbackProxy() : feedback_observer_(nullptr) {
- pacer_thread_.Detach();
- network_thread_.Detach();
- }
-
- void SetTransportFeedbackObserver(
- TransportFeedbackObserver* feedback_observer) {
- RTC_DCHECK(thread_checker_.IsCurrent());
- rtc::CritScope lock(&crit_);
- feedback_observer_ = feedback_observer;
- }
-
- // Implements TransportFeedbackObserver.
- void OnAddPacket(const RtpPacketSendInfo& packet_info) override {
- RTC_DCHECK(pacer_thread_.IsCurrent());
- rtc::CritScope lock(&crit_);
- if (feedback_observer_)
- feedback_observer_->OnAddPacket(packet_info);
- }
-
- void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
- RTC_DCHECK(network_thread_.IsCurrent());
- rtc::CritScope lock(&crit_);
- if (feedback_observer_)
- feedback_observer_->OnTransportFeedback(feedback);
- }
-
- private:
- rtc::CriticalSection crit_;
- rtc::ThreadChecker thread_checker_;
- rtc::ThreadChecker pacer_thread_;
- rtc::ThreadChecker network_thread_;
- TransportFeedbackObserver* feedback_observer_ RTC_GUARDED_BY(&crit_);
-};
-
class RtpPacketSenderProxy : public RtpPacketSender {
public:
RtpPacketSenderProxy() : rtp_packet_pacer_(nullptr) {}
void SetPacketPacer(RtpPacketSender* rtp_packet_pacer) {
RTC_DCHECK(thread_checker_.IsCurrent());
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
rtp_packet_pacer_ = rtp_packet_pacer;
}
void EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
rtp_packet_pacer_->EnqueuePackets(std::move(packets));
}
private:
rtc::ThreadChecker thread_checker_;
- rtc::CriticalSection crit_;
- RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&mutex_);
};
class VoERtcpObserver : public RtcpBandwidthObserver {
@@ -309,12 +274,12 @@ class VoERtcpObserver : public RtcpBandwidthObserver {
~VoERtcpObserver() override {}
void SetBandwidthObserver(RtcpBandwidthObserver* bandwidth_observer) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
bandwidth_observer_ = bandwidth_observer;
}
void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (bandwidth_observer_) {
bandwidth_observer_->OnReceivedEstimatedBitrate(bitrate);
}
@@ -324,7 +289,7 @@ class VoERtcpObserver : public RtcpBandwidthObserver {
int64_t rtt,
int64_t now_ms) override {
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (bandwidth_observer_) {
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, rtt,
now_ms);
@@ -372,8 +337,8 @@ class VoERtcpObserver : public RtcpBandwidthObserver {
ChannelSend* owner_;
// Maps remote side ssrc to extended highest sequence number received.
std::map<uint32_t, uint32_t> extended_max_sequence_number_;
- rtc::CriticalSection crit_;
- RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(mutex_);
};
int32_t ChannelSend::SendData(AudioFrameType frameType,
@@ -388,9 +353,9 @@ int32_t ChannelSend::SendData(AudioFrameType frameType,
// Asynchronously transform the payload before sending it. After the payload
// is transformed, the delegate will call SendRtpAudio to send it.
frame_transformer_delegate_->Transform(
- frameType, payloadType, rtp_timestamp, _rtpRtcpModule->StartTimestamp(),
+ frameType, payloadType, rtp_timestamp, rtp_rtcp_->StartTimestamp(),
payloadData, payloadSize, absolute_capture_timestamp_ms,
- _rtpRtcpModule->SSRC());
+ rtp_rtcp_->SSRC());
return 0;
}
return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload,
@@ -427,7 +392,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// Encrypt the audio payload into the buffer.
size_t bytes_written = 0;
int encrypt_status = frame_encryptor_->Encrypt(
- cricket::MEDIA_TYPE_AUDIO, _rtpRtcpModule->SSRC(),
+ cricket::MEDIA_TYPE_AUDIO, rtp_rtcp_->SSRC(),
/*additional_data=*/nullptr, payload, encrypted_audio_payload,
&bytes_written);
if (encrypt_status != 0) {
@@ -449,12 +414,12 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// Push data from ACM to RTP/RTCP-module to deliver audio frame for
// packetization.
- if (!_rtpRtcpModule->OnSendingRtpFrame(rtp_timestamp,
- // Leaving the time when this frame was
- // received from the capture device as
- // undefined for voice for now.
- -1, payloadType,
- /*force_sender_report=*/false)) {
+ if (!rtp_rtcp_->OnSendingRtpFrame(rtp_timestamp,
+ // Leaving the time when this frame was
+ // received from the capture device as
+ // undefined for voice for now.
+ -1, payloadType,
+ /*force_sender_report=*/false)) {
return -1;
}
@@ -466,9 +431,8 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// This call will trigger Transport::SendPacket() from the RTP/RTCP module.
if (!rtp_sender_audio_->SendAudio(
- frameType, payloadType,
- rtp_timestamp + _rtpRtcpModule->StartTimestamp(), payload.data(),
- payload.size(), absolute_capture_timestamp_ms)) {
+ frameType, payloadType, rtp_timestamp + rtp_rtcp_->StartTimestamp(),
+ payload.data(), payload.size(), absolute_capture_timestamp_ms)) {
RTC_DLOG(LS_ERROR)
<< "ChannelSend::SendData() failed to send data to RTP/RTCP module";
return -1;
@@ -489,7 +453,8 @@ ChannelSend::ChannelSend(
bool extmap_allow_mixed,
int rtcp_report_interval_ms,
uint32_t ssrc,
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+ TransportFeedbackObserver* feedback_observer)
: event_log_(rtc_event_log),
_timeStamp(0), // This is just an offset, RTP module will add it's own
// random offset
@@ -498,7 +463,7 @@ ChannelSend::ChannelSend(
previous_frame_muted_(false),
_includeAudioLevelIndication(false),
rtcp_observer_(new VoERtcpObserver(this)),
- feedback_observer_proxy_(new TransportFeedbackProxy()),
+ feedback_observer_(feedback_observer),
rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()),
retransmission_rate_limiter_(
new RateLimiter(clock, kMaxRetransmissionWindowMs)),
@@ -512,9 +477,9 @@ ChannelSend::ChannelSend(
audio_coding_.reset(AudioCodingModule::Create(AudioCodingModule::Config()));
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.bandwidth_callback = rtcp_observer_.get();
- configuration.transport_feedback_callback = feedback_observer_proxy_.get();
+ configuration.transport_feedback_callback = feedback_observer_;
configuration.clock = (clock ? clock : Clock::GetRealTimeClock());
configuration.audio = true;
configuration.outgoing_transport = rtp_transport;
@@ -530,16 +495,16 @@ ChannelSend::ChannelSend(
configuration.local_media_ssrc = ssrc;
- _rtpRtcpModule = RtpRtcp::Create(configuration);
- _rtpRtcpModule->SetSendingMediaStatus(false);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp_->SetSendingMediaStatus(false);
- rtp_sender_audio_ = std::make_unique<RTPSenderAudio>(
- configuration.clock, _rtpRtcpModule->RtpSender());
+ rtp_sender_audio_ = std::make_unique<RTPSenderAudio>(configuration.clock,
+ rtp_rtcp_->RtpSender());
- _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE);
+ _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
// Ensure that RTCP is enabled by default for the created channel.
- _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
+ rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
int error = audio_coding_->RegisterTransportCallback(this);
RTC_DCHECK_EQ(0, error);
@@ -559,7 +524,7 @@ ChannelSend::~ChannelSend() {
RTC_DCHECK_EQ(0, error);
if (_moduleProcessThreadPtr)
- _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
+ _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get());
}
void ChannelSend::StartSend() {
@@ -567,8 +532,8 @@ void ChannelSend::StartSend() {
RTC_DCHECK(!sending_);
sending_ = true;
- _rtpRtcpModule->SetSendingMediaStatus(true);
- int ret = _rtpRtcpModule->SetSendingStatus(true);
+ rtp_rtcp_->SetSendingMediaStatus(true);
+ int ret = rtp_rtcp_->SetSendingStatus(true);
RTC_DCHECK_EQ(0, ret);
// It is now OK to start processing on the encoder task queue.
encoder_queue_.PostTask([this] {
@@ -594,10 +559,10 @@ void ChannelSend::StopSend() {
// Reset sending SSRC and sequence number and triggers direct transmission
// of RTCP BYE
- if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
+ if (rtp_rtcp_->SetSendingStatus(false) == -1) {
RTC_DLOG(LS_ERROR) << "StartSend() RTP/RTCP failed to stop sending";
}
- _rtpRtcpModule->SetSendingMediaStatus(false);
+ rtp_rtcp_->SetSendingMediaStatus(false);
}
void ChannelSend::SetEncoder(int payload_type,
@@ -608,8 +573,8 @@ void ChannelSend::SetEncoder(int payload_type,
// The RTP/RTCP module needs to know the RTP timestamp rate (i.e. clockrate)
// as well as some other things, so we collect this info and send it along.
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type,
- encoder->RtpTimestampRateHz());
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type,
+ encoder->RtpTimestampRateHz());
rtp_sender_audio_->RegisterAudioPayload("audio", payload_type,
encoder->RtpTimestampRateHz(),
encoder->NumChannels(), 0);
@@ -642,7 +607,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) {
// rules.
// RTC_DCHECK(worker_thread_checker_.IsCurrent() ||
// module_process_thread_checker_.IsCurrent());
- rtc::CritScope lock(&bitrate_crit_section_);
+ MutexLock lock(&bitrate_mutex_);
CallEncoder([&](AudioEncoder* encoder) {
encoder->OnReceivedUplinkAllocation(update);
@@ -652,7 +617,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) {
}
int ChannelSend::GetBitrate() const {
- rtc::CritScope lock(&bitrate_crit_section_);
+ MutexLock lock(&bitrate_mutex_);
return configured_bitrate_bps_;
}
@@ -663,8 +628,10 @@ void ChannelSend::OnUplinkPacketLossRate(float packet_loss_rate) {
}
void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+
// Deliver RTCP packet to RTP/RTCP module for parsing
- _rtpRtcpModule->IncomingRtcpPacket(data, length);
+ rtp_rtcp_->IncomingRtcpPacket(data, length);
int64_t rtt = GetRTT();
if (rtt == 0) {
@@ -685,12 +652,12 @@ void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
void ChannelSend::SetInputMute(bool enable) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- rtc::CritScope cs(&volume_settings_critsect_);
+ MutexLock lock(&volume_settings_mutex_);
input_mute_ = enable;
}
bool ChannelSend::InputMute() const {
- rtc::CritScope cs(&volume_settings_critsect_);
+ MutexLock lock(&volume_settings_mutex_);
return input_mute_;
}
@@ -713,7 +680,7 @@ bool ChannelSend::SendTelephoneEventOutband(int event, int duration_ms) {
void ChannelSend::RegisterCngPayloadType(int payload_type,
int payload_frequency) {
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency);
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency);
rtp_sender_audio_->RegisterAudioPayload("CN", payload_type, payload_frequency,
1, 0);
}
@@ -723,7 +690,7 @@ void ChannelSend::SetSendTelephoneEventPayloadType(int payload_type,
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
RTC_DCHECK_LE(0, payload_type);
RTC_DCHECK_GE(127, payload_type);
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency);
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency);
rtp_sender_audio_->RegisterAudioPayload("telephone-event", payload_type,
payload_frequency, 0, 0);
}
@@ -732,9 +699,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
_includeAudioLevelIndication = enable;
if (enable) {
- _rtpRtcpModule->RegisterRtpHeaderExtension(AudioLevel::kUri, id);
+ rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::kUri, id);
} else {
- _rtpRtcpModule->DeregisterSendRtpHeaderExtension(AudioLevel::kUri);
+ rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::kUri);
}
}
@@ -743,31 +710,25 @@ void ChannelSend::RegisterSenderCongestionControlObjects(
RtcpBandwidthObserver* bandwidth_observer) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
RtpPacketSender* rtp_packet_pacer = transport->packet_sender();
- TransportFeedbackObserver* transport_feedback_observer =
- transport->transport_feedback_observer();
PacketRouter* packet_router = transport->packet_router();
RTC_DCHECK(rtp_packet_pacer);
- RTC_DCHECK(transport_feedback_observer);
RTC_DCHECK(packet_router);
RTC_DCHECK(!packet_router_);
rtcp_observer_->SetBandwidthObserver(bandwidth_observer);
- feedback_observer_proxy_->SetTransportFeedbackObserver(
- transport_feedback_observer);
rtp_packet_pacer_proxy_->SetPacketPacer(rtp_packet_pacer);
- _rtpRtcpModule->SetStorePacketsStatus(true, 600);
+ rtp_rtcp_->SetStorePacketsStatus(true, 600);
constexpr bool remb_candidate = false;
- packet_router->AddSendRtpModule(_rtpRtcpModule.get(), remb_candidate);
+ packet_router->AddSendRtpModule(rtp_rtcp_.get(), remb_candidate);
packet_router_ = packet_router;
}
void ChannelSend::ResetSenderCongestionControlObjects() {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
RTC_DCHECK(packet_router_);
- _rtpRtcpModule->SetStorePacketsStatus(false, 600);
+ rtp_rtcp_->SetStorePacketsStatus(false, 600);
rtcp_observer_->SetBandwidthObserver(nullptr);
- feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
- packet_router_->RemoveSendRtpModule(_rtpRtcpModule.get());
+ packet_router_->RemoveSendRtpModule(rtp_rtcp_.get());
packet_router_ = nullptr;
rtp_packet_pacer_proxy_->SetPacketPacer(nullptr);
}
@@ -776,7 +737,7 @@ void ChannelSend::SetRTCP_CNAME(absl::string_view c_name) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
// Note: SetCNAME() accepts a c string of length at most 255.
const std::string c_name_limited(c_name.substr(0, 255));
- int ret = _rtpRtcpModule->SetCNAME(c_name_limited.c_str()) != 0;
+ int ret = rtp_rtcp_->SetCNAME(c_name_limited.c_str()) != 0;
RTC_DCHECK_EQ(0, ret) << "SetRTCP_CNAME() failed to set RTCP CNAME";
}
@@ -787,7 +748,7 @@ std::vector<ReportBlock> ChannelSend::GetRemoteRTCPReportBlocks() const {
// report block according to RFC 3550.
std::vector<RTCPReportBlock> rtcp_report_blocks;
- int ret = _rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks);
+ int ret = rtp_rtcp_->RemoteRTCPStat(&rtcp_report_blocks);
RTC_DCHECK_EQ(0, ret);
std::vector<ReportBlock> report_blocks;
@@ -816,7 +777,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const {
StreamDataCounters rtp_stats;
StreamDataCounters rtx_stats;
- _rtpRtcpModule->GetSendStreamDataCounters(&rtp_stats, &rtx_stats);
+ rtp_rtcp_->GetSendStreamDataCounters(&rtp_stats, &rtx_stats);
stats.payload_bytes_sent =
rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes;
stats.header_and_padding_bytes_sent =
@@ -829,7 +790,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const {
stats.packetsSent =
rtp_stats.transmitted.packets + rtx_stats.transmitted.packets;
stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets;
- stats.report_block_datas = _rtpRtcpModule->GetLatestReportBlockData();
+ stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData();
return stats;
}
@@ -894,14 +855,14 @@ ANAStats ChannelSend::GetANAStatistics() const {
return audio_coding_->GetANAStats();
}
-RtpRtcp* ChannelSend::GetRtpRtcp() const {
+RtpRtcpInterface* ChannelSend::GetRtpRtcp() const {
RTC_DCHECK(module_process_thread_checker_.IsCurrent());
- return _rtpRtcpModule.get();
+ return rtp_rtcp_.get();
}
int64_t ChannelSend::GetRTT() const {
std::vector<RTCPReportBlock> report_blocks;
- _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
+ rtp_rtcp_->RemoteRTCPStat(&report_blocks);
if (report_blocks.empty()) {
return 0;
@@ -913,8 +874,8 @@ int64_t ChannelSend::GetRTT() const {
int64_t min_rtt = 0;
// We don't know in advance the remote ssrc used by the other end's receiver
// reports, so use the SSRC of the first report block for calculating the RTT.
- if (_rtpRtcpModule->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt,
- &min_rtt, &max_rtt) != 0) {
+ if (rtp_rtcp_->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, &min_rtt,
+ &max_rtt) != 0) {
return 0;
}
return rtt;
@@ -985,12 +946,13 @@ std::unique_ptr<ChannelSendInterface> CreateChannelSend(
bool extmap_allow_mixed,
int rtcp_report_interval_ms,
uint32_t ssrc,
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+ TransportFeedbackObserver* feedback_observer) {
return std::make_unique<ChannelSend>(
clock, task_queue_factory, module_process_thread, rtp_transport,
rtcp_rtt_stats, rtc_event_log, frame_encryptor, crypto_options,
extmap_allow_mixed, rtcp_report_interval_ms, ssrc,
- std::move(frame_transformer));
+ std::move(frame_transformer), feedback_observer);
}
} // namespace voe
diff --git a/audio/channel_send.h b/audio/channel_send.h
index cb3b99287b..2e23ef5d2d 100644
--- a/audio/channel_send.h
+++ b/audio/channel_send.h
@@ -22,7 +22,7 @@
#include "api/function_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sender_audio.h"
namespace webrtc {
@@ -30,7 +30,6 @@ namespace webrtc {
class FrameEncryptorInterface;
class ProcessThread;
class RtcEventLog;
-class RtpRtcp;
class RtpTransportControllerSendInterface;
struct CallSendStatistics {
@@ -97,7 +96,7 @@ class ChannelSendInterface {
virtual void ProcessAndEncodeAudio(
std::unique_ptr<AudioFrame> audio_frame) = 0;
- virtual RtpRtcp* GetRtpRtcp() const = 0;
+ virtual RtpRtcpInterface* GetRtpRtcp() const = 0;
// In RTP we currently rely on RTCP packets (|ReceivedRTCPPacket|) to inform
// about RTT.
@@ -136,7 +135,8 @@ std::unique_ptr<ChannelSendInterface> CreateChannelSend(
bool extmap_allow_mixed,
int rtcp_report_interval_ms,
uint32_t ssrc,
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+ TransportFeedbackObserver* feedback_observer);
} // namespace voe
} // namespace webrtc
diff --git a/audio/channel_send_frame_transformer_delegate.cc b/audio/channel_send_frame_transformer_delegate.cc
index 53b573eb8b..72a459d897 100644
--- a/audio/channel_send_frame_transformer_delegate.cc
+++ b/audio/channel_send_frame_transformer_delegate.cc
@@ -77,7 +77,7 @@ void ChannelSendFrameTransformerDelegate::Reset() {
frame_transformer_->UnregisterTransformedFrameCallback();
frame_transformer_ = nullptr;
- rtc::CritScope lock(&send_lock_);
+ MutexLock lock(&send_lock_);
send_frame_callback_ = SendFrameCallback();
}
@@ -97,7 +97,7 @@ void ChannelSendFrameTransformerDelegate::Transform(
void ChannelSendFrameTransformerDelegate::OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) {
- rtc::CritScope lock(&send_lock_);
+ MutexLock lock(&send_lock_);
if (!send_frame_callback_)
return;
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate = this;
@@ -109,7 +109,7 @@ void ChannelSendFrameTransformerDelegate::OnTransformedFrame(
void ChannelSendFrameTransformerDelegate::SendFrame(
std::unique_ptr<TransformableFrameInterface> frame) const {
- rtc::CritScope lock(&send_lock_);
+ MutexLock lock(&send_lock_);
RTC_DCHECK_RUN_ON(encoder_queue_);
if (!send_frame_callback_)
return;
diff --git a/audio/channel_send_frame_transformer_delegate.h b/audio/channel_send_frame_transformer_delegate.h
index 5added7b31..531d1bc110 100644
--- a/audio/channel_send_frame_transformer_delegate.h
+++ b/audio/channel_send_frame_transformer_delegate.h
@@ -16,7 +16,7 @@
#include "api/frame_transformer_interface.h"
#include "modules/audio_coding/include/audio_coding_module_typedefs.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
@@ -72,7 +72,7 @@ class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback {
~ChannelSendFrameTransformerDelegate() override = default;
private:
- rtc::CriticalSection send_lock_;
+ mutable Mutex send_lock_;
SendFrameCallback send_frame_callback_ RTC_GUARDED_BY(send_lock_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_;
rtc::TaskQueue* encoder_queue_ RTC_GUARDED_BY(send_lock_);
diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h
index 38ad208e1a..542358f687 100644
--- a/audio/mock_voe_channel_proxy.h
+++ b/audio/mock_voe_channel_proxy.h
@@ -28,102 +28,144 @@ namespace test {
class MockChannelReceive : public voe::ChannelReceiveInterface {
public:
- MOCK_METHOD2(SetNACKStatus, void(bool enable, int max_packets));
- MOCK_METHOD1(RegisterReceiverCongestionControlObjects,
- void(PacketRouter* packet_router));
- MOCK_METHOD0(ResetReceiverCongestionControlObjects, void());
- MOCK_CONST_METHOD0(GetRTCPStatistics, CallReceiveStatistics());
- MOCK_CONST_METHOD0(GetNetworkStatistics, NetworkStatistics());
- MOCK_CONST_METHOD0(GetDecodingCallStatistics, AudioDecodingCallStats());
- MOCK_CONST_METHOD0(GetSpeechOutputLevelFullRange, int());
- MOCK_CONST_METHOD0(GetTotalOutputEnergy, double());
- MOCK_CONST_METHOD0(GetTotalOutputDuration, double());
- MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
- MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink));
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived& packet));
- MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length));
- MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling));
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioMixer::Source::AudioFrameInfo(int sample_rate_hz,
- AudioFrame* audio_frame));
- MOCK_CONST_METHOD0(PreferredSampleRate, int());
- MOCK_METHOD1(SetAssociatedSendChannel,
- void(const voe::ChannelSendInterface* send_channel));
- MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp,
- bool(uint32_t* rtp_timestamp, int64_t* time_ms));
- MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs,
- void(int64_t ntp_timestamp_ms, int64_t time_ms));
- MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs,
- absl::optional<int64_t>(int64_t now_ms));
- MOCK_CONST_METHOD0(GetSyncInfo, absl::optional<Syncable::Info>());
- MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
- MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms));
- MOCK_CONST_METHOD0(GetBaseMinimumPlayoutDelayMs, int());
- MOCK_CONST_METHOD0(GetReceiveCodec,
- absl::optional<std::pair<int, SdpAudioFormat>>());
- MOCK_METHOD1(SetReceiveCodecs,
- void(const std::map<int, SdpAudioFormat>& codecs));
- MOCK_CONST_METHOD0(GetSources, std::vector<RtpSource>());
- MOCK_METHOD0(StartPlayout, void());
- MOCK_METHOD0(StopPlayout, void());
- MOCK_METHOD1(SetDepacketizerToDecoderFrameTransformer,
- void(rtc::scoped_refptr<webrtc::FrameTransformerInterface>
- frame_transformer));
+ MOCK_METHOD(void, SetNACKStatus, (bool enable, int max_packets), (override));
+ MOCK_METHOD(void,
+ RegisterReceiverCongestionControlObjects,
+ (PacketRouter*),
+ (override));
+ MOCK_METHOD(void, ResetReceiverCongestionControlObjects, (), (override));
+ MOCK_METHOD(CallReceiveStatistics, GetRTCPStatistics, (), (const, override));
+ MOCK_METHOD(NetworkStatistics, GetNetworkStatistics, (), (const, override));
+ MOCK_METHOD(AudioDecodingCallStats,
+ GetDecodingCallStatistics,
+ (),
+ (const, override));
+ MOCK_METHOD(int, GetSpeechOutputLevelFullRange, (), (const, override));
+ MOCK_METHOD(double, GetTotalOutputEnergy, (), (const, override));
+ MOCK_METHOD(double, GetTotalOutputDuration, (), (const, override));
+ MOCK_METHOD(uint32_t, GetDelayEstimate, (), (const, override));
+ MOCK_METHOD(void, SetSink, (AudioSinkInterface*), (override));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived& packet), (override));
+ MOCK_METHOD(void,
+ ReceivedRTCPPacket,
+ (const uint8_t*, size_t length),
+ (override));
+ MOCK_METHOD(void, SetChannelOutputVolumeScaling, (float scaling), (override));
+ MOCK_METHOD(AudioMixer::Source::AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame*),
+ (override));
+ MOCK_METHOD(int, PreferredSampleRate, (), (const, override));
+ MOCK_METHOD(void,
+ SetAssociatedSendChannel,
+ (const voe::ChannelSendInterface*),
+ (override));
+ MOCK_METHOD(bool,
+ GetPlayoutRtpTimestamp,
+ (uint32_t*, int64_t*),
+ (const, override));
+ MOCK_METHOD(void,
+ SetEstimatedPlayoutNtpTimestampMs,
+ (int64_t ntp_timestamp_ms, int64_t time_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int64_t>,
+ GetCurrentEstimatedPlayoutNtpTimestampMs,
+ (int64_t now_ms),
+ (const, override));
+ MOCK_METHOD(absl::optional<Syncable::Info>,
+ GetSyncInfo,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetMinimumPlayoutDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override));
+ MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override));
+ MOCK_METHOD((absl::optional<std::pair<int, SdpAudioFormat>>),
+ GetReceiveCodec,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetReceiveCodecs,
+ ((const std::map<int, SdpAudioFormat>& codecs)),
+ (override));
+ MOCK_METHOD(void, StartPlayout, (), (override));
+ MOCK_METHOD(void, StopPlayout, (), (override));
+ MOCK_METHOD(
+ void,
+ SetDepacketizerToDecoderFrameTransformer,
+ (rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
};
class MockChannelSend : public voe::ChannelSendInterface {
public:
- // GMock doesn't like move-only types, like std::unique_ptr.
- virtual void SetEncoder(int payload_type,
- std::unique_ptr<AudioEncoder> encoder) {
- return SetEncoderForMock(payload_type, &encoder);
- }
- MOCK_METHOD2(SetEncoderForMock,
- void(int payload_type, std::unique_ptr<AudioEncoder>* encoder));
- MOCK_METHOD1(
+ MOCK_METHOD(void,
+ SetEncoder,
+ (int payload_type, std::unique_ptr<AudioEncoder> encoder),
+ (override));
+ MOCK_METHOD(
+ void,
ModifyEncoder,
- void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier));
- MOCK_METHOD1(CallEncoder,
- void(rtc::FunctionView<void(AudioEncoder*)> modifier));
- MOCK_METHOD1(SetRTCP_CNAME, void(absl::string_view c_name));
- MOCK_METHOD2(SetSendAudioLevelIndicationStatus, void(bool enable, int id));
- MOCK_METHOD2(RegisterSenderCongestionControlObjects,
- void(RtpTransportControllerSendInterface* transport,
- RtcpBandwidthObserver* bandwidth_observer));
- MOCK_METHOD0(ResetSenderCongestionControlObjects, void());
- MOCK_CONST_METHOD0(GetRTCPStatistics, CallSendStatistics());
- MOCK_CONST_METHOD0(GetRemoteRTCPReportBlocks, std::vector<ReportBlock>());
- MOCK_CONST_METHOD0(GetANAStatistics, ANAStats());
- MOCK_METHOD2(RegisterCngPayloadType,
- void(int payload_type, int payload_frequency));
- MOCK_METHOD2(SetSendTelephoneEventPayloadType,
- void(int payload_type, int payload_frequency));
- MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms));
- MOCK_METHOD1(OnBitrateAllocation, void(BitrateAllocationUpdate update));
- MOCK_METHOD1(SetInputMute, void(bool muted));
- MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length));
- // GMock doesn't like move-only types, like std::unique_ptr.
- virtual void ProcessAndEncodeAudio(std::unique_ptr<AudioFrame> audio_frame) {
- ProcessAndEncodeAudioForMock(&audio_frame);
- }
- MOCK_METHOD1(ProcessAndEncodeAudioForMock,
- void(std::unique_ptr<AudioFrame>* audio_frame));
- MOCK_METHOD1(SetTransportOverhead,
- void(size_t transport_overhead_per_packet));
- MOCK_CONST_METHOD0(GetRtpRtcp, RtpRtcp*());
- MOCK_CONST_METHOD0(GetBitrate, int());
- MOCK_METHOD1(OnTwccBasedUplinkPacketLossRate, void(float packet_loss_rate));
- MOCK_METHOD1(OnRecoverableUplinkPacketLossRate,
- void(float recoverable_packet_loss_rate));
- MOCK_CONST_METHOD0(GetRTT, int64_t());
- MOCK_METHOD0(StartSend, void());
- MOCK_METHOD0(StopSend, void());
- MOCK_METHOD1(
- SetFrameEncryptor,
- void(rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor));
- MOCK_METHOD1(SetEncoderToPacketizerFrameTransformer,
- void(rtc::scoped_refptr<webrtc::FrameTransformerInterface>
- frame_transformer));
+ (rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier),
+ (override));
+ MOCK_METHOD(void,
+ CallEncoder,
+ (rtc::FunctionView<void(AudioEncoder*)> modifier),
+ (override));
+ MOCK_METHOD(void, SetRTCP_CNAME, (absl::string_view c_name), (override));
+ MOCK_METHOD(void,
+ SetSendAudioLevelIndicationStatus,
+ (bool enable, int id),
+ (override));
+ MOCK_METHOD(void,
+ RegisterSenderCongestionControlObjects,
+ (RtpTransportControllerSendInterface*, RtcpBandwidthObserver*),
+ (override));
+ MOCK_METHOD(void, ResetSenderCongestionControlObjects, (), (override));
+ MOCK_METHOD(CallSendStatistics, GetRTCPStatistics, (), (const, override));
+ MOCK_METHOD(std::vector<ReportBlock>,
+ GetRemoteRTCPReportBlocks,
+ (),
+ (const, override));
+ MOCK_METHOD(ANAStats, GetANAStatistics, (), (const, override));
+ MOCK_METHOD(void,
+ RegisterCngPayloadType,
+ (int payload_type, int payload_frequency),
+ (override));
+ MOCK_METHOD(void,
+ SetSendTelephoneEventPayloadType,
+ (int payload_type, int payload_frequency),
+ (override));
+ MOCK_METHOD(bool,
+ SendTelephoneEventOutband,
+ (int event, int duration_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocation,
+ (BitrateAllocationUpdate update),
+ (override));
+ MOCK_METHOD(void, SetInputMute, (bool muted), (override));
+ MOCK_METHOD(void,
+ ReceivedRTCPPacket,
+ (const uint8_t*, size_t length),
+ (override));
+ MOCK_METHOD(void,
+ ProcessAndEncodeAudio,
+ (std::unique_ptr<AudioFrame>),
+ (override));
+ MOCK_METHOD(RtpRtcpInterface*, GetRtpRtcp, (), (const, override));
+ MOCK_METHOD(int, GetBitrate, (), (const, override));
+ MOCK_METHOD(int64_t, GetRTT, (), (const, override));
+ MOCK_METHOD(void, StartSend, (), (override));
+ MOCK_METHOD(void, StopSend, (), (override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetEncoderToPacketizerFrameTransformer,
+ (rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
};
} // namespace test
} // namespace webrtc
diff --git a/audio/test/low_bandwidth_audio_test.cc b/audio/test/low_bandwidth_audio_test.cc
index 049b5e5150..50cf499920 100644
--- a/audio/test/low_bandwidth_audio_test.cc
+++ b/audio/test/low_bandwidth_audio_test.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "api/test/simulated_network.h"
#include "audio/test/audio_end_to_end_test.h"
diff --git a/audio/test/pc_low_bandwidth_audio_test.cc b/audio/test/pc_low_bandwidth_audio_test.cc
index aafb65f15d..95a32238c5 100644
--- a/audio/test/pc_low_bandwidth_audio_test.cc
+++ b/audio/test/pc_low_bandwidth_audio_test.cc
@@ -10,12 +10,14 @@
#include <memory>
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "api/test/create_network_emulation_manager.h"
#include "api/test/create_peerconnection_quality_test_fixture.h"
#include "api/test/network_emulation_manager.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/test/simulated_network.h"
+#include "api/test/time_controller.h"
#include "call/simulated_network.h"
#include "test/gtest.h"
#include "test/pc/e2e/network_quality_metrics_reporter.h"
@@ -70,12 +72,13 @@ CreateTwoNetworkLinks(NetworkEmulationManager* emulation,
std::unique_ptr<webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture>
CreateTestFixture(const std::string& test_case_name,
+ TimeController& time_controller,
std::pair<EmulatedNetworkManagerInterface*,
EmulatedNetworkManagerInterface*> network_links,
rtc::FunctionView<void(PeerConfigurer*)> alice_configurer,
rtc::FunctionView<void(PeerConfigurer*)> bob_configurer) {
auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture(
- test_case_name, /*audio_quality_analyzer=*/nullptr,
+ test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr,
/*video_quality_analyzer=*/nullptr);
fixture->AddPeer(network_links.first->network_thread(),
network_links.first->network_manager(), alice_configurer);
@@ -127,7 +130,7 @@ TEST(PCLowBandwidthAudioTest, PCGoodNetworkHighBitrate) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- GetMetricTestCaseName(),
+ GetMetricTestCaseName(), *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -154,7 +157,7 @@ TEST(PCLowBandwidthAudioTest, PC40kbpsNetwork) {
config.queue_delay_ms = 400;
config.loss_percent = 1;
auto fixture = CreateTestFixture(
- GetMetricTestCaseName(),
+ GetMetricTestCaseName(), *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
AudioConfig audio;
diff --git a/audio/utility/audio_frame_operations_unittest.cc b/audio/utility/audio_frame_operations_unittest.cc
index 1d38875add..1a2c16e45f 100644
--- a/audio/utility/audio_frame_operations_unittest.cc
+++ b/audio/utility/audio_frame_operations_unittest.cc
@@ -27,6 +27,8 @@ class AudioFrameOperationsTest : public ::testing::Test {
AudioFrame frame_;
};
+class AudioFrameOperationsDeathTest : public AudioFrameOperationsTest {};
+
void SetFrameData(int16_t ch1,
int16_t ch2,
int16_t ch3,
@@ -105,7 +107,7 @@ void VerifyFrameDataBounds(const AudioFrame& frame,
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) {
+TEST_F(AudioFrameOperationsDeathTest, MonoToStereoFailsWithBadParameters) {
EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), "");
frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples;
frame_.num_channels_ = 1;
@@ -136,7 +138,7 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) {
+TEST_F(AudioFrameOperationsDeathTest, StereoToMonoFailsWithBadParameters) {
frame_.num_channels_ = 1;
EXPECT_DEATH(AudioFrameOperations::DownmixChannels(1, &frame_), "");
}
diff --git a/audio/voip/BUILD.gn b/audio/voip/BUILD.gn
index 60232d5144..52f9d07f17 100644
--- a/audio/voip/BUILD.gn
+++ b/audio/voip/BUILD.gn
@@ -26,8 +26,9 @@ rtc_library("voip_core") {
"../../modules/utility:utility",
"../../rtc_base:criticalsection",
"../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_channel") {
@@ -74,6 +75,7 @@ rtc_library("audio_ingress") {
"../../rtc_base:logging",
"../../rtc_base:safe_minmax",
"../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
"../utility:audio_frame_operations",
]
}
@@ -95,6 +97,7 @@ rtc_library("audio_egress") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:thread_checker",
"../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
"../utility:audio_frame_operations",
]
}
diff --git a/audio/voip/audio_channel.cc b/audio/voip/audio_channel.cc
index b9ce7accd1..d9c89fcdc4 100644
--- a/audio/voip/audio_channel.cc
+++ b/audio/voip/audio_channel.cc
@@ -16,7 +16,7 @@
#include "api/audio_codecs/audio_format.h"
#include "api/task_queue/task_queue_factory.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "rtc_base/critical_section.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
@@ -43,7 +43,7 @@ AudioChannel::AudioChannel(
Clock* clock = Clock::GetRealTimeClock();
receive_statistics_ = ReceiveStatistics::Create(clock);
- RtpRtcp::Configuration rtp_config;
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = clock;
rtp_config.audio = true;
rtp_config.receive_statistics = receive_statistics_.get();
@@ -51,7 +51,7 @@ AudioChannel::AudioChannel(
rtp_config.outgoing_transport = transport;
rtp_config.local_media_ssrc = local_ssrc;
- rtp_rtcp_ = RtpRtcp::Create(rtp_config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp_->SetSendingMediaStatus(false);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
diff --git a/audio/voip/audio_channel.h b/audio/voip/audio_channel.h
index 8b6f1a8e59..659e990c30 100644
--- a/audio/voip/audio_channel.h
+++ b/audio/voip/audio_channel.h
@@ -20,9 +20,8 @@
#include "api/voip/voip_base.h"
#include "audio/voip/audio_egress.h"
#include "audio/voip/audio_ingress.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
@@ -88,7 +87,7 @@ class AudioChannel : public rtc::RefCountInterface {
// Listed in order for safe destruction of AudioChannel object.
// Synchronization for these are handled internally.
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<AudioIngress> ingress_;
std::unique_ptr<AudioEgress> egress_;
};
diff --git a/audio/voip/audio_egress.cc b/audio/voip/audio_egress.cc
index a7bc202a41..305f712624 100644
--- a/audio/voip/audio_egress.cc
+++ b/audio/voip/audio_egress.cc
@@ -17,7 +17,7 @@
namespace webrtc {
-AudioEgress::AudioEgress(RtpRtcp* rtp_rtcp,
+AudioEgress::AudioEgress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
TaskQueueFactory* task_queue_factory)
: rtp_rtcp_(rtp_rtcp),
diff --git a/audio/voip/audio_egress.h b/audio/voip/audio_egress.h
index e5632cde32..8ec048f915 100644
--- a/audio/voip/audio_egress.h
+++ b/audio/voip/audio_egress.h
@@ -20,8 +20,9 @@
#include "call/audio_sender.h"
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sender_audio.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/time_utils.h"
@@ -43,7 +44,7 @@ namespace webrtc {
// smaller footprint.
class AudioEgress : public AudioSender, public AudioPacketizationCallback {
public:
- AudioEgress(RtpRtcp* rtp_rtcp,
+ AudioEgress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
TaskQueueFactory* task_queue_factory);
~AudioEgress() override;
@@ -72,7 +73,7 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback {
// Retrieve current encoder format info. This returns encoder format set
// by SetEncoder() and if encoder is not set, this will return nullopt.
absl::optional<SdpAudioFormat> GetEncoderFormat() const {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
return encoder_format_;
}
@@ -99,17 +100,17 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback {
private:
void SetEncoderFormat(const SdpAudioFormat& encoder_format) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
encoder_format_ = encoder_format;
}
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
// Current encoder format selected by caller.
absl::optional<SdpAudioFormat> encoder_format_ RTC_GUARDED_BY(lock_);
// Synchronization is handled internally by RtpRtcp.
- RtpRtcp* const rtp_rtcp_;
+ RtpRtcpInterface* const rtp_rtcp_;
// Synchronization is handled internally by RTPSenderAudio.
RTPSenderAudio rtp_sender_audio_;
diff --git a/audio/voip/audio_ingress.cc b/audio/voip/audio_ingress.cc
index fb43fcd753..560055d4f4 100644
--- a/audio/voip/audio_ingress.cc
+++ b/audio/voip/audio_ingress.cc
@@ -17,7 +17,6 @@
#include "api/audio_codecs/audio_format.h"
#include "audio/utility/audio_frame_operations.h"
#include "modules/audio_coding/include/audio_coding_module.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_minmax.h"
@@ -36,7 +35,7 @@ AudioCodingModule::Config CreateAcmConfig(
} // namespace
AudioIngress::AudioIngress(
- RtpRtcp* rtp_rtcp,
+ RtpRtcpInterface* rtp_rtcp,
Clock* clock,
ReceiveStatistics* receive_statistics,
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory)
@@ -83,7 +82,7 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo(
// Compute elapsed and NTP times.
int64_t unwrap_timestamp;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
unwrap_timestamp =
timestamp_wrap_handler_.Unwrap(audio_frame->timestamp_);
audio_frame->ntp_time_ms_ =
@@ -107,7 +106,7 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo(
void AudioIngress::SetReceiveCodecs(
const std::map<int, SdpAudioFormat>& codecs) {
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (const auto& kv : codecs) {
receive_codec_info_[kv.first] = kv.second.clockrate_hz;
}
@@ -125,7 +124,7 @@ void AudioIngress::ReceivedRTPPacket(rtc::ArrayView<const uint8_t> rtp_packet) {
// Set payload type's sampling rate before we feed it into ReceiveStatistics.
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
const auto& it =
receive_codec_info_.find(rtp_packet_received.PayloadType());
// If sampling rate info is not available in our received codec set, it
@@ -185,7 +184,7 @@ void AudioIngress::ReceivedRTCPPacket(
}
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
}
}
diff --git a/audio/voip/audio_ingress.h b/audio/voip/audio_ingress.h
index 99766741d6..5a8df21f7a 100644
--- a/audio/voip/audio_ingress.h
+++ b/audio/voip/audio_ingress.h
@@ -26,9 +26,9 @@
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "rtc_base/critical_section.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
@@ -44,7 +44,7 @@ namespace webrtc {
// smaller footprint.
class AudioIngress : public AudioMixer::Source {
public:
- AudioIngress(RtpRtcp* rtp_rtcp,
+ AudioIngress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
ReceiveStatistics* receive_statistics,
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory);
@@ -122,8 +122,8 @@ class AudioIngress : public AudioMixer::Source {
// Synchronizaton is handled internally by ReceiveStatistics.
ReceiveStatistics* const rtp_receive_statistics_;
- // Synchronizaton is handled internally by RtpRtcp.
- RtpRtcp* const rtp_rtcp_;
+ // Synchronizaton is handled internally by RtpRtcpInterface.
+ RtpRtcpInterface* const rtp_rtcp_;
// Synchronizaton is handled internally by acm2::AcmReceiver.
acm2::AcmReceiver acm_receiver_;
@@ -131,7 +131,7 @@ class AudioIngress : public AudioMixer::Source {
// Synchronizaton is handled internally by voe::AudioLevel.
voe::AudioLevel output_audio_level_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
RemoteNtpTimeEstimator ntp_estimator_ RTC_GUARDED_BY(lock_);
diff --git a/audio/voip/test/BUILD.gn b/audio/voip/test/BUILD.gn
index 39f100a3aa..d698b3321d 100644
--- a/audio/voip/test/BUILD.gn
+++ b/audio/voip/test/BUILD.gn
@@ -36,6 +36,7 @@ if (rtc_include_tests) {
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_impl",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../modules/utility",
"../../../rtc_base:logging",
@@ -56,6 +57,7 @@ if (rtc_include_tests) {
"../../../api/audio_codecs:builtin_audio_encoder_factory",
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../rtc_base:logging",
"../../../rtc_base:rtc_event",
"../../../test:mock_transport",
@@ -72,6 +74,7 @@ if (rtc_include_tests) {
"../../../api/audio_codecs:builtin_audio_encoder_factory",
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../rtc_base:logging",
"../../../rtc_base:rtc_event",
diff --git a/audio/voip/test/audio_egress_unittest.cc b/audio/voip/test/audio_egress_unittest.cc
index 3391265880..70fb6dcf36 100644
--- a/audio/voip/test/audio_egress_unittest.cc
+++ b/audio/voip/test/audio_egress_unittest.cc
@@ -14,6 +14,7 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "modules/audio_mixer/sine_wave_generator.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "test/gmock.h"
@@ -27,16 +28,16 @@ using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Unused;
-std::unique_ptr<RtpRtcp> CreateRtpStack(Clock* clock,
- Transport* transport,
- uint32_t remote_ssrc) {
- RtpRtcp::Configuration rtp_config;
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpStack(Clock* clock,
+ Transport* transport,
+ uint32_t remote_ssrc) {
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = clock;
rtp_config.audio = true;
rtp_config.rtcp_report_interval_ms = 5000;
rtp_config.outgoing_transport = transport;
rtp_config.local_media_ssrc = remote_ssrc;
- auto rtp_rtcp = RtpRtcp::Create(rtp_config);
+ auto rtp_rtcp = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
return rtp_rtcp;
@@ -100,7 +101,7 @@ class AudioEgressTest : public ::testing::Test {
SimulatedClock fake_clock_;
NiceMock<MockTransport> transport_;
SineWaveGenerator wave_generator_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<TaskQueueFactory> task_queue_factory_;
rtc::scoped_refptr<AudioEncoderFactory> encoder_factory_;
std::unique_ptr<AudioEgress> egress_;
diff --git a/audio/voip/test/audio_ingress_unittest.cc b/audio/voip/test/audio_ingress_unittest.cc
index bedb82e211..3a2a66a325 100644
--- a/audio/voip/test/audio_ingress_unittest.cc
+++ b/audio/voip/test/audio_ingress_unittest.cc
@@ -15,6 +15,7 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "audio/voip/audio_egress.h"
#include "modules/audio_mixer/sine_wave_generator.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "test/gmock.h"
@@ -38,14 +39,14 @@ class AudioIngressTest : public ::testing::Test {
: fake_clock_(123456789), wave_generator_(1000.0, kAudioLevel) {
receive_statistics_ = ReceiveStatistics::Create(&fake_clock_);
- RtpRtcp::Configuration rtp_config;
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = &fake_clock_;
rtp_config.audio = true;
rtp_config.receive_statistics = receive_statistics_.get();
rtp_config.rtcp_report_interval_ms = 5000;
rtp_config.outgoing_transport = &transport_;
rtp_config.local_media_ssrc = 0xdeadc0de;
- rtp_rtcp_ = RtpRtcp::Create(rtp_config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp_->SetSendingMediaStatus(false);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
@@ -94,7 +95,7 @@ class AudioIngressTest : public ::testing::Test {
SineWaveGenerator wave_generator_;
NiceMock<MockTransport> transport_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
rtc::scoped_refptr<AudioEncoderFactory> encoder_factory_;
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory_;
std::unique_ptr<TaskQueueFactory> task_queue_factory_;
diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc
index 3275f028cd..7292644648 100644
--- a/audio/voip/voip_core.cc
+++ b/audio/voip/voip_core.cc
@@ -15,7 +15,6 @@
#include <utility>
#include "api/audio_codecs/audio_format.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
namespace webrtc {
@@ -134,7 +133,7 @@ absl::optional<ChannelId> VoipCore::CreateChannel(
process_thread_.get(), audio_mixer_.get(), decoder_factory_);
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
channel = static_cast<ChannelId>(next_channel_id_);
channels_[*channel] = audio_channel;
@@ -154,7 +153,7 @@ void VoipCore::ReleaseChannel(ChannelId channel) {
// Destroy channel outside of the lock.
rtc::scoped_refptr<AudioChannel> audio_channel;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
auto iter = channels_.find(channel);
if (iter != channels_.end()) {
@@ -170,7 +169,7 @@ void VoipCore::ReleaseChannel(ChannelId channel) {
rtc::scoped_refptr<AudioChannel> VoipCore::GetChannel(ChannelId channel) {
rtc::scoped_refptr<AudioChannel> audio_channel;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
auto iter = channels_.find(channel);
if (iter != channels_.end()) {
audio_channel = iter->second;
@@ -191,7 +190,7 @@ bool VoipCore::UpdateAudioTransportWithSenders() {
int max_sampling_rate = 8000;
size_t max_num_channels = 1;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
// Reserve to prevent run time vector re-allocation.
audio_senders.reserve(channels_.size());
for (auto kv : channels_) {
@@ -290,7 +289,7 @@ bool VoipCore::StopPlayout(ChannelId channel) {
bool stop_device = true;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (auto kv : channels_) {
rtc::scoped_refptr<AudioChannel>& channel = kv.second;
if (channel->IsPlaying()) {
diff --git a/audio/voip/voip_core.h b/audio/voip/voip_core.h
index 08929d3afd..22a6559981 100644
--- a/audio/voip/voip_core.h
+++ b/audio/voip/voip_core.h
@@ -31,7 +31,7 @@
#include "modules/audio_mixer/audio_mixer_impl.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/utility/include/process_thread.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -123,7 +123,7 @@ class VoipCore : public VoipEngine,
// Must be placed before |channels_| for proper destruction.
std::unique_ptr<ProcessThread> process_thread_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
// Member to track a next ChannelId for new AudioChannel.
int next_channel_id_ RTC_GUARDED_BY(lock_) = 0;
diff --git a/base/third_party/libevent/event.h b/base/third_party/libevent/event.h
new file mode 100644
index 0000000000..3efc1e9fea
--- /dev/null
+++ b/base/third_party/libevent/event.h
@@ -0,0 +1 @@
+#include <event.h>
diff --git a/build_overrides/build.gni b/build_overrides/build.gni
index 669044db81..8a9dfacd77 100644
--- a/build_overrides/build.gni
+++ b/build_overrides/build.gni
@@ -16,19 +16,19 @@ linux_use_bundled_binutils_override = true
# only needed to support both WebRTC standalone and Chromium builds.
build_with_chromium = false
+# WebRTC checks out google_benchmark by default since it is always used.
+checkout_google_benchmark = true
+
# Use our own suppressions files.
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc"
tsan_suppressions_file = "//tools_webrtc/sanitizers/tsan_suppressions_webrtc.cc"
msan_blacklist_path =
- rebase_path("//tools_webrtc/msan/blacklist.txt", root_build_dir)
+ rebase_path("//tools_webrtc/msan/suppressions.txt", root_build_dir)
ubsan_blacklist_path =
- rebase_path("//tools_webrtc/ubsan/blacklist.txt", root_build_dir)
+ rebase_path("//tools_webrtc/ubsan/suppressions.txt", root_build_dir)
ubsan_vptr_blacklist_path =
- rebase_path("//tools_webrtc/ubsan/vptr_blacklist.txt", root_build_dir)
-
-# Android lint suppressions file
-lint_suppressions_file = "//tools_webrtc/android/suppressions.xml"
+ rebase_path("//tools_webrtc/ubsan/vptr_suppressions.txt", root_build_dir)
# For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size
# limit, making them requiring symbol_level=2. WebRTC doesn't hit that problem
@@ -46,3 +46,14 @@ if (host_os == "mac") {
"hermetic toolchain if the minimum OS version is not met.")
use_system_xcode = _result == 0
}
+
+declare_args() {
+ # WebRTC doesn't depend on //base from production code but only for testing
+ # purposes. In any case, it doesn't depend on //third_party/perfetto which
+ # is used for base tracing, so this feature is disabled.
+ enable_base_tracing = false
+
+ # If true, it assumes that //third_party/abseil-cpp is an available
+ # dependency for googletest.
+ gtest_enable_absl_printers = true
+}
diff --git a/call/BUILD.gn b/call/BUILD.gn
index a9037c3819..65b545c11e 100644
--- a/call/BUILD.gn
+++ b/call/BUILD.gn
@@ -39,6 +39,7 @@ rtc_library("call_interfaces") {
"../api:rtp_parameters",
"../api:scoped_refptr",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/audio:audio_mixer_api",
"../api/audio_codecs:audio_codecs_api",
"../api/crypto:frame_decryptor_interface",
@@ -61,8 +62,8 @@ rtc_library("call_interfaces") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/network:sent_packet",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("audio_sender_interface") {
@@ -80,7 +81,6 @@ rtc_library("rtp_interfaces") {
# client code gets updated.
visibility = [ "*" ]
sources = [
- "rtcp_packet_sink_interface.h",
"rtp_config.cc",
"rtp_config.h",
"rtp_packet_sink_interface.h",
@@ -100,6 +100,8 @@ rtc_library("rtp_interfaces") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -108,17 +110,12 @@ rtc_library("rtp_interfaces") {
rtc_library("rtp_receiver") {
visibility = [ "*" ]
sources = [
- "rtcp_demuxer.cc",
- "rtcp_demuxer.h",
"rtp_demuxer.cc",
"rtp_demuxer.h",
- "rtp_rtcp_demuxer_helper.cc",
- "rtp_rtcp_demuxer_helper.h",
"rtp_stream_receiver_controller.cc",
"rtp_stream_receiver_controller.h",
"rtx_receive_stream.cc",
"rtx_receive_stream.h",
- "ssrc_binding_observer.h",
]
deps = [
":rtp_interfaces",
@@ -128,8 +125,8 @@ rtc_library("rtp_receiver") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_sender") {
@@ -171,6 +168,7 @@ rtc_library("rtp_sender") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/rtp_rtcp:rtp_video_header",
"../modules/utility",
+ "../modules/video_coding:chain_diff_calculator",
"../modules/video_coding:codec_globals_headers",
"../modules/video_coding:frame_dependencies_calculator",
"../modules/video_coding:video_codec_interface",
@@ -179,7 +177,10 @@ rtc_library("rtp_sender") {
"../rtc_base:rate_limiter",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/task_utils:repeating_task",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings:strings",
@@ -202,8 +203,8 @@ rtc_library("bitrate_configurator") {
"../api/units:data_rate",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("bitrate_allocator") {
@@ -223,8 +224,8 @@ rtc_library("bitrate_allocator") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("call") {
@@ -279,14 +280,15 @@ rtc_library("call") {
"../rtc_base:safe_minmax",
"../rtc_base/experiments:field_trial_parser",
"../rtc_base/network:sent_packet",
- "../rtc_base/synchronization:rw_lock_wrapper",
"../rtc_base/synchronization:sequence_checker",
+ "../rtc_base/task_utils:pending_task_safety_flag",
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
"../video",
- "//third_party/abseil-cpp/absl/types:optional",
+ "adaptation:resource_adaptation",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_stream_api") {
@@ -301,7 +303,9 @@ rtc_library("video_stream_api") {
"../api:frame_transformer_interface",
"../api:rtp_headers",
"../api:rtp_parameters",
+ "../api:scoped_refptr",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/crypto:frame_decryptor_interface",
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
@@ -315,8 +319,8 @@ rtc_library("video_stream_api") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("simulated_network") {
@@ -332,9 +336,10 @@ rtc_library("simulated_network") {
"../api/units:timestamp",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("simulated_packet_receiver") {
@@ -360,6 +365,7 @@ rtc_library("fake_network") {
"../modules/utility",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../system_wrappers",
]
@@ -375,11 +381,9 @@ if (rtc_include_tests) {
"call_unittest.cc",
"flexfec_receive_stream_unittest.cc",
"receive_time_calculator_unittest.cc",
- "rtcp_demuxer_unittest.cc",
"rtp_bitrate_configurator_unittest.cc",
"rtp_demuxer_unittest.cc",
"rtp_payload_params_unittest.cc",
- "rtp_rtcp_demuxer_helper_unittest.cc",
"rtp_video_sender_unittest.cc",
"rtx_receive_stream_unittest.cc",
]
@@ -402,7 +406,9 @@ if (rtc_include_tests) {
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/rtc_event_log",
"../api/task_queue:default_task_queue_factory",
+ "../api/test/video:function_video_factory",
"../api/transport:field_trial_based_config",
+ "../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../audio",
@@ -423,6 +429,7 @@ if (rtc_include_tests) {
"../rtc_base:rate_limiter",
"../rtc_base:rtc_base_approved",
"../rtc_base:task_queue_for_test",
+ "../rtc_base/synchronization:mutex",
"../system_wrappers",
"../test:audio_codec_mocks",
"../test:direct_transport",
@@ -436,12 +443,16 @@ if (rtc_include_tests) {
"../test:video_test_common",
"../test/time_controller:time_controller",
"../video",
+ "adaptation:resource_adaptation_test_utilities",
"//test/scenario:scenario",
"//testing/gmock",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
]
}
@@ -478,6 +489,7 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../rtc_base:task_queue_for_test",
"../rtc_base:task_queue_for_test",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/task_utils:repeating_task",
"../system_wrappers",
"../system_wrappers:metrics",
@@ -494,8 +506,8 @@ if (rtc_include_tests) {
"../test:video_test_common",
"../video",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
}
# TODO(eladalon): This should be moved, as with the TODO for |rtp_interfaces|.
@@ -553,7 +565,7 @@ if (rtc_include_tests) {
"../system_wrappers",
"../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
}
diff --git a/call/adaptation/BUILD.gn b/call/adaptation/BUILD.gn
index deac3156d6..94944d6820 100644
--- a/call/adaptation/BUILD.gn
+++ b/call/adaptation/BUILD.gn
@@ -10,10 +10,16 @@ import("../../webrtc.gni")
rtc_library("resource_adaptation") {
sources = [
+ "adaptation_constraint.cc",
+ "adaptation_constraint.h",
+ "adaptation_listener.cc",
+ "adaptation_listener.h",
+ "broadcast_resource_listener.cc",
+ "broadcast_resource_listener.h",
+ "degradation_preference_provider.cc",
+ "degradation_preference_provider.h",
"encoder_settings.cc",
"encoder_settings.h",
- "resource.cc",
- "resource.h",
"resource_adaptation_processor.cc",
"resource_adaptation_processor.h",
"resource_adaptation_processor_interface.cc",
@@ -29,6 +35,9 @@ rtc_library("resource_adaptation") {
]
deps = [
"../../api:rtp_parameters",
+ "../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
+ "../../api/task_queue:task_queue",
"../../api/video:video_adaptation",
"../../api/video:video_frame",
"../../api/video:video_stream_encoder",
@@ -36,9 +45,16 @@ rtc_library("resource_adaptation") {
"../../modules/video_coding:video_coding_utility",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:balanced_degradation_settings",
+ "../../rtc_base/synchronization:mutex",
+ "../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:to_queued_task",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
]
}
@@ -47,6 +63,7 @@ if (rtc_include_tests) {
testonly = true
sources = [
+ "broadcast_resource_listener_unittest.cc",
"resource_adaptation_processor_unittest.cc",
"resource_unittest.cc",
"video_source_restrictions_unittest.cc",
@@ -56,31 +73,50 @@ if (rtc_include_tests) {
deps = [
":resource_adaptation",
":resource_adaptation_test_utilities",
+ "../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/task_queue:task_queue",
"../../api/video:video_adaptation",
"../../api/video_codecs:video_codecs_api",
"../../rtc_base:checks",
+ "../../rtc_base:gunit_helpers",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base:rtc_task_queue",
+ "../../rtc_base:task_queue_for_test",
+ "../../rtc_base/synchronization:mutex",
"../../test:field_trial",
"../../test:rtc_expect_death",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("resource_adaptation_test_utilities") {
testonly = true
sources = [
+ "test/fake_adaptation_constraint.cc",
+ "test/fake_adaptation_constraint.h",
+ "test/fake_adaptation_listener.cc",
+ "test/fake_adaptation_listener.h",
"test/fake_frame_rate_provider.cc",
"test/fake_frame_rate_provider.h",
"test/fake_resource.cc",
"test/fake_resource.h",
+ "test/mock_resource_listener.h",
]
deps = [
":resource_adaptation",
+ "../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
+ "../../api/task_queue:task_queue",
"../../api/video:video_stream_encoder",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/call/adaptation/adaptation_constraint.cc b/call/adaptation/adaptation_constraint.cc
new file mode 100644
index 0000000000..d62bb74f87
--- /dev/null
+++ b/call/adaptation/adaptation_constraint.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/adaptation_constraint.h"
+
+namespace webrtc {
+
+AdaptationConstraint::~AdaptationConstraint() {}
+
+} // namespace webrtc
diff --git a/call/adaptation/adaptation_constraint.h b/call/adaptation/adaptation_constraint.h
new file mode 100644
index 0000000000..9ff15d6b86
--- /dev/null
+++ b/call/adaptation/adaptation_constraint.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
+#define CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
+
+#include <string>
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
+
+namespace webrtc {
+
+// Adaptation constraints have the ability to prevent applying a proposed
+// adaptation (expressed as restrictions before/after adaptation).
+class AdaptationConstraint {
+ public:
+ virtual ~AdaptationConstraint();
+
+ virtual std::string Name() const = 0;
+
+ // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation
+ // support, this interface needs to indicate which stream the adaptation
+ // applies to.
+ virtual bool IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
diff --git a/call/adaptation/adaptation_listener.cc b/call/adaptation/adaptation_listener.cc
new file mode 100644
index 0000000000..acc1564f77
--- /dev/null
+++ b/call/adaptation/adaptation_listener.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/adaptation_listener.h"
+
+namespace webrtc {
+
+AdaptationListener::~AdaptationListener() {}
+
+} // namespace webrtc
diff --git a/call/adaptation/adaptation_listener.h b/call/adaptation/adaptation_listener.h
new file mode 100644
index 0000000000..4a96baef8e
--- /dev/null
+++ b/call/adaptation/adaptation_listener.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_ADAPTATION_LISTENER_H_
+#define CALL_ADAPTATION_ADAPTATION_LISTENER_H_
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
+
+namespace webrtc {
+
+// TODO(hbos): Can this be consolidated with
+// ResourceAdaptationProcessorListener::OnVideoSourceRestrictionsUpdated()? Both
+// listen to adaptations being applied, but on different layers with different
+// arguments.
+class AdaptationListener {
+ public:
+ virtual ~AdaptationListener();
+
+ // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation
+ // support, this interface needs to indicate which stream the adaptation
+ // applies to.
+ virtual void OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_ADAPTATION_LISTENER_H_
diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc
new file mode 100644
index 0000000000..59bd1e0c7f
--- /dev/null
+++ b/call/adaptation/broadcast_resource_listener.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/broadcast_resource_listener.h"
+
+#include <algorithm>
+#include <string>
+#include <utility>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// The AdapterResource redirects resource usage measurements from its parent to
+// a single ResourceListener.
+class BroadcastResourceListener::AdapterResource : public Resource {
+ public:
+ explicit AdapterResource(std::string name) : name_(std::move(name)) {}
+ ~AdapterResource() override { RTC_DCHECK(!listener_); }
+
+ // The parent is letting us know we have a usage neasurement.
+ void OnResourceUsageStateMeasured(ResourceUsageState usage_state) {
+ MutexLock lock(&lock_);
+ if (!listener_)
+ return;
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
+
+ // Resource implementation.
+ std::string Name() const override { return name_; }
+ void SetResourceListener(ResourceListener* listener) override {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(!listener_ || !listener);
+ listener_ = listener;
+ }
+
+ private:
+ const std::string name_;
+ Mutex lock_;
+ ResourceListener* listener_ RTC_GUARDED_BY(lock_) = nullptr;
+};
+
+BroadcastResourceListener::BroadcastResourceListener(
+ rtc::scoped_refptr<Resource> source_resource)
+ : source_resource_(source_resource), is_listening_(false) {
+ RTC_DCHECK(source_resource_);
+}
+
+BroadcastResourceListener::~BroadcastResourceListener() {
+ RTC_DCHECK(!is_listening_);
+}
+
+rtc::scoped_refptr<Resource> BroadcastResourceListener::SourceResource() const {
+ return source_resource_;
+}
+
+void BroadcastResourceListener::StartListening() {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(!is_listening_);
+ source_resource_->SetResourceListener(this);
+ is_listening_ = true;
+}
+
+void BroadcastResourceListener::StopListening() {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(is_listening_);
+ RTC_DCHECK(adapters_.empty());
+ source_resource_->SetResourceListener(nullptr);
+ is_listening_ = false;
+}
+
+rtc::scoped_refptr<Resource>
+BroadcastResourceListener::CreateAdapterResource() {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(is_listening_);
+ rtc::scoped_refptr<AdapterResource> adapter =
+ new rtc::RefCountedObject<AdapterResource>(source_resource_->Name() +
+ "Adapter");
+ adapters_.push_back(adapter);
+ return adapter;
+}
+
+void BroadcastResourceListener::RemoveAdapterResource(
+ rtc::scoped_refptr<Resource> resource) {
+ MutexLock lock(&lock_);
+ auto it = std::find(adapters_.begin(), adapters_.end(), resource);
+ RTC_DCHECK(it != adapters_.end());
+ adapters_.erase(it);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+BroadcastResourceListener::GetAdapterResources() {
+ std::vector<rtc::scoped_refptr<Resource>> resources;
+ MutexLock lock(&lock_);
+ for (const auto& adapter : adapters_) {
+ resources.push_back(adapter);
+ }
+ return resources;
+}
+
+void BroadcastResourceListener::OnResourceUsageStateMeasured(
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ RTC_DCHECK_EQ(resource, source_resource_);
+ MutexLock lock(&lock_);
+ for (const auto& adapter : adapters_) {
+ adapter->OnResourceUsageStateMeasured(usage_state);
+ }
+}
+
+} // namespace webrtc
diff --git a/call/adaptation/broadcast_resource_listener.h b/call/adaptation/broadcast_resource_listener.h
new file mode 100644
index 0000000000..2c5a5c703b
--- /dev/null
+++ b/call/adaptation/broadcast_resource_listener.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
+#define CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
+
+#include <vector>
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// Responsible for forwarding 1 resource usage measurement to N listeners by
+// creating N "adapter" resources.
+//
+// Example:
+// If we have ResourceA, ResourceListenerX and ResourceListenerY we can create a
+// BroadcastResourceListener that listens to ResourceA, use CreateAdapter() to
+// spawn adapter resources ResourceX and ResourceY and let ResourceListenerX
+// listen to ResourceX and ResourceListenerY listen to ResourceY. When ResourceA
+// makes a measurement it will be echoed by both ResourceX and ResourceY.
+//
+// TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor is
+// moved to call there will only be one ResourceAdaptationProcessor that needs
+// to listen to the injected resources. When this is the case, delete this class
+// and DCHECK that a Resource's listener is never overwritten.
+class BroadcastResourceListener : public ResourceListener {
+ public:
+ explicit BroadcastResourceListener(
+ rtc::scoped_refptr<Resource> source_resource);
+ ~BroadcastResourceListener() override;
+
+ rtc::scoped_refptr<Resource> SourceResource() const;
+ void StartListening();
+ void StopListening();
+
+ // Creates a Resource that redirects any resource usage measurements that
+ // BroadcastResourceListener receives to its listener.
+ rtc::scoped_refptr<Resource> CreateAdapterResource();
+
+ // Unregister the adapter from the BroadcastResourceListener; it will no
+ // longer receive resource usage measurement and will no longer be referenced.
+ // Use this to prevent memory leaks of old adapters.
+ void RemoveAdapterResource(rtc::scoped_refptr<Resource> resource);
+ std::vector<rtc::scoped_refptr<Resource>> GetAdapterResources();
+
+ // ResourceListener implementation.
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
+
+ private:
+ class AdapterResource;
+ friend class AdapterResource;
+
+ const rtc::scoped_refptr<Resource> source_resource_;
+ Mutex lock_;
+ bool is_listening_ RTC_GUARDED_BY(lock_);
+ // The AdapterResource unregisters itself prior to destruction, guaranteeing
+ // that these pointers are safe to use.
+ std::vector<rtc::scoped_refptr<AdapterResource>> adapters_
+ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
diff --git a/call/adaptation/broadcast_resource_listener_unittest.cc b/call/adaptation/broadcast_resource_listener_unittest.cc
new file mode 100644
index 0000000000..9cd80500c2
--- /dev/null
+++ b/call/adaptation/broadcast_resource_listener_unittest.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/broadcast_resource_listener.h"
+
+#include "call/adaptation/test/fake_resource.h"
+#include "call/adaptation/test/mock_resource_listener.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using ::testing::_;
+using ::testing::StrictMock;
+
+TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("SourceResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty());
+ rtc::scoped_refptr<Resource> adapter =
+ broadcast_resource_listener.CreateAdapterResource();
+ StrictMock<MockResourceListener> listener;
+ adapter->SetResourceListener(&listener);
+ EXPECT_EQ(std::vector<rtc::scoped_refptr<Resource>>{adapter},
+ broadcast_resource_listener.GetAdapterResources());
+
+ // The removed adapter is not referenced by the broadcaster.
+ broadcast_resource_listener.RemoveAdapterResource(adapter);
+ EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty());
+ // The removed adapter is not forwarding measurements.
+ EXPECT_CALL(listener, OnResourceUsageStateMeasured(_, _)).Times(0);
+ source_resource->SetUsageState(ResourceUsageState::kOveruse);
+ // Cleanup.
+ adapter->SetResourceListener(nullptr);
+ broadcast_resource_listener.StopListening();
+}
+
+TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("FooBarResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ rtc::scoped_refptr<Resource> adapter =
+ broadcast_resource_listener.CreateAdapterResource();
+ EXPECT_EQ("FooBarResourceAdapter", adapter->Name());
+
+ broadcast_resource_listener.RemoveAdapterResource(adapter);
+ broadcast_resource_listener.StopListening();
+}
+
+TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("SourceResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ StrictMock<MockResourceListener> destination_listener1;
+ StrictMock<MockResourceListener> destination_listener2;
+ rtc::scoped_refptr<Resource> adapter1 =
+ broadcast_resource_listener.CreateAdapterResource();
+ adapter1->SetResourceListener(&destination_listener1);
+ rtc::scoped_refptr<Resource> adapter2 =
+ broadcast_resource_listener.CreateAdapterResource();
+ adapter2->SetResourceListener(&destination_listener2);
+
+ // Expect kOveruse to be echoed.
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter1, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter2, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ source_resource->SetUsageState(ResourceUsageState::kOveruse);
+
+ // Expect kUnderuse to be echoed.
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter1, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter2, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ source_resource->SetUsageState(ResourceUsageState::kUnderuse);
+
+ // Adapters have to be unregistered before they or the broadcaster is
+ // destroyed, ensuring safe use of raw pointers.
+ adapter1->SetResourceListener(nullptr);
+ adapter2->SetResourceListener(nullptr);
+
+ broadcast_resource_listener.RemoveAdapterResource(adapter1);
+ broadcast_resource_listener.RemoveAdapterResource(adapter2);
+ broadcast_resource_listener.StopListening();
+}
+
+} // namespace webrtc
diff --git a/call/adaptation/degradation_preference_provider.cc b/call/adaptation/degradation_preference_provider.cc
new file mode 100644
index 0000000000..c87e49f366
--- /dev/null
+++ b/call/adaptation/degradation_preference_provider.cc
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/degradation_preference_provider.h"
+
+webrtc::DegradationPreferenceProvider::~DegradationPreferenceProvider() =
+ default;
diff --git a/call/adaptation/degradation_preference_provider.h b/call/adaptation/degradation_preference_provider.h
new file mode 100644
index 0000000000..035fed1e55
--- /dev/null
+++ b/call/adaptation/degradation_preference_provider.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_
+#define CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_
+
+#include "api/rtp_parameters.h"
+
+namespace webrtc {
+
+// Thread-safe retrieval of degradation preferences.
+class DegradationPreferenceProvider {
+ public:
+ virtual ~DegradationPreferenceProvider();
+
+ virtual DegradationPreference degradation_preference() const = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_
diff --git a/call/adaptation/resource.cc b/call/adaptation/resource.cc
deleted file mode 100644
index 1f90934258..0000000000
--- a/call/adaptation/resource.cc
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/adaptation/resource.h"
-
-#include "absl/algorithm/container.h"
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-
-ResourceListener::~ResourceListener() {}
-
-Resource::Resource() : usage_state_(absl::nullopt), listener_(nullptr) {}
-
-Resource::~Resource() {}
-
-void Resource::SetResourceListener(ResourceListener* listener) {
- // If you want to change listener you need to unregister the old listener by
- // setting it to null first.
- RTC_DCHECK(!listener_ || !listener) << "A listener is already set";
- listener_ = listener;
-}
-
-absl::optional<ResourceUsageState> Resource::usage_state() const {
- return usage_state_;
-}
-
-void Resource::ClearUsageState() {
- usage_state_ = absl::nullopt;
-}
-
-bool Resource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const {
- return true;
-}
-
-void Resource::OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) {}
-
-void Resource::OnResourceUsageStateMeasured(ResourceUsageState usage_state) {
- usage_state_ = usage_state;
- if (!listener_)
- return;
- listener_->OnResourceUsageStateMeasured(*this);
-}
-
-} // namespace webrtc
diff --git a/call/adaptation/resource.h b/call/adaptation/resource.h
deleted file mode 100644
index 1f58dc127c..0000000000
--- a/call/adaptation/resource.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef CALL_ADAPTATION_RESOURCE_H_
-#define CALL_ADAPTATION_RESOURCE_H_
-
-#include <string>
-#include <vector>
-
-#include "absl/types/optional.h"
-#include "call/adaptation/video_source_restrictions.h"
-#include "call/adaptation/video_stream_input_state.h"
-
-namespace webrtc {
-
-class Resource;
-
-enum class ResourceUsageState {
- // Action is needed to minimze the load on this resource.
- kOveruse,
- // Increasing the load on this resource is desired, if possible.
- kUnderuse,
-};
-
-class ResourceListener {
- public:
- virtual ~ResourceListener();
-
- // Informs the listener of a new measurement of resource usage. This means
- // that |resource.usage_state()| is now up-to-date.
- virtual void OnResourceUsageStateMeasured(const Resource& resource) = 0;
-};
-
-class Resource {
- public:
- // By default, usage_state() is null until a measurement is made.
- Resource();
- virtual ~Resource();
-
- void SetResourceListener(ResourceListener* listener);
-
- absl::optional<ResourceUsageState> usage_state() const;
- void ClearUsageState();
-
- // This method allows the Resource to reject a proposed adaptation in the "up"
- // direction if it predicts this would cause overuse of this resource. The
- // default implementation unconditionally returns true (= allowed).
- virtual bool IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const;
- virtual void OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource);
-
- virtual std::string name() const = 0;
-
- protected:
- // Updates the usage state and informs all registered listeners.
- void OnResourceUsageStateMeasured(ResourceUsageState usage_state);
-
- private:
- absl::optional<ResourceUsageState> usage_state_;
- ResourceListener* listener_;
-};
-
-} // namespace webrtc
-
-#endif // CALL_ADAPTATION_RESOURCE_H_
diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc
index 79fb9daab2..b3095ed857 100644
--- a/call/adaptation/resource_adaptation_processor.cc
+++ b/call/adaptation/resource_adaptation_processor.cc
@@ -10,204 +10,304 @@
#include "call/adaptation/resource_adaptation_processor.h"
+#include <algorithm>
+#include <string>
#include <utility>
#include "absl/algorithm/container.h"
+#include "api/video/video_adaptation_counters.h"
+#include "call/adaptation/video_stream_adapter.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_utils/to_queued_task.h"
namespace webrtc {
-ResourceAdaptationProcessor::ResourceAdaptationProcessor(
- VideoStreamInputStateProvider* input_state_provider,
- VideoStreamEncoderObserver* encoder_stats_observer)
- : input_state_provider_(input_state_provider),
- encoder_stats_observer_(encoder_stats_observer),
- resources_(),
- degradation_preference_(DegradationPreference::DISABLED),
- effective_degradation_preference_(DegradationPreference::DISABLED),
- is_screenshare_(false),
- stream_adapter_(std::make_unique<VideoStreamAdapter>()),
- last_reported_source_restrictions_(),
- processing_in_progress_(false) {}
-
-ResourceAdaptationProcessor::~ResourceAdaptationProcessor() = default;
+ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate(
+ ResourceAdaptationProcessor* processor)
+ : resource_adaptation_queue_(nullptr), processor_(processor) {}
-DegradationPreference ResourceAdaptationProcessor::degradation_preference()
- const {
- return degradation_preference_;
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue) {
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
}
-DegradationPreference
-ResourceAdaptationProcessor::effective_degradation_preference() const {
- return effective_degradation_preference_;
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ OnProcessorDestroyed() {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ processor_ = nullptr;
}
-void ResourceAdaptationProcessor::StartResourceAdaptation() {
- for (auto* resource : resources_) {
- resource->SetResourceListener(this);
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ if (!resource_adaptation_queue_->IsCurrent()) {
+ resource_adaptation_queue_->PostTask(ToQueuedTask(
+ [this_ref = rtc::scoped_refptr<ResourceListenerDelegate>(this),
+ resource, usage_state] {
+ this_ref->OnResourceUsageStateMeasured(resource, usage_state);
+ }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (processor_) {
+ processor_->OnResourceUsageStateMeasured(resource, usage_state);
}
}
-void ResourceAdaptationProcessor::StopResourceAdaptation() {
- for (auto* resource : resources_) {
- resource->SetResourceListener(nullptr);
- }
+ResourceAdaptationProcessor::MitigationResultAndLogMessage::
+ MitigationResultAndLogMessage()
+ : result(MitigationResult::kAdaptationApplied), message() {}
+
+ResourceAdaptationProcessor::MitigationResultAndLogMessage::
+ MitigationResultAndLogMessage(MitigationResult result, std::string message)
+ : result(result), message(std::move(message)) {}
+
+ResourceAdaptationProcessor::ResourceAdaptationProcessor(
+ VideoStreamEncoderObserver* encoder_stats_observer,
+ VideoStreamAdapter* stream_adapter)
+ : resource_adaptation_queue_(nullptr),
+ resource_listener_delegate_(
+ new rtc::RefCountedObject<ResourceListenerDelegate>(this)),
+ encoder_stats_observer_(encoder_stats_observer),
+ resources_(),
+ stream_adapter_(stream_adapter),
+ last_reported_source_restrictions_(),
+ previous_mitigation_results_(),
+ processing_in_progress_(false) {
+ RTC_DCHECK(stream_adapter_);
}
-void ResourceAdaptationProcessor::AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) {
- adaptation_listeners_.push_back(adaptation_listener);
+ResourceAdaptationProcessor::~ResourceAdaptationProcessor() {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(resources_.empty())
+ << "There are resource(s) attached to a ResourceAdaptationProcessor "
+ << "being destroyed.";
+ stream_adapter_->RemoveRestrictionsListener(this);
+ resource_listener_delegate_->OnProcessorDestroyed();
}
-void ResourceAdaptationProcessor::AddResource(Resource* resource) {
- resources_.push_back(resource);
+void ResourceAdaptationProcessor::SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+ resource_listener_delegate_->SetResourceAdaptationQueue(
+ resource_adaptation_queue);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ // Now that we have the adaptation queue we can attach as adaptation listener.
+ stream_adapter_->AddRestrictionsListener(this);
}
-void ResourceAdaptationProcessor::SetDegradationPreference(
- DegradationPreference degradation_preference) {
- degradation_preference_ = degradation_preference;
- MaybeUpdateEffectiveDegradationPreference();
+void ResourceAdaptationProcessor::AddResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(std::find(resource_limitations_listeners_.begin(),
+ resource_limitations_listeners_.end(),
+ limitations_listener) ==
+ resource_limitations_listeners_.end());
+ resource_limitations_listeners_.push_back(limitations_listener);
+}
+void ResourceAdaptationProcessor::RemoveResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ auto it =
+ std::find(resource_limitations_listeners_.begin(),
+ resource_limitations_listeners_.end(), limitations_listener);
+ RTC_DCHECK(it != resource_limitations_listeners_.end());
+ resource_limitations_listeners_.erase(it);
}
-void ResourceAdaptationProcessor::SetIsScreenshare(bool is_screenshare) {
- is_screenshare_ = is_screenshare;
- MaybeUpdateEffectiveDegradationPreference();
+void ResourceAdaptationProcessor::AddResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK(resource);
+ {
+ MutexLock crit(&resources_lock_);
+ RTC_DCHECK(absl::c_find(resources_, resource) == resources_.end())
+ << "Resource \"" << resource->Name() << "\" was already registered.";
+ resources_.push_back(resource);
+ }
+ resource->SetResourceListener(resource_listener_delegate_);
}
-void ResourceAdaptationProcessor::MaybeUpdateEffectiveDegradationPreference() {
- effective_degradation_preference_ =
- (is_screenshare_ &&
- degradation_preference_ == DegradationPreference::BALANCED)
- ? DegradationPreference::MAINTAIN_RESOLUTION
- : degradation_preference_;
- stream_adapter_->SetDegradationPreference(effective_degradation_preference_);
- MaybeUpdateVideoSourceRestrictions(nullptr);
+std::vector<rtc::scoped_refptr<Resource>>
+ResourceAdaptationProcessor::GetResources() const {
+ MutexLock crit(&resources_lock_);
+ return resources_;
}
-void ResourceAdaptationProcessor::ResetVideoSourceRestrictions() {
- stream_adapter_->ClearRestrictions();
- adaptations_counts_by_resource_.clear();
- MaybeUpdateVideoSourceRestrictions(nullptr);
+void ResourceAdaptationProcessor::RemoveResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK(resource);
+ RTC_LOG(INFO) << "Removing resource \"" << resource->Name() << "\".";
+ resource->SetResourceListener(nullptr);
+ {
+ MutexLock crit(&resources_lock_);
+ auto it = absl::c_find(resources_, resource);
+ RTC_DCHECK(it != resources_.end()) << "Resource \"" << resource->Name()
+ << "\" was not a registered resource.";
+ resources_.erase(it);
+ }
+ RemoveLimitationsImposedByResource(std::move(resource));
}
-void ResourceAdaptationProcessor::MaybeUpdateVideoSourceRestrictions(
- const Resource* reason) {
- VideoSourceRestrictions new_source_restrictions =
- FilterRestrictionsByDegradationPreference(
- stream_adapter_->source_restrictions(),
- effective_degradation_preference_);
- if (last_reported_source_restrictions_ != new_source_restrictions) {
- last_reported_source_restrictions_ = std::move(new_source_restrictions);
- for (auto* adaptation_listener : adaptation_listeners_) {
- adaptation_listener->OnVideoSourceRestrictionsUpdated(
- last_reported_source_restrictions_,
- stream_adapter_->adaptation_counters(), reason);
+void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource(
+ rtc::scoped_refptr<Resource> resource) {
+ if (!resource_adaptation_queue_->IsCurrent()) {
+ resource_adaptation_queue_->PostTask(ToQueuedTask(
+ [this, resource]() { RemoveLimitationsImposedByResource(resource); }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ auto resource_adaptation_limits =
+ adaptation_limits_by_resources_.find(resource);
+ if (resource_adaptation_limits != adaptation_limits_by_resources_.end()) {
+ VideoStreamAdapter::RestrictionsWithCounters adaptation_limits =
+ resource_adaptation_limits->second;
+ adaptation_limits_by_resources_.erase(resource_adaptation_limits);
+ if (adaptation_limits_by_resources_.empty()) {
+ // Only the resource being removed was adapted so clear restrictions.
+ stream_adapter_->ClearRestrictions();
+ return;
}
- if (reason) {
- UpdateResourceDegradationCounts(reason);
+
+ VideoStreamAdapter::RestrictionsWithCounters most_limited =
+ FindMostLimitedResources().second;
+
+ if (adaptation_limits.counters.Total() <= most_limited.counters.Total()) {
+ // The removed limitations were less limited than the most limited
+ // resource. Don't change the current restrictions.
+ return;
}
+
+ // Apply the new most limited resource as the next restrictions.
+ Adaptation adapt_to = stream_adapter_->GetAdaptationTo(
+ most_limited.counters, most_limited.restrictions);
+ RTC_DCHECK_EQ(adapt_to.status(), Adaptation::Status::kValid);
+ stream_adapter_->ApplyAdaptation(adapt_to, nullptr);
+
+ RTC_LOG(INFO) << "Most limited resource removed. Restoring restrictions to "
+ "next most limited restrictions: "
+ << most_limited.restrictions.ToString() << " with counters "
+ << most_limited.counters.ToString();
}
}
void ResourceAdaptationProcessor::OnResourceUsageStateMeasured(
- const Resource& resource) {
- RTC_DCHECK(resource.usage_state().has_value());
- switch (resource.usage_state().value()) {
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(resource);
+ // |resource| could have been removed after signalling.
+ {
+ MutexLock crit(&resources_lock_);
+ if (absl::c_find(resources_, resource) == resources_.end()) {
+ RTC_LOG(INFO) << "Ignoring signal from removed resource \""
+ << resource->Name() << "\".";
+ return;
+ }
+ }
+ MitigationResultAndLogMessage result_and_message;
+ switch (usage_state) {
case ResourceUsageState::kOveruse:
- OnResourceOveruse(resource);
+ result_and_message = OnResourceOveruse(resource);
break;
case ResourceUsageState::kUnderuse:
- OnResourceUnderuse(resource);
+ result_and_message = OnResourceUnderuse(resource);
break;
}
+ // Maybe log the result of the operation.
+ auto it = previous_mitigation_results_.find(resource.get());
+ if (it != previous_mitigation_results_.end() &&
+ it->second == result_and_message.result) {
+ // This resource has previously reported the same result and we haven't
+ // successfully adapted since - don't log to avoid spam.
+ return;
+ }
+ RTC_LOG(INFO) << "Resource \"" << resource->Name() << "\" signalled "
+ << ResourceUsageStateToString(usage_state) << ". "
+ << result_and_message.message;
+ if (result_and_message.result == MitigationResult::kAdaptationApplied) {
+ previous_mitigation_results_.clear();
+ } else {
+ previous_mitigation_results_.insert(
+ std::make_pair(resource.get(), result_and_message.result));
+ }
}
-bool ResourceAdaptationProcessor::HasSufficientInputForAdaptation(
- const VideoStreamInputState& input_state) const {
- return input_state.HasInputFrameSizeAndFramesPerSecond() &&
- (effective_degradation_preference_ !=
- DegradationPreference::MAINTAIN_RESOLUTION ||
- input_state.frames_per_second() >= kMinFrameRateFps);
-}
-
-void ResourceAdaptationProcessor::OnResourceUnderuse(
- const Resource& reason_resource) {
+ResourceAdaptationProcessor::MitigationResultAndLogMessage
+ResourceAdaptationProcessor::OnResourceUnderuse(
+ rtc::scoped_refptr<Resource> reason_resource) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
- // Clear all usage states. In order to re-run adaptation logic, resources need
- // to provide new resource usage measurements.
- // TODO(hbos): Support not unconditionally clearing usage states by having the
- // ResourceAdaptationProcessor check in on its resources at certain intervals.
- for (Resource* resource : resources_) {
- resource->ClearUsageState();
- }
- VideoStreamInputState input_state = input_state_provider_->InputState();
- if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
- !HasSufficientInputForAdaptation(input_state)) {
- processing_in_progress_ = false;
- return;
- }
- if (!IsResourceAllowedToAdaptUp(&reason_resource)) {
- processing_in_progress_ = false;
- return;
- }
- // Update video input states and encoder settings for accurate adaptation.
- stream_adapter_->SetInput(input_state);
// How can this stream be adapted up?
- Adaptation adaptation = stream_adapter_->GetAdaptationUp();
+ Adaptation adaptation = stream_adapter_->GetAdaptationUp(reason_resource);
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
- return;
+ rtc::StringBuilder message;
+ message << "Not adapting up because VideoStreamAdapter returned "
+ << Adaptation::StatusToString(adaptation.status());
+ return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter,
+ message.Release());
}
- // Are all resources OK with this adaptation being applied?
- VideoSourceRestrictions restrictions_before =
- stream_adapter_->source_restrictions();
- VideoSourceRestrictions restrictions_after =
- stream_adapter_->PeekNextRestrictions(adaptation);
- if (!absl::c_all_of(resources_, [&input_state, &restrictions_before,
- &restrictions_after,
- &reason_resource](const Resource* resource) {
- return resource->IsAdaptationUpAllowed(input_state, restrictions_before,
- restrictions_after,
- reason_resource);
- })) {
- processing_in_progress_ = false;
- return;
+ // Check that resource is most limited.
+ std::vector<rtc::scoped_refptr<Resource>> most_limited_resources;
+ VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions;
+ std::tie(most_limited_resources, most_limited_restrictions) =
+ FindMostLimitedResources();
+
+ // If the most restricted resource is less limited than current restrictions
+ // then proceed with adapting up.
+ if (!most_limited_resources.empty() &&
+ most_limited_restrictions.counters.Total() >=
+ stream_adapter_->adaptation_counters().Total()) {
+ // If |reason_resource| is not one of the most limiting resources then abort
+ // adaptation.
+ if (absl::c_find(most_limited_resources, reason_resource) ==
+ most_limited_resources.end()) {
+ processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Resource \"" << reason_resource->Name()
+ << "\" was not the most limited resource.";
+ return MitigationResultAndLogMessage(
+ MitigationResult::kNotMostLimitedResource, message.Release());
+ }
+
+ if (most_limited_resources.size() > 1) {
+ // If there are multiple most limited resources, all must signal underuse
+ // before the adaptation is applied.
+ UpdateResourceLimitations(reason_resource, adaptation.restrictions(),
+ adaptation.counters());
+ processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Resource \"" << reason_resource->Name()
+ << "\" was not the only most limited resource.";
+ return MitigationResultAndLogMessage(
+ MitigationResult::kSharedMostLimitedResource, message.Release());
+ }
}
// Apply adaptation.
- stream_adapter_->ApplyAdaptation(adaptation);
- for (Resource* resource : resources_) {
- resource->OnAdaptationApplied(input_state, restrictions_before,
- restrictions_after, reason_resource);
- }
- // Update VideoSourceRestrictions based on adaptation. This also informs the
- // |adaptation_listeners_|.
- MaybeUpdateVideoSourceRestrictions(&reason_resource);
+ stream_adapter_->ApplyAdaptation(adaptation, reason_resource);
processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Adapted up successfully. Unfiltered adaptations: "
+ << stream_adapter_->adaptation_counters().ToString();
+ return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied,
+ message.Release());
}
-void ResourceAdaptationProcessor::OnResourceOveruse(
- const Resource& reason_resource) {
+ResourceAdaptationProcessor::MitigationResultAndLogMessage
+ResourceAdaptationProcessor::OnResourceOveruse(
+ rtc::scoped_refptr<Resource> reason_resource) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
- // Clear all usage states. In order to re-run adaptation logic, resources need
- // to provide new resource usage measurements.
- // TODO(hbos): Support not unconditionally clearing usage states by having the
- // ResourceAdaptationProcessor check in on its resources at certain intervals.
- for (Resource* resource : resources_) {
- resource->ClearUsageState();
- }
- VideoStreamInputState input_state = input_state_provider_->InputState();
- if (!input_state.has_input()) {
- processing_in_progress_ = false;
- return;
- }
- if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
- !HasSufficientInputForAdaptation(input_state)) {
- processing_in_progress_ = false;
- return;
- }
- // Update video input states and encoder settings for accurate adaptation.
- stream_adapter_->SetInput(input_state);
// How can this stream be adapted up?
Adaptation adaptation = stream_adapter_->GetAdaptationDown();
if (adaptation.min_pixel_limit_reached()) {
@@ -215,61 +315,87 @@ void ResourceAdaptationProcessor::OnResourceOveruse(
}
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
- return;
+ rtc::StringBuilder message;
+ message << "Not adapting down because VideoStreamAdapter returned "
+ << Adaptation::StatusToString(adaptation.status());
+ return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter,
+ message.Release());
}
// Apply adaptation.
- VideoSourceRestrictions restrictions_before =
- stream_adapter_->source_restrictions();
- VideoSourceRestrictions restrictions_after =
- stream_adapter_->PeekNextRestrictions(adaptation);
- stream_adapter_->ApplyAdaptation(adaptation);
- for (Resource* resource : resources_) {
- resource->OnAdaptationApplied(input_state, restrictions_before,
- restrictions_after, reason_resource);
- }
- // Update VideoSourceRestrictions based on adaptation. This also informs the
- // |adaptation_listeners_|.
- MaybeUpdateVideoSourceRestrictions(&reason_resource);
+ UpdateResourceLimitations(reason_resource, adaptation.restrictions(),
+ adaptation.counters());
+ stream_adapter_->ApplyAdaptation(adaptation, reason_resource);
processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Adapted down successfully. Unfiltered adaptations: "
+ << stream_adapter_->adaptation_counters().ToString();
+ return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied,
+ message.Release());
}
-void ResourceAdaptationProcessor::TriggerAdaptationDueToFrameDroppedDueToSize(
- const Resource& reason_resource) {
- VideoAdaptationCounters counters_before =
- stream_adapter_->adaptation_counters();
- OnResourceOveruse(reason_resource);
- if (degradation_preference_ == DegradationPreference::BALANCED &&
- stream_adapter_->adaptation_counters().fps_adaptations >
- counters_before.fps_adaptations) {
- // Oops, we adapted frame rate. Adapt again, maybe it will adapt resolution!
- // Though this is not guaranteed...
- OnResourceOveruse(reason_resource);
- }
- if (stream_adapter_->adaptation_counters().resolution_adaptations >
- counters_before.resolution_adaptations) {
- encoder_stats_observer_->OnInitialQualityResolutionAdaptDown();
+std::pair<std::vector<rtc::scoped_refptr<Resource>>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ResourceAdaptationProcessor::FindMostLimitedResources() const {
+ std::vector<rtc::scoped_refptr<Resource>> most_limited_resources;
+ VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions{
+ VideoSourceRestrictions(), VideoAdaptationCounters()};
+
+ for (const auto& resource_and_adaptation_limit_ :
+ adaptation_limits_by_resources_) {
+ const auto& restrictions_with_counters =
+ resource_and_adaptation_limit_.second;
+ if (restrictions_with_counters.counters.Total() >
+ most_limited_restrictions.counters.Total()) {
+ most_limited_restrictions = restrictions_with_counters;
+ most_limited_resources.clear();
+ most_limited_resources.push_back(resource_and_adaptation_limit_.first);
+ } else if (most_limited_restrictions.counters ==
+ restrictions_with_counters.counters) {
+ most_limited_resources.push_back(resource_and_adaptation_limit_.first);
+ }
}
+ return std::make_pair(std::move(most_limited_resources),
+ most_limited_restrictions);
}
-void ResourceAdaptationProcessor::UpdateResourceDegradationCounts(
- const Resource* resource) {
- RTC_DCHECK(resource);
- int delta = stream_adapter_->adaptation_counters().Total();
- for (const auto& adaptations : adaptations_counts_by_resource_) {
- delta -= adaptations.second;
+void ResourceAdaptationProcessor::UpdateResourceLimitations(
+ rtc::scoped_refptr<Resource> reason_resource,
+ const VideoSourceRestrictions& restrictions,
+ const VideoAdaptationCounters& counters) {
+ auto& adaptation_limits = adaptation_limits_by_resources_[reason_resource];
+ if (adaptation_limits.restrictions == restrictions &&
+ adaptation_limits.counters == counters) {
+ return;
}
+ adaptation_limits = {restrictions, counters};
- // Default value is 0, inserts the value if missing.
- adaptations_counts_by_resource_[resource] += delta;
- RTC_DCHECK_GE(adaptations_counts_by_resource_[resource], 0);
+ std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters> limitations;
+ for (const auto& p : adaptation_limits_by_resources_) {
+ limitations.insert(std::make_pair(p.first, p.second.counters));
+ }
+ for (auto limitations_listener : resource_limitations_listeners_) {
+ limitations_listener->OnResourceLimitationChanged(reason_resource,
+ limitations);
+ }
}
-bool ResourceAdaptationProcessor::IsResourceAllowedToAdaptUp(
- const Resource* resource) const {
- RTC_DCHECK(resource);
- const auto& adaptations = adaptations_counts_by_resource_.find(resource);
- return adaptations != adaptations_counts_by_resource_.end() &&
- adaptations->second > 0;
+void ResourceAdaptationProcessor::OnVideoSourceRestrictionsUpdated(
+ VideoSourceRestrictions restrictions,
+ const VideoAdaptationCounters& adaptation_counters,
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (reason) {
+ UpdateResourceLimitations(reason, unfiltered_restrictions,
+ adaptation_counters);
+ } else if (adaptation_counters.Total() == 0) {
+ // Adaptations are cleared.
+ adaptation_limits_by_resources_.clear();
+ previous_mitigation_results_.clear();
+ for (auto limitations_listener : resource_limitations_listeners_) {
+ limitations_listener->OnResourceLimitationChanged(nullptr, {});
+ }
+ }
}
} // namespace webrtc
diff --git a/call/adaptation/resource_adaptation_processor.h b/call/adaptation/resource_adaptation_processor.h
index de38751399..7ba871e104 100644
--- a/call/adaptation/resource_adaptation_processor.h
+++ b/call/adaptation/resource_adaptation_processor.h
@@ -13,13 +13,18 @@
#include <map>
#include <memory>
+#include <string>
+#include <utility>
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/video/video_adaptation_counters.h"
#include "api/video/video_frame.h"
#include "api/video/video_stream_encoder_observer.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_adapter.h"
@@ -28,80 +33,141 @@
namespace webrtc {
+// The Resource Adaptation Processor is responsible for reacting to resource
+// usage measurements (e.g. overusing or underusing CPU). When a resource is
+// overused the Processor is responsible for performing mitigations in order to
+// consume less resources.
+//
+// Today we have one Processor per VideoStreamEncoder and the Processor is only
+// capable of restricting resolution or frame rate of the encoded stream. In the
+// future we should have a single Processor responsible for all encoded streams,
+// and it should be capable of reconfiguring other things than just
+// VideoSourceRestrictions (e.g. reduce render frame rate).
+// See Resource-Adaptation hotlist:
+// https://bugs.chromium.org/u/590058293/hotlists/Resource-Adaptation
+//
+// The ResourceAdaptationProcessor is single-threaded. It may be constructed on
+// any thread but MUST subsequently be used and destroyed on a single sequence,
+// i.e. the "resource adaptation task queue". Resources can be added and removed
+// from any thread.
class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
+ public VideoSourceRestrictionsListener,
public ResourceListener {
public:
ResourceAdaptationProcessor(
- VideoStreamInputStateProvider* input_state_provider,
- VideoStreamEncoderObserver* encoder_stats_observer);
+ VideoStreamEncoderObserver* encoder_stats_observer,
+ VideoStreamAdapter* video_stream_adapter);
~ResourceAdaptationProcessor() override;
- // ResourceAdaptationProcessorInterface implementation.
- DegradationPreference degradation_preference() const override;
- DegradationPreference effective_degradation_preference() const override;
-
- void StartResourceAdaptation() override;
- void StopResourceAdaptation() override;
- void AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) override;
- void AddResource(Resource* resource) override;
+ void SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) override;
- void SetDegradationPreference(
- DegradationPreference degradation_preference) override;
- void SetIsScreenshare(bool is_screenshare) override;
- void ResetVideoSourceRestrictions() override;
+ // ResourceAdaptationProcessorInterface implementation.
+ void AddResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) override;
+ void RemoveResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) override;
+ void AddResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetResources() const override;
+ void RemoveResource(rtc::scoped_refptr<Resource> resource) override;
// ResourceListener implementation.
// Triggers OnResourceUnderuse() or OnResourceOveruse().
- void OnResourceUsageStateMeasured(const Resource& resource) override;
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
- // May trigger 1-2 adaptations. It is meant to reduce resolution but this is
- // not guaranteed. It may adapt frame rate, which does not address the issue.
- // TODO(hbos): Can we get rid of this?
- void TriggerAdaptationDueToFrameDroppedDueToSize(
- const Resource& reason_resource) override;
+ // VideoSourceRestrictionsListener implementation.
+ void OnVideoSourceRestrictionsUpdated(
+ VideoSourceRestrictions restrictions,
+ const VideoAdaptationCounters& adaptation_counters,
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) override;
private:
- bool HasSufficientInputForAdaptation(
- const VideoStreamInputState& input_state) const;
+ // If resource usage measurements happens off the adaptation task queue, this
+ // class takes care of posting the measurement for the processor to handle it
+ // on the adaptation task queue.
+ class ResourceListenerDelegate : public rtc::RefCountInterface,
+ public ResourceListener {
+ public:
+ explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor);
+
+ void SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
+ void OnProcessorDestroyed();
+
+ // ResourceListener implementation.
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
+
+ private:
+ TaskQueueBase* resource_adaptation_queue_;
+ ResourceAdaptationProcessor* processor_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ };
+
+ enum class MitigationResult {
+ kNotMostLimitedResource,
+ kSharedMostLimitedResource,
+ kRejectedByAdapter,
+ kAdaptationApplied,
+ };
+
+ struct MitigationResultAndLogMessage {
+ MitigationResultAndLogMessage();
+ MitigationResultAndLogMessage(MitigationResult result, std::string message);
+ MitigationResult result;
+ std::string message;
+ };
// Performs the adaptation by getting the next target, applying it and
// informing listeners of the new VideoSourceRestriction and adaptation
// counters.
- void OnResourceUnderuse(const Resource& reason_resource);
- void OnResourceOveruse(const Resource& reason_resource);
-
- // Needs to be invoked any time |degradation_preference_| or |is_screenshare_|
- // changes to ensure |effective_degradation_preference_| is up-to-date.
- void MaybeUpdateEffectiveDegradationPreference();
- // If the filtered source restrictions are different than
- // |last_reported_source_restrictions_|, inform the listeners.
- void MaybeUpdateVideoSourceRestrictions(const Resource* reason);
- // Updates the number of times the resource has degraded based on the latest
- // degradation applied.
- void UpdateResourceDegradationCounts(const Resource* resource);
- // Returns true if a Resource has been overused in the pass and is responsible
- // for creating a VideoSourceRestriction. The current algorithm counts the
- // number of times the resource caused an adaptation and allows adapting up
- // if that number is non-zero. This is consistent with how adaptation has
- // traditionally been handled.
- // TODO(crbug.com/webrtc/11553) Change this algorithm to look at the resources
- // restrictions rather than just the counters.
- bool IsResourceAllowedToAdaptUp(const Resource* resource) const;
-
+ MitigationResultAndLogMessage OnResourceUnderuse(
+ rtc::scoped_refptr<Resource> reason_resource);
+ MitigationResultAndLogMessage OnResourceOveruse(
+ rtc::scoped_refptr<Resource> reason_resource);
+
+ void UpdateResourceLimitations(rtc::scoped_refptr<Resource> reason_resource,
+ const VideoSourceRestrictions& restrictions,
+ const VideoAdaptationCounters& counters)
+ RTC_RUN_ON(resource_adaptation_queue_);
+
+ // Searches |adaptation_limits_by_resources_| for each resource with the
+ // highest total adaptation counts. Adaptation up may only occur if the
+ // resource performing the adaptation is the only most limited resource. This
+ // function returns the list of all most limited resources as well as the
+ // corresponding adaptation of that resource.
+ std::pair<std::vector<rtc::scoped_refptr<Resource>>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ FindMostLimitedResources() const RTC_RUN_ON(resource_adaptation_queue_);
+
+ void RemoveLimitationsImposedByResource(
+ rtc::scoped_refptr<Resource> resource);
+
+ TaskQueueBase* resource_adaptation_queue_;
+ rtc::scoped_refptr<ResourceListenerDelegate> resource_listener_delegate_;
// Input and output.
- VideoStreamInputStateProvider* const input_state_provider_;
- VideoStreamEncoderObserver* const encoder_stats_observer_;
- std::vector<ResourceAdaptationProcessorListener*> adaptation_listeners_;
- std::vector<Resource*> resources_;
- std::map<const Resource*, int> adaptations_counts_by_resource_;
- // Adaptation strategy settings.
- DegradationPreference degradation_preference_;
- DegradationPreference effective_degradation_preference_;
- bool is_screenshare_;
+ VideoStreamEncoderObserver* const encoder_stats_observer_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ mutable Mutex resources_lock_;
+ std::vector<rtc::scoped_refptr<Resource>> resources_
+ RTC_GUARDED_BY(resources_lock_);
+ std::vector<ResourceLimitationsListener*> resource_limitations_listeners_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ // Purely used for statistics, does not ensure mapped resources stay alive.
+ std::map<rtc::scoped_refptr<Resource>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ adaptation_limits_by_resources_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
// Responsible for generating and applying possible adaptations.
- const std::unique_ptr<VideoStreamAdapter> stream_adapter_;
- VideoSourceRestrictions last_reported_source_restrictions_;
+ VideoStreamAdapter* const stream_adapter_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ VideoSourceRestrictions last_reported_source_restrictions_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ // Keeps track of previous mitigation results per resource since the last
+ // successful adaptation. Used to avoid RTC_LOG spam.
+ std::map<Resource*, MitigationResult> previous_mitigation_results_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
// Prevents recursion.
//
// This is used to prevent triggering resource adaptation in the process of
@@ -113,7 +179,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// Resource::OnAdaptationApplied() ->
// Resource::OnResourceUsageStateMeasured() ->
// ResourceAdaptationProcessor::OnResourceOveruse() // Boom, not allowed.
- bool processing_in_progress_;
+ bool processing_in_progress_ RTC_GUARDED_BY(resource_adaptation_queue_);
};
} // namespace webrtc
diff --git a/call/adaptation/resource_adaptation_processor_interface.cc b/call/adaptation/resource_adaptation_processor_interface.cc
index 4e5251ce90..79f099b267 100644
--- a/call/adaptation/resource_adaptation_processor_interface.cc
+++ b/call/adaptation/resource_adaptation_processor_interface.cc
@@ -12,8 +12,9 @@
namespace webrtc {
-ResourceAdaptationProcessorListener::~ResourceAdaptationProcessorListener() {}
+ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() =
+ default;
-ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() {}
+ResourceLimitationsListener::~ResourceLimitationsListener() = default;
} // namespace webrtc
diff --git a/call/adaptation/resource_adaptation_processor_interface.h b/call/adaptation/resource_adaptation_processor_interface.h
index 6984273a29..6b9afccf3f 100644
--- a/call/adaptation/resource_adaptation_processor_interface.h
+++ b/call/adaptation/resource_adaptation_processor_interface.h
@@ -11,70 +11,59 @@
#ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_
#define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_
+#include <map>
+#include <vector>
+
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_counters.h"
#include "api/video/video_frame.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/encoder_settings.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/video_source_restrictions.h"
namespace webrtc {
-// The listener is responsible for carrying out the reconfiguration of the video
-// source such that the VideoSourceRestrictions are fulfilled.
-class ResourceAdaptationProcessorListener {
+class ResourceLimitationsListener {
public:
- virtual ~ResourceAdaptationProcessorListener();
+ virtual ~ResourceLimitationsListener();
- // The |restrictions| are filtered by degradation preference but not the
- // |adaptation_counters|, which are currently only reported for legacy stats
- // calculation purposes.
- virtual void OnVideoSourceRestrictionsUpdated(
- VideoSourceRestrictions restrictions,
- const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) = 0;
+ // The limitations on a resource were changed. This does not mean the current
+ // video restrictions have changed.
+ virtual void OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) = 0;
};
-// Responsible for reconfiguring encoded streams based on resource consumption,
-// such as scaling down resolution or frame rate when CPU is overused. This
-// interface is meant to be injectable into VideoStreamEncoder.
+// The Resource Adaptation Processor is responsible for reacting to resource
+// usage measurements (e.g. overusing or underusing CPU). When a resource is
+// overused the Processor is responsible for performing mitigations in order to
+// consume less resources.
class ResourceAdaptationProcessorInterface {
public:
virtual ~ResourceAdaptationProcessorInterface();
- virtual DegradationPreference degradation_preference() const = 0;
- // Reinterprets "balanced + screenshare" as "maintain-resolution".
- // TODO(hbos): Don't do this. This is not what "balanced" means. If the
- // application wants to maintain resolution it should set that degradation
- // preference rather than depend on non-standard behaviors.
- virtual DegradationPreference effective_degradation_preference() const = 0;
+ virtual void SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) = 0;
+ virtual void AddResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) = 0;
+ virtual void RemoveResourceLimitationsListener(
+ ResourceLimitationsListener* limitations_listener) = 0;
// Starts or stops listening to resources, effectively enabling or disabling
- // processing.
+ // processing. May be called from anywhere.
// TODO(https://crbug.com/webrtc/11172): Automatically register and unregister
// with AddResource() and RemoveResource() instead. When the processor is
// multi-stream aware, stream-specific resouces will get added and removed
// over time.
- virtual void StartResourceAdaptation() = 0;
- virtual void StopResourceAdaptation() = 0;
- virtual void AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) = 0;
- virtual void AddResource(Resource* resource) = 0;
-
- virtual void SetDegradationPreference(
- DegradationPreference degradation_preference) = 0;
- virtual void SetIsScreenshare(bool is_screenshare) = 0;
- virtual void ResetVideoSourceRestrictions() = 0;
-
- // May trigger one or more adaptations. It is meant to reduce resolution -
- // useful if a frame was dropped due to its size - however, the implementation
- // may not guarantee this (see resource_adaptation_processor.h).
- // TODO(hbos): This is only part of the interface for backwards-compatiblity
- // reasons. Can we replace this by something which actually satisfies the
- // resolution constraints or get rid of it altogether?
- virtual void TriggerAdaptationDueToFrameDroppedDueToSize(
- const Resource& reason_resource) = 0;
+ virtual void AddResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>> GetResources() const = 0;
+ virtual void RemoveResource(rtc::scoped_refptr<Resource> resource) = 0;
};
} // namespace webrtc
diff --git a/call/adaptation/resource_adaptation_processor_unittest.cc b/call/adaptation/resource_adaptation_processor_unittest.cc
index 7e7fe590dc..69b224e711 100644
--- a/call/adaptation/resource_adaptation_processor_unittest.cc
+++ b/call/adaptation/resource_adaptation_processor_unittest.cc
@@ -10,13 +10,19 @@
#include "call/adaptation/resource_adaptation_processor.h"
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_counters.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
+#include "call/adaptation/test/fake_adaptation_listener.h"
#include "call/adaptation/test/fake_frame_rate_provider.h"
#include "call/adaptation/test/fake_resource.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state_provider.h"
+#include "rtc_base/event.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
#include "test/gtest.h"
namespace webrtc {
@@ -25,31 +31,42 @@ namespace {
const int kDefaultFrameRate = 30;
const int kDefaultFrameSize = 1280 * 720;
+const int kDefaultTimeoutMs = 5000;
-class ResourceAdaptationProcessorListenerForTesting
- : public ResourceAdaptationProcessorListener {
+class VideoSourceRestrictionsListenerForTesting
+ : public VideoSourceRestrictionsListener {
public:
- ResourceAdaptationProcessorListenerForTesting()
+ VideoSourceRestrictionsListenerForTesting()
: restrictions_updated_count_(0),
restrictions_(),
adaptation_counters_(),
reason_(nullptr) {}
- ~ResourceAdaptationProcessorListenerForTesting() override {}
+ ~VideoSourceRestrictionsListenerForTesting() override {}
size_t restrictions_updated_count() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
return restrictions_updated_count_;
}
- const VideoSourceRestrictions& restrictions() const { return restrictions_; }
- const VideoAdaptationCounters& adaptation_counters() const {
+ VideoSourceRestrictions restrictions() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return restrictions_;
+ }
+ VideoAdaptationCounters adaptation_counters() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
return adaptation_counters_;
}
- const Resource* reason() const { return reason_; }
+ rtc::scoped_refptr<Resource> reason() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return reason_;
+ }
- // ResourceAdaptationProcessorListener implementation.
+ // VideoSourceRestrictionsListener implementation.
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) override {
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) override {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
++restrictions_updated_count_;
restrictions_ = restrictions;
adaptation_counters_ = adaptation_counters;
@@ -57,10 +74,12 @@ class ResourceAdaptationProcessorListenerForTesting
}
private:
- size_t restrictions_updated_count_;
- VideoSourceRestrictions restrictions_;
- VideoAdaptationCounters adaptation_counters_;
- const Resource* reason_;
+ SequenceChecker sequence_checker_;
+ size_t restrictions_updated_count_ RTC_GUARDED_BY(&sequence_checker_);
+ VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_);
+ VideoAdaptationCounters adaptation_counters_
+ RTC_GUARDED_BY(&sequence_checker_);
+ rtc::scoped_refptr<Resource> reason_ RTC_GUARDED_BY(&sequence_checker_);
};
class ResourceAdaptationProcessorTest : public ::testing::Test {
@@ -68,16 +87,24 @@ class ResourceAdaptationProcessorTest : public ::testing::Test {
ResourceAdaptationProcessorTest()
: frame_rate_provider_(),
input_state_provider_(&frame_rate_provider_),
- resource_("FakeResource"),
- other_resource_("OtherFakeResource"),
- processor_(&input_state_provider_,
- /*encoder_stats_observer=*/&frame_rate_provider_) {
- processor_.AddAdaptationListener(&processor_listener_);
- processor_.AddResource(&resource_);
- processor_.AddResource(&other_resource_);
+ resource_(FakeResource::Create("FakeResource")),
+ other_resource_(FakeResource::Create("OtherFakeResource")),
+ adaptation_listener_(),
+ video_stream_adapter_(
+ std::make_unique<VideoStreamAdapter>(&input_state_provider_)),
+ processor_(std::make_unique<ResourceAdaptationProcessor>(
+ /*encoder_stats_observer=*/&frame_rate_provider_,
+ video_stream_adapter_.get())) {
+ processor_->SetResourceAdaptationQueue(TaskQueueBase::Current());
+ video_stream_adapter_->AddRestrictionsListener(&restrictions_listener_);
+ processor_->AddResource(resource_);
+ processor_->AddResource(other_resource_);
+ video_stream_adapter_->AddAdaptationListener(&adaptation_listener_);
}
~ResourceAdaptationProcessorTest() override {
- processor_.StopResourceAdaptation();
+ if (processor_) {
+ DestroyProcessor();
+ }
}
void SetInputStates(bool has_input, int fps, int frame_size) {
@@ -94,42 +121,54 @@ class ResourceAdaptationProcessorTest : public ::testing::Test {
: restrictions.max_pixels_per_frame().value_or(kDefaultFrameSize));
}
+ void DestroyProcessor() {
+ if (resource_) {
+ processor_->RemoveResource(resource_);
+ }
+ if (other_resource_) {
+ processor_->RemoveResource(other_resource_);
+ }
+ video_stream_adapter_->RemoveAdaptationListener(&adaptation_listener_);
+ video_stream_adapter_->RemoveRestrictionsListener(&restrictions_listener_);
+ processor_.reset();
+ }
+
+ static void WaitUntilTaskQueueIdle() {
+ ASSERT_TRUE(rtc::Thread::Current()->ProcessMessages(0));
+ }
+
protected:
FakeFrameRateProvider frame_rate_provider_;
VideoStreamInputStateProvider input_state_provider_;
- FakeResource resource_;
- FakeResource other_resource_;
- ResourceAdaptationProcessor processor_;
- ResourceAdaptationProcessorListenerForTesting processor_listener_;
+ rtc::scoped_refptr<FakeResource> resource_;
+ rtc::scoped_refptr<FakeResource> other_resource_;
+ FakeAdaptationListener adaptation_listener_;
+ std::unique_ptr<VideoStreamAdapter> video_stream_adapter_;
+ std::unique_ptr<ResourceAdaptationProcessor> processor_;
+ VideoSourceRestrictionsListenerForTesting restrictions_listener_;
};
} // namespace
TEST_F(ResourceAdaptationProcessorTest, DisabledByDefault) {
- EXPECT_EQ(DegradationPreference::DISABLED,
- processor_.degradation_preference());
- EXPECT_EQ(DegradationPreference::DISABLED,
- processor_.effective_degradation_preference());
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- processor_.StartResourceAdaptation();
// Adaptation does not happen when disabled.
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
// Adaptation does not happen if input is insufficient.
// When frame size is missing (OnFrameSizeObserved not called yet).
input_state_provider_.OnHasInputChanged(true);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
// When "has input" is missing.
SetInputStates(false, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
// Note: frame rate cannot be missing, if unset it is 0.
}
@@ -139,212 +178,548 @@ TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) {
// restrictions. For that, see video_stream_adapter_unittest.cc.
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingResolutionInMaintainFrameRate) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
EXPECT_TRUE(
- processor_listener_.restrictions().max_pixels_per_frame().has_value());
+ restrictions_listener_.restrictions().max_pixels_per_frame().has_value());
}
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingFrameRateInMaintainResolution) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_RESOLUTION);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- EXPECT_TRUE(processor_listener_.restrictions().max_frame_rate().has_value());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_frame_rate().has_value());
}
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingFrameRateAndResolutionInBalanced) {
- processor_.SetDegradationPreference(DegradationPreference::BALANCED);
- processor_.StartResourceAdaptation();
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::BALANCED);
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- // Adapting multiple times eventually resticts both frame rate and resolution.
- // Exactly many times we need to adapt depends on BalancedDegradationSettings,
- // VideoStreamAdapter and default input states. This test requires it to be
- // achieved within 4 adaptations.
+ // Adapting multiple times eventually resticts both frame rate and
+ // resolution. Exactly many times we need to adapt depends on
+ // BalancedDegradationSettings, VideoStreamAdapter and default input
+ // states. This test requires it to be achieved within 4 adaptations.
for (size_t i = 0; i < 4; ++i) {
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(i + 1, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(i + 1, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
}
EXPECT_TRUE(
- processor_listener_.restrictions().max_pixels_per_frame().has_value());
- EXPECT_TRUE(processor_listener_.restrictions().max_frame_rate().has_value());
+ restrictions_listener_.restrictions().max_pixels_per_frame().has_value());
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_frame_rate().has_value());
}
TEST_F(ResourceAdaptationProcessorTest, AwaitingPreviousAdaptation) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- // If we don't restrict the source then adaptation will not happen again due
- // to "awaiting previous adaptation". This prevents "double-adapt".
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ // If we don't restrict the source then adaptation will not happen again
+ // due to "awaiting previous adaptation". This prevents "double-adapt".
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, CannotAdaptUpWhenUnrestricted) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, UnderuseTakesUsBackToUnrestricted) {
- processor_.SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2u, processor_listener_.restrictions_updated_count());
- EXPECT_EQ(VideoSourceRestrictions(), processor_listener_.restrictions());
-}
-
-TEST_F(ResourceAdaptationProcessorTest, ResourcesCanPreventAdaptingUp) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- // Adapt down so that we can adapt up.
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
- // Adapting up is prevented.
- resource_.set_is_adaptation_up_allowed(false);
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2u, restrictions_listener_.restrictions_updated_count());
+ EXPECT_EQ(VideoSourceRestrictions(), restrictions_listener_.restrictions());
}
TEST_F(ResourceAdaptationProcessorTest,
ResourcesCanNotAdaptUpIfNeverAdaptedDown) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
// Other resource signals under-use
- other_resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest,
ResourcesCanNotAdaptUpIfNotAdaptedDownAfterReset) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
-
- processor_.ResetVideoSourceRestrictions();
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
- other_resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- // resource_ did not overuse after we reset the restrictions, so adapt up
- // should be disallowed.
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+
+ video_stream_adapter_->ClearRestrictions();
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ did not overuse after we reset the restrictions, so adapt
+ // up should be disallowed.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
}
-TEST_F(ResourceAdaptationProcessorTest,
- MultipleResourcesCanTriggerMultipleAdaptations) {
- processor_.SetDegradationPreference(
+TEST_F(ResourceAdaptationProcessorTest, OnlyMostLimitedResourceMayAdaptUp) {
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(3, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- // Does not trigger adaptation since resource has no adaptations left.
- resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- other_resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_.set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // |other_resource_| is most limited, resource_ can't adapt up.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // |resource_| and |other_resource_| are now most limited, so both must
+ // signal underuse to adapt up.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
}
-TEST_F(ResourceAdaptationProcessorTest, AdaptingTriggersOnAdaptationApplied) {
- processor_.SetDegradationPreference(
+TEST_F(ResourceAdaptationProcessorTest,
+ MultipleResourcesCanTriggerMultipleAdaptations) {
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, resource_.num_adaptations_applied());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ is not most limited so can't adapt from underuse.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // resource_ is still not most limited so can't adapt from underuse.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // However it will be after overuse
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // Now other_resource_ can't adapt up as it is not most restricted.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ is limited at 3 adaptations and other_resource_ 2.
+ // With the most limited resource signalling underuse in the following
+ // order we get back to unrestricted video.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // Both resource_ and other_resource_ are most limited.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // Again both are most limited.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
}
-TEST_F(ResourceAdaptationProcessorTest, AdaptingClearsResourceUsageState) {
- processor_.SetDegradationPreference(
+TEST_F(ResourceAdaptationProcessorTest,
+ MostLimitedResourceAdaptationWorksAfterChangingDegradataionPreference) {
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- EXPECT_FALSE(resource_.usage_state().has_value());
+ // Adapt down until we can't anymore.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ int last_total = restrictions_listener_.adaptation_counters().Total();
+
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ // resource_ can not adapt up since we have never reduced FPS.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total());
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(last_total + 1,
+ restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // other_resource_ is most limited so should be able to adapt up.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total());
}
-TEST_F(ResourceAdaptationProcessorTest,
- FailingAdaptingAlsoClearsResourceUsageState) {
- processor_.SetDegradationPreference(DegradationPreference::DISABLED);
- processor_.StartResourceAdaptation();
- resource_.set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- EXPECT_FALSE(resource_.usage_state().has_value());
+TEST_F(ResourceAdaptationProcessorTest, AdaptingTriggersOnAdaptationApplied) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, adaptation_listener_.num_adaptations_applied());
}
TEST_F(ResourceAdaptationProcessorTest,
AdaptsDownWhenOtherResourceIsAlwaysUnderused) {
- processor_.SetDegradationPreference(
+ video_stream_adapter_->SetDegradationPreference(
DegradationPreference::MAINTAIN_FRAMERATE);
- processor_.StartResourceAdaptation();
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- other_resource_.set_usage_state(ResourceUsageState::kUnderuse);
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
// Does not trigger adapataion because there's no restriction.
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- resource_.set_usage_state(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
// Adapts down even if other resource asked for adapting up.
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_.set_usage_state(ResourceUsageState::kUnderuse);
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
// Doesn't adapt up because adaptation is due to another resource.
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ TriggerOveruseNotOnAdaptationTaskQueue) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ resource_task_queue.PostTask(ToQueuedTask(
+ [&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); }));
+
+ EXPECT_EQ_WAIT(1u, restrictions_listener_.restrictions_updated_count(),
+ kDefaultTimeoutMs);
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ DestroyProcessorWhileResourceListenerDelegateHasTaskInFlight) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ // Wait for |resource_| to signal oversue first so we know that the delegate
+ // has passed it on to the processor's task queue.
+ rtc::Event resource_event;
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ resource_task_queue.PostTask(ToQueuedTask([&]() {
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ resource_event.Set();
+ }));
+
+ EXPECT_TRUE(resource_event.Wait(kDefaultTimeoutMs));
+ // Now destroy the processor while handling the overuse is in flight.
+ DestroyProcessor();
+
+ // Because the processor was destroyed by the time the delegate's task ran,
+ // the overuse signal must not have been handled.
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ ResourceOveruseIgnoredWhenSignalledDuringRemoval) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ rtc::Event overuse_event;
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ // Queues task for |resource_| overuse while |processor_| is still listening.
+ resource_task_queue.PostTask(ToQueuedTask([&]() {
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ overuse_event.Set();
+ }));
+ EXPECT_TRUE(overuse_event.Wait(kDefaultTimeoutMs));
+ // Once we know the overuse task is queued, remove |resource_| so that
+ // |processor_| is not listening to it.
+ processor_->RemoveResource(resource_);
+
+ // Runs the queued task so |processor_| gets signalled kOveruse from
+ // |resource_| even though |processor_| was not listening.
+ WaitUntilTaskQueueIdle();
+
+ // No restrictions should change even though |resource_| signaled |kOveruse|.
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingOnlyAdaptedResourceResetsAdaptation) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsAdaptationToNextLimitedLevel) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::BALANCED);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing most limited |resource_| should revert us back to
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsAdaptationIfInputStateUnchanged) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ // Overuse twice and underuse once. After the underuse we don't restrict the
+ // source. Normally this would block future underuses.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing most limited |resource_| should revert us back to, even though we
+ // did not call RestrictSource() after |resource_| was overused. Normally
+ // adaptation for MAINTAIN_FRAMERATE would be blocked here but for removal we
+ // allow this anyways.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingResourceNotMostLimitedHasNoEffectOnLimitations) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::BALANCED);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ VideoSourceRestrictions current_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters current_counters =
+ restrictions_listener_.adaptation_counters();
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing most limited |resource_| should revert us back to
+ processor_->RemoveResource(other_resource_);
+ EXPECT_EQ(current_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(current_counters, restrictions_listener_.adaptation_counters());
+
+ // Delete |other_resource_| for cleanup.
+ other_resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceAfterSwitchingDegradationPreferences) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Revert to |other_resource_| when removing |resource_| even though the
+ // degradation preference was different when it was overused.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // After switching back to MAINTAIN_FRAMERATE, the next most limited settings
+ // are restored.
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsNextLimitationsInDisabled) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::DISABLED);
+
+ // Revert to |other_resource_| when removing |resource_| even though the
+ // current degradataion preference is disabled.
+ processor_->RemoveResource(resource_);
+
+ // After switching back to MAINTAIN_FRAMERATE, the next most limited settings
+ // are restored.
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovedResourceSignalsIgnoredByProcessor) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ processor_->RemoveResource(resource_);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingResourceWhenMultipleMostLimtedHasNoEffect) {
+ video_stream_adapter_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ // Adapt |resource_| up and then down so that both resource's are most
+ // limited at 1 adaptation.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing |resource_| has no effect since both |resource_| and
+ // |other_resource_| are most limited.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
}
} // namespace webrtc
diff --git a/call/adaptation/resource_unittest.cc b/call/adaptation/resource_unittest.cc
index d864005a72..a2291dfdce 100644
--- a/call/adaptation/resource_unittest.cc
+++ b/call/adaptation/resource_unittest.cc
@@ -8,9 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "call/adaptation/resource.h"
+#include "api/adaptation/resource.h"
+#include <memory>
+
+#include "api/scoped_refptr.h"
#include "call/adaptation/test/fake_resource.h"
+#include "call/adaptation/test/mock_resource_listener.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -19,31 +23,33 @@ namespace webrtc {
using ::testing::_;
using ::testing::StrictMock;
-class MockResourceListener : public ResourceListener {
+class ResourceTest : public ::testing::Test {
public:
- MOCK_METHOD(void, OnResourceUsageStateMeasured, (const Resource& resource));
+ ResourceTest() : fake_resource_(FakeResource::Create("FakeResource")) {}
+
+ protected:
+ rtc::scoped_refptr<FakeResource> fake_resource_;
};
-TEST(ResourceTest, RegisteringListenerReceivesCallbacks) {
+TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) {
StrictMock<MockResourceListener> resource_listener;
- FakeResource fake_resource("FakeResource");
- fake_resource.SetResourceListener(&resource_listener);
- EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_))
+ fake_resource_->SetResourceListener(&resource_listener);
+ EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _))
.Times(1)
- .WillOnce([](const Resource& resource) {
- EXPECT_EQ(ResourceUsageState::kOveruse, resource.usage_state());
+ .WillOnce([](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
});
- fake_resource.set_usage_state(ResourceUsageState::kOveruse);
- fake_resource.SetResourceListener(nullptr);
+ fake_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ fake_resource_->SetResourceListener(nullptr);
}
-TEST(ResourceTest, UnregisteringListenerStopsCallbacks) {
+TEST_F(ResourceTest, UnregisteringListenerStopsCallbacks) {
StrictMock<MockResourceListener> resource_listener;
- FakeResource fake_resource("FakeResource");
- fake_resource.SetResourceListener(&resource_listener);
- fake_resource.SetResourceListener(nullptr);
- EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_)).Times(0);
- fake_resource.set_usage_state(ResourceUsageState::kOveruse);
+ fake_resource_->SetResourceListener(&resource_listener);
+ fake_resource_->SetResourceListener(nullptr);
+ EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)).Times(0);
+ fake_resource_->SetUsageState(ResourceUsageState::kOveruse);
}
} // namespace webrtc
diff --git a/call/adaptation/test/fake_adaptation_constraint.cc b/call/adaptation/test/fake_adaptation_constraint.cc
new file mode 100644
index 0000000000..983885e58a
--- /dev/null
+++ b/call/adaptation/test/fake_adaptation_constraint.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/test/fake_adaptation_constraint.h"
+
+#include <utility>
+
+namespace webrtc {
+
+FakeAdaptationConstraint::FakeAdaptationConstraint(std::string name)
+ : name_(std::move(name)), is_adaptation_up_allowed_(true) {}
+
+FakeAdaptationConstraint::~FakeAdaptationConstraint() {}
+
+void FakeAdaptationConstraint::set_is_adaptation_up_allowed(
+ bool is_adaptation_up_allowed) {
+ is_adaptation_up_allowed_ = is_adaptation_up_allowed;
+}
+
+std::string FakeAdaptationConstraint::Name() const {
+ return name_;
+}
+
+bool FakeAdaptationConstraint::IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const {
+ return is_adaptation_up_allowed_;
+}
+
+} // namespace webrtc
diff --git a/call/adaptation/test/fake_adaptation_constraint.h b/call/adaptation/test/fake_adaptation_constraint.h
new file mode 100644
index 0000000000..74637f48fd
--- /dev/null
+++ b/call/adaptation/test/fake_adaptation_constraint.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
+#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
+
+#include <string>
+
+#include "call/adaptation/adaptation_constraint.h"
+
+namespace webrtc {
+
+class FakeAdaptationConstraint : public AdaptationConstraint {
+ public:
+ explicit FakeAdaptationConstraint(std::string name);
+ ~FakeAdaptationConstraint() override;
+
+ void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed);
+
+ // AdaptationConstraint implementation.
+ std::string Name() const override;
+ bool IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const override;
+
+ private:
+ const std::string name_;
+ bool is_adaptation_up_allowed_;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
diff --git a/call/adaptation/test/fake_adaptation_listener.cc b/call/adaptation/test/fake_adaptation_listener.cc
new file mode 100644
index 0000000000..7feecd6367
--- /dev/null
+++ b/call/adaptation/test/fake_adaptation_listener.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/test/fake_adaptation_listener.h"
+
+namespace webrtc {
+
+FakeAdaptationListener::FakeAdaptationListener()
+ : num_adaptations_applied_(0) {}
+
+FakeAdaptationListener::~FakeAdaptationListener() {}
+
+size_t FakeAdaptationListener::num_adaptations_applied() const {
+ return num_adaptations_applied_;
+}
+
+void FakeAdaptationListener::OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) {
+ ++num_adaptations_applied_;
+}
+
+} // namespace webrtc
diff --git a/call/adaptation/test/fake_adaptation_listener.h b/call/adaptation/test/fake_adaptation_listener.h
new file mode 100644
index 0000000000..c60ba3089b
--- /dev/null
+++ b/call/adaptation/test/fake_adaptation_listener.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
+#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
+
+#include "call/adaptation/adaptation_listener.h"
+
+namespace webrtc {
+
+class FakeAdaptationListener : public AdaptationListener {
+ public:
+ FakeAdaptationListener();
+ ~FakeAdaptationListener() override;
+
+ size_t num_adaptations_applied() const;
+
+ // AdaptationListener implementation.
+ void OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) override;
+
+ private:
+ size_t num_adaptations_applied_;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
diff --git a/call/adaptation/test/fake_frame_rate_provider.h b/call/adaptation/test/fake_frame_rate_provider.h
index a08e162321..3638f478f3 100644
--- a/call/adaptation/test/fake_frame_rate_provider.h
+++ b/call/adaptation/test/fake_frame_rate_provider.h
@@ -21,29 +21,41 @@ namespace webrtc {
class MockVideoStreamEncoderObserver : public VideoStreamEncoderObserver {
public:
- MOCK_METHOD2(OnEncodedFrameTimeMeasured, void(int, int));
- MOCK_METHOD2(OnIncomingFrame, void(int, int));
- MOCK_METHOD2(OnSendEncodedImage,
- void(const EncodedImage&, const CodecSpecificInfo*));
- MOCK_METHOD1(OnEncoderImplementationChanged, void(const std::string&));
- MOCK_METHOD1(OnFrameDropped, void(DropReason));
- MOCK_METHOD2(OnEncoderReconfigured,
- void(const VideoEncoderConfig&,
- const std::vector<VideoStream>&));
- MOCK_METHOD3(OnAdaptationChanged,
- void(VideoAdaptationReason,
- const VideoAdaptationCounters&,
- const VideoAdaptationCounters&));
- MOCK_METHOD0(ClearAdaptationStats, void());
- MOCK_METHOD2(UpdateAdaptationSettings,
- void(AdaptationSettings, AdaptationSettings));
- MOCK_METHOD0(OnMinPixelLimitReached, void());
- MOCK_METHOD0(OnInitialQualityResolutionAdaptDown, void());
- MOCK_METHOD1(OnSuspendChange, void(bool));
- MOCK_METHOD2(OnBitrateAllocationUpdated,
- void(const VideoCodec&, const VideoBitrateAllocation&));
- MOCK_METHOD1(OnEncoderInternalScalerUpdate, void(bool));
- MOCK_CONST_METHOD0(GetInputFrameRate, int());
+ MOCK_METHOD(void, OnEncodedFrameTimeMeasured, (int, int), (override));
+ MOCK_METHOD(void, OnIncomingFrame, (int, int), (override));
+ MOCK_METHOD(void,
+ OnSendEncodedImage,
+ (const EncodedImage&, const CodecSpecificInfo*),
+ (override));
+ MOCK_METHOD(void,
+ OnEncoderImplementationChanged,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(void, OnFrameDropped, (DropReason), (override));
+ MOCK_METHOD(void,
+ OnEncoderReconfigured,
+ (const VideoEncoderConfig&, const std::vector<VideoStream>&),
+ (override));
+ MOCK_METHOD(void,
+ OnAdaptationChanged,
+ (VideoAdaptationReason,
+ const VideoAdaptationCounters&,
+ const VideoAdaptationCounters&),
+ (override));
+ MOCK_METHOD(void, ClearAdaptationStats, (), (override));
+ MOCK_METHOD(void,
+ UpdateAdaptationSettings,
+ (AdaptationSettings, AdaptationSettings),
+ (override));
+ MOCK_METHOD(void, OnMinPixelLimitReached, (), (override));
+ MOCK_METHOD(void, OnInitialQualityResolutionAdaptDown, (), (override));
+ MOCK_METHOD(void, OnSuspendChange, (bool), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoCodec&, const VideoBitrateAllocation&),
+ (override));
+ MOCK_METHOD(void, OnEncoderInternalScalerUpdate, (bool), (override));
+ MOCK_METHOD(int, GetInputFrameRate, (), (const, override));
};
class FakeFrameRateProvider : public MockVideoStreamEncoderObserver {
diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc
index bd7ad5431f..fa69e886bf 100644
--- a/call/adaptation/test/fake_resource.cc
+++ b/call/adaptation/test/fake_resource.cc
@@ -10,44 +10,35 @@
#include "call/adaptation/test/fake_resource.h"
+#include <algorithm>
#include <utility>
+#include "rtc_base/ref_counted_object.h"
+
namespace webrtc {
+// static
+rtc::scoped_refptr<FakeResource> FakeResource::Create(std::string name) {
+ return new rtc::RefCountedObject<FakeResource>(name);
+}
+
FakeResource::FakeResource(std::string name)
- : Resource(),
- name_(std::move(name)),
- is_adaptation_up_allowed_(true),
- num_adaptations_applied_(0) {}
+ : Resource(), name_(std::move(name)), listener_(nullptr) {}
FakeResource::~FakeResource() {}
-void FakeResource::set_usage_state(ResourceUsageState usage_state) {
- OnResourceUsageStateMeasured(usage_state);
-}
-
-void FakeResource::set_is_adaptation_up_allowed(bool is_adaptation_up_allowed) {
- is_adaptation_up_allowed_ = is_adaptation_up_allowed;
-}
-
-size_t FakeResource::num_adaptations_applied() const {
- return num_adaptations_applied_;
+void FakeResource::SetUsageState(ResourceUsageState usage_state) {
+ if (listener_) {
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
}
-bool FakeResource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const {
- return is_adaptation_up_allowed_;
+std::string FakeResource::Name() const {
+ return name_;
}
-void FakeResource::OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) {
- ++num_adaptations_applied_;
+void FakeResource::SetResourceListener(ResourceListener* listener) {
+ listener_ = listener;
}
} // namespace webrtc
diff --git a/call/adaptation/test/fake_resource.h b/call/adaptation/test/fake_resource.h
index 0d9b1f46bb..e88d97db7a 100644
--- a/call/adaptation/test/fake_resource.h
+++ b/call/adaptation/test/fake_resource.h
@@ -12,36 +12,31 @@
#define CALL_ADAPTATION_TEST_FAKE_RESOURCE_H_
#include <string>
+#include <vector>
-#include "call/adaptation/resource.h"
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
namespace webrtc {
// Fake resource used for testing.
class FakeResource : public Resource {
public:
+ static rtc::scoped_refptr<FakeResource> Create(std::string name);
+
explicit FakeResource(std::string name);
~FakeResource() override;
- void set_usage_state(ResourceUsageState usage_state);
- void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed);
- size_t num_adaptations_applied() const;
+ void SetUsageState(ResourceUsageState usage_state);
// Resource implementation.
- std::string name() const override { return name_; }
- bool IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const override;
- void OnAdaptationApplied(const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) override;
+ std::string Name() const override;
+ void SetResourceListener(ResourceListener* listener) override;
private:
const std::string name_;
- bool is_adaptation_up_allowed_;
- size_t num_adaptations_applied_;
+ ResourceListener* listener_;
};
} // namespace webrtc
diff --git a/call/adaptation/test/mock_resource_listener.h b/call/adaptation/test/mock_resource_listener.h
new file mode 100644
index 0000000000..f0f998f2e3
--- /dev/null
+++ b/call/adaptation/test/mock_resource_listener.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
+#define CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
+
+#include "api/adaptation/resource.h"
+
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockResourceListener : public ResourceListener {
+ public:
+ MOCK_METHOD(void,
+ OnResourceUsageStateMeasured,
+ (rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state),
+ (override));
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
diff --git a/call/adaptation/video_source_restrictions.cc b/call/adaptation/video_source_restrictions.cc
index 6fbdcb42a6..e9d6c26137 100644
--- a/call/adaptation/video_source_restrictions.cc
+++ b/call/adaptation/video_source_restrictions.cc
@@ -13,6 +13,7 @@
#include <limits>
#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
namespace webrtc {
@@ -36,6 +37,19 @@ VideoSourceRestrictions::VideoSourceRestrictions(
RTC_DCHECK(!max_frame_rate_.has_value() || max_frame_rate_.value() > 0.0);
}
+std::string VideoSourceRestrictions::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "{";
+ if (max_frame_rate_)
+ ss << " max_fps=" << max_frame_rate_.value();
+ if (max_pixels_per_frame_)
+ ss << " max_pixels_per_frame=" << max_pixels_per_frame_.value();
+ if (target_pixels_per_frame_)
+ ss << " target_pixels_per_frame=" << target_pixels_per_frame_.value();
+ ss << " }";
+ return ss.Release();
+}
+
const absl::optional<size_t>& VideoSourceRestrictions::max_pixels_per_frame()
const {
return max_pixels_per_frame_;
diff --git a/call/adaptation/video_source_restrictions.h b/call/adaptation/video_source_restrictions.h
index 506bae6133..7f79a48e5d 100644
--- a/call/adaptation/video_source_restrictions.h
+++ b/call/adaptation/video_source_restrictions.h
@@ -11,6 +11,7 @@
#ifndef CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_
#define CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_
+#include <string>
#include <utility>
#include "absl/types/optional.h"
@@ -38,6 +39,8 @@ class VideoSourceRestrictions {
return !(*this == rhs);
}
+ std::string ToString() const;
+
// The source must produce a resolution less than or equal to
// max_pixels_per_frame().
const absl::optional<size_t>& max_pixels_per_frame() const;
diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc
index b224e3e4d2..ec80a13a08 100644
--- a/call/adaptation/video_stream_adapter.cc
+++ b/call/adaptation/video_stream_adapter.cc
@@ -15,11 +15,17 @@
#include <utility>
#include "absl/types/optional.h"
+#include "absl/types/variant.h"
+#include "api/video/video_adaptation_counters.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video_codecs/video_encoder.h"
+#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
+#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/synchronization/sequence_checker.h"
namespace webrtc {
@@ -27,13 +33,6 @@ const int kMinFrameRateFps = 2;
namespace {
-// Generate suggested higher and lower frame rates and resolutions, to be
-// applied to the VideoSourceRestrictor. These are used in "maintain-resolution"
-// and "maintain-framerate". The "balanced" degradation preference also makes
-// use of BalancedDegradationPreference when generating suggestions. The
-// VideoSourceRestrictor decidedes whether or not a proposed adaptation is
-// valid.
-
// For frame rate, the steps we take are 2/3 (down) and 3/2 (up).
int GetLowerFrameRateThan(int fps) {
RTC_DCHECK(fps != std::numeric_limits<int>::max());
@@ -59,8 +58,60 @@ int GetLowerResolutionThan(int pixel_count) {
return (pixel_count * 3) / 5;
}
+int GetIncreasedMaxPixelsWanted(int target_pixels) {
+ if (target_pixels == std::numeric_limits<int>::max())
+ return std::numeric_limits<int>::max();
+ // When we decrease resolution, we go down to at most 3/5 of current pixels.
+ // Thus to increase resolution, we need 3/5 to get back to where we started.
+ // When going up, the desired max_pixels_per_frame() has to be significantly
+ // higher than the target because the source's native resolutions might not
+ // match the target. We pick 12/5 of the target.
+ //
+ // (This value was historically 4 times the old target, which is (3/5)*4 of
+ // the new target - or 12/5 - assuming the target is adjusted according to
+ // the above steps.)
+ RTC_DCHECK(target_pixels != std::numeric_limits<int>::max());
+ return (target_pixels * 12) / 5;
+}
+
+bool CanDecreaseResolutionTo(int target_pixels,
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions) {
+ int max_pixels_per_frame =
+ rtc::dchecked_cast<int>(restrictions.max_pixels_per_frame().value_or(
+ std::numeric_limits<int>::max()));
+ return target_pixels < max_pixels_per_frame &&
+ target_pixels >= input_state.min_pixels_per_frame();
+}
+
+bool CanIncreaseResolutionTo(int target_pixels,
+ const VideoSourceRestrictions& restrictions) {
+ int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels);
+ int max_pixels_per_frame =
+ rtc::dchecked_cast<int>(restrictions.max_pixels_per_frame().value_or(
+ std::numeric_limits<int>::max()));
+ return max_pixels_wanted > max_pixels_per_frame;
+}
+
+bool CanDecreaseFrameRateTo(int max_frame_rate,
+ const VideoSourceRestrictions& restrictions) {
+ const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate);
+ return fps_wanted <
+ rtc::dchecked_cast<int>(restrictions.max_frame_rate().value_or(
+ std::numeric_limits<int>::max()));
+}
+
+bool CanIncreaseFrameRateTo(int max_frame_rate,
+ const VideoSourceRestrictions& restrictions) {
+ return max_frame_rate >
+ rtc::dchecked_cast<int>(restrictions.max_frame_rate().value_or(
+ std::numeric_limits<int>::max()));
+}
+
} // namespace
+VideoSourceRestrictionsListener::~VideoSourceRestrictionsListener() = default;
+
VideoSourceRestrictions FilterRestrictionsByDegradationPreference(
VideoSourceRestrictions source_restrictions,
DegradationPreference degradation_preference) {
@@ -82,28 +133,6 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference(
return source_restrictions;
}
-VideoAdaptationCounters FilterVideoAdaptationCountersByDegradationPreference(
- VideoAdaptationCounters counters,
- DegradationPreference degradation_preference) {
- switch (degradation_preference) {
- case DegradationPreference::BALANCED:
- break;
- case DegradationPreference::MAINTAIN_FRAMERATE:
- counters.fps_adaptations = 0;
- break;
- case DegradationPreference::MAINTAIN_RESOLUTION:
- counters.resolution_adaptations = 0;
- break;
- case DegradationPreference::DISABLED:
- counters.resolution_adaptations = 0;
- counters.fps_adaptations = 0;
- break;
- default:
- RTC_NOTREACHED();
- }
- return counters;
-}
-
// TODO(hbos): Use absl::optional<> instead?
int GetHigherResolutionThan(int pixel_count) {
return pixel_count != std::numeric_limits<int>::max()
@@ -111,38 +140,44 @@ int GetHigherResolutionThan(int pixel_count) {
: std::numeric_limits<int>::max();
}
-Adaptation::Step::Step(StepType type, int target)
- : type(type), target(target) {}
-
-Adaptation::Adaptation(int validation_id, Step step)
- : validation_id_(validation_id),
- status_(Status::kValid),
- step_(std::move(step)),
- min_pixel_limit_reached_(false) {}
+// static
+const char* Adaptation::StatusToString(Adaptation::Status status) {
+ switch (status) {
+ case Adaptation::Status::kValid:
+ return "kValid";
+ case Adaptation::Status::kLimitReached:
+ return "kLimitReached";
+ case Adaptation::Status::kAwaitingPreviousAdaptation:
+ return "kAwaitingPreviousAdaptation";
+ case Status::kInsufficientInput:
+ return "kInsufficientInput";
+ case Status::kAdaptationDisabled:
+ return "kAdaptationDisabled";
+ case Status::kRejectedByConstraint:
+ return "kRejectedByConstraint";
+ }
+}
Adaptation::Adaptation(int validation_id,
- Step step,
+ VideoSourceRestrictions restrictions,
+ VideoAdaptationCounters counters,
+ VideoStreamInputState input_state,
bool min_pixel_limit_reached)
: validation_id_(validation_id),
status_(Status::kValid),
- step_(std::move(step)),
- min_pixel_limit_reached_(min_pixel_limit_reached) {}
-
-Adaptation::Adaptation(int validation_id, Status invalid_status)
- : validation_id_(validation_id),
- status_(invalid_status),
- step_(absl::nullopt),
- min_pixel_limit_reached_(false) {
- RTC_DCHECK_NE(status_, Status::kValid);
-}
+ min_pixel_limit_reached_(min_pixel_limit_reached),
+ input_state_(std::move(input_state)),
+ restrictions_(std::move(restrictions)),
+ counters_(std::move(counters)) {}
Adaptation::Adaptation(int validation_id,
Status invalid_status,
+ VideoStreamInputState input_state,
bool min_pixel_limit_reached)
: validation_id_(validation_id),
status_(invalid_status),
- step_(absl::nullopt),
- min_pixel_limit_reached_(min_pixel_limit_reached) {
+ min_pixel_limit_reached_(min_pixel_limit_reached),
+ input_state_(std::move(input_state)) {
RTC_DCHECK_NE(status_, Status::kValid);
}
@@ -154,398 +189,513 @@ bool Adaptation::min_pixel_limit_reached() const {
return min_pixel_limit_reached_;
}
-const Adaptation::Step& Adaptation::step() const {
- RTC_DCHECK_EQ(status_, Status::kValid);
- return step_.value();
+const VideoStreamInputState& Adaptation::input_state() const {
+ return input_state_;
}
-// VideoSourceRestrictor is responsible for keeping track of current
-// VideoSourceRestrictions.
-class VideoStreamAdapter::VideoSourceRestrictor {
- public:
- VideoSourceRestrictor() {}
-
- VideoSourceRestrictions source_restrictions() const {
- return source_restrictions_;
- }
- const VideoAdaptationCounters& adaptation_counters() const {
- return adaptations_;
- }
- void ClearRestrictions() {
- source_restrictions_ = VideoSourceRestrictions();
- adaptations_ = VideoAdaptationCounters();
- }
-
- void set_min_pixels_per_frame(int min_pixels_per_frame) {
- min_pixels_per_frame_ = min_pixels_per_frame;
- }
-
- int min_pixels_per_frame() const { return min_pixels_per_frame_; }
-
- bool CanDecreaseResolutionTo(int target_pixels) {
- int max_pixels_per_frame = rtc::dchecked_cast<int>(
- source_restrictions_.max_pixels_per_frame().value_or(
- std::numeric_limits<int>::max()));
- return target_pixels < max_pixels_per_frame &&
- target_pixels >= min_pixels_per_frame_;
- }
-
- bool CanIncreaseResolutionTo(int target_pixels) {
- int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels);
- int max_pixels_per_frame = rtc::dchecked_cast<int>(
- source_restrictions_.max_pixels_per_frame().value_or(
- std::numeric_limits<int>::max()));
- return max_pixels_wanted > max_pixels_per_frame;
- }
-
- bool CanDecreaseFrameRateTo(int max_frame_rate) {
- const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate);
- return fps_wanted < rtc::dchecked_cast<int>(
- source_restrictions_.max_frame_rate().value_or(
- std::numeric_limits<int>::max()));
- }
-
- bool CanIncreaseFrameRateTo(int max_frame_rate) {
- return max_frame_rate > rtc::dchecked_cast<int>(
- source_restrictions_.max_frame_rate().value_or(
- std::numeric_limits<int>::max()));
- }
-
- void ApplyAdaptationStep(const Adaptation::Step& step,
- DegradationPreference degradation_preference) {
- switch (step.type) {
- case Adaptation::StepType::kIncreaseResolution:
- IncreaseResolutionTo(step.target);
- break;
- case Adaptation::StepType::kDecreaseResolution:
- DecreaseResolutionTo(step.target);
- break;
- case Adaptation::StepType::kIncreaseFrameRate:
- IncreaseFrameRateTo(step.target);
- // TODO(https://crbug.com/webrtc/11222): Don't adapt in two steps.
- // GetAdaptationUp() should tell us the correct value, but BALANCED
- // logic in DecrementFramerate() makes it hard to predict whether this
- // will be the last step. Remove the dependency on
- // adaptation_counters().
- if (degradation_preference == DegradationPreference::BALANCED &&
- adaptation_counters().fps_adaptations == 0 &&
- step.target != std::numeric_limits<int>::max()) {
- RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting.";
- IncreaseFrameRateTo(std::numeric_limits<int>::max());
- }
- break;
- case Adaptation::StepType::kDecreaseFrameRate:
- DecreaseFrameRateTo(step.target);
- break;
- }
- }
-
- private:
- static int GetIncreasedMaxPixelsWanted(int target_pixels) {
- if (target_pixels == std::numeric_limits<int>::max())
- return std::numeric_limits<int>::max();
- // When we decrease resolution, we go down to at most 3/5 of current pixels.
- // Thus to increase resolution, we need 3/5 to get back to where we started.
- // When going up, the desired max_pixels_per_frame() has to be significantly
- // higher than the target because the source's native resolutions might not
- // match the target. We pick 12/5 of the target.
- //
- // (This value was historically 4 times the old target, which is (3/5)*4 of
- // the new target - or 12/5 - assuming the target is adjusted according to
- // the above steps.)
- RTC_DCHECK(target_pixels != std::numeric_limits<int>::max());
- return (target_pixels * 12) / 5;
- }
-
- void DecreaseResolutionTo(int target_pixels) {
- RTC_DCHECK(CanDecreaseResolutionTo(target_pixels));
- RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: "
- << target_pixels;
- source_restrictions_.set_max_pixels_per_frame(
- target_pixels != std::numeric_limits<int>::max()
- ? absl::optional<size_t>(target_pixels)
- : absl::nullopt);
- source_restrictions_.set_target_pixels_per_frame(absl::nullopt);
- ++adaptations_.resolution_adaptations;
- }
-
- void IncreaseResolutionTo(int target_pixels) {
- RTC_DCHECK(CanIncreaseResolutionTo(target_pixels));
- int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels);
- RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: "
- << max_pixels_wanted;
- source_restrictions_.set_max_pixels_per_frame(
- max_pixels_wanted != std::numeric_limits<int>::max()
- ? absl::optional<size_t>(max_pixels_wanted)
- : absl::nullopt);
- source_restrictions_.set_target_pixels_per_frame(
- max_pixels_wanted != std::numeric_limits<int>::max()
- ? absl::optional<size_t>(target_pixels)
- : absl::nullopt);
- --adaptations_.resolution_adaptations;
- RTC_DCHECK_GE(adaptations_.resolution_adaptations, 0);
- }
-
- void DecreaseFrameRateTo(int max_frame_rate) {
- RTC_DCHECK(CanDecreaseFrameRateTo(max_frame_rate));
- max_frame_rate = std::max(kMinFrameRateFps, max_frame_rate);
- RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate;
- source_restrictions_.set_max_frame_rate(
- max_frame_rate != std::numeric_limits<int>::max()
- ? absl::optional<double>(max_frame_rate)
- : absl::nullopt);
- ++adaptations_.fps_adaptations;
- }
-
- void IncreaseFrameRateTo(int max_frame_rate) {
- RTC_DCHECK(CanIncreaseFrameRateTo(max_frame_rate));
- RTC_LOG(LS_INFO) << "Scaling up framerate: " << max_frame_rate;
- source_restrictions_.set_max_frame_rate(
- max_frame_rate != std::numeric_limits<int>::max()
- ? absl::optional<double>(max_frame_rate)
- : absl::nullopt);
- --adaptations_.fps_adaptations;
- RTC_DCHECK_GE(adaptations_.fps_adaptations, 0);
- }
-
- // Needed by CanDecreaseResolutionTo().
- int min_pixels_per_frame_ = 0;
- // Current State.
- VideoSourceRestrictions source_restrictions_;
- VideoAdaptationCounters adaptations_;
-};
+const VideoSourceRestrictions& Adaptation::restrictions() const {
+ return restrictions_;
+}
-// static
-VideoStreamAdapter::AdaptationRequest::Mode
-VideoStreamAdapter::AdaptationRequest::GetModeFromAdaptationAction(
- Adaptation::StepType step_type) {
- switch (step_type) {
- case Adaptation::StepType::kIncreaseResolution:
- return AdaptationRequest::Mode::kAdaptUp;
- case Adaptation::StepType::kDecreaseResolution:
- return AdaptationRequest::Mode::kAdaptDown;
- case Adaptation::StepType::kIncreaseFrameRate:
- return AdaptationRequest::Mode::kAdaptUp;
- case Adaptation::StepType::kDecreaseFrameRate:
- return AdaptationRequest::Mode::kAdaptDown;
- }
+const VideoAdaptationCounters& Adaptation::counters() const {
+ return counters_;
}
-VideoStreamAdapter::VideoStreamAdapter()
- : source_restrictor_(std::make_unique<VideoSourceRestrictor>()),
+VideoStreamAdapter::VideoStreamAdapter(
+ VideoStreamInputStateProvider* input_state_provider)
+ : input_state_provider_(input_state_provider),
balanced_settings_(),
adaptation_validation_id_(0),
degradation_preference_(DegradationPreference::DISABLED),
- input_state_(),
- last_adaptation_request_(absl::nullopt) {}
+ awaiting_frame_size_change_(absl::nullopt),
+ last_video_source_restrictions_() {
+ sequence_checker_.Detach();
+}
-VideoStreamAdapter::~VideoStreamAdapter() {}
+VideoStreamAdapter::~VideoStreamAdapter() {
+ RTC_DCHECK(adaptation_listeners_.empty())
+ << "There are listener(s) attached to a VideoStreamAdapter being "
+ "destroyed.";
+ RTC_DCHECK(adaptation_constraints_.empty())
+ << "There are constaint(s) attached to a VideoStreamAdapter being "
+ "destroyed.";
+}
VideoSourceRestrictions VideoStreamAdapter::source_restrictions() const {
- return source_restrictor_->source_restrictions();
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return current_restrictions_.restrictions;
}
const VideoAdaptationCounters& VideoStreamAdapter::adaptation_counters() const {
- return source_restrictor_->adaptation_counters();
-}
-
-const BalancedDegradationSettings& VideoStreamAdapter::balanced_settings()
- const {
- return balanced_settings_;
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return current_restrictions_.counters;
}
void VideoStreamAdapter::ClearRestrictions() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
// Invalidate any previously returned Adaptation.
+ RTC_LOG(INFO) << "Resetting restrictions";
++adaptation_validation_id_;
- source_restrictor_->ClearRestrictions();
- last_adaptation_request_.reset();
+ current_restrictions_ = {VideoSourceRestrictions(),
+ VideoAdaptationCounters()};
+ awaiting_frame_size_change_ = absl::nullopt;
+ BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(),
+ nullptr);
+}
+
+void VideoStreamAdapter::AddRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(std::find(restrictions_listeners_.begin(),
+ restrictions_listeners_.end(),
+ restrictions_listener) == restrictions_listeners_.end());
+ restrictions_listeners_.push_back(restrictions_listener);
+}
+
+void VideoStreamAdapter::RemoveRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = std::find(restrictions_listeners_.begin(),
+ restrictions_listeners_.end(), restrictions_listener);
+ RTC_DCHECK(it != restrictions_listeners_.end());
+ restrictions_listeners_.erase(it);
+}
+
+void VideoStreamAdapter::AddAdaptationListener(
+ AdaptationListener* adaptation_listener) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(std::find(adaptation_listeners_.begin(),
+ adaptation_listeners_.end(),
+ adaptation_listener) == adaptation_listeners_.end());
+ adaptation_listeners_.push_back(adaptation_listener);
+}
+
+void VideoStreamAdapter::RemoveAdaptationListener(
+ AdaptationListener* adaptation_listener) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = std::find(adaptation_listeners_.begin(),
+ adaptation_listeners_.end(), adaptation_listener);
+ RTC_DCHECK(it != adaptation_listeners_.end());
+ adaptation_listeners_.erase(it);
+}
+
+void VideoStreamAdapter::AddAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(std::find(adaptation_constraints_.begin(),
+ adaptation_constraints_.end(),
+ adaptation_constraint) == adaptation_constraints_.end());
+ adaptation_constraints_.push_back(adaptation_constraint);
+}
+
+void VideoStreamAdapter::RemoveAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = std::find(adaptation_constraints_.begin(),
+ adaptation_constraints_.end(), adaptation_constraint);
+ RTC_DCHECK(it != adaptation_constraints_.end());
+ adaptation_constraints_.erase(it);
}
void VideoStreamAdapter::SetDegradationPreference(
DegradationPreference degradation_preference) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
if (degradation_preference_ == degradation_preference)
return;
// Invalidate any previously returned Adaptation.
++adaptation_validation_id_;
- if (degradation_preference == DegradationPreference::BALANCED ||
- degradation_preference_ == DegradationPreference::BALANCED) {
+ bool balanced_switch =
+ degradation_preference == DegradationPreference::BALANCED ||
+ degradation_preference_ == DegradationPreference::BALANCED;
+ degradation_preference_ = degradation_preference;
+ if (balanced_switch) {
+ // ClearRestrictions() calls BroadcastVideoRestrictionsUpdate(nullptr).
ClearRestrictions();
+ } else {
+ BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(),
+ nullptr);
}
- degradation_preference_ = degradation_preference;
}
-void VideoStreamAdapter::SetInput(VideoStreamInputState input_state) {
- // Invalidate any previously returned Adaptation.
+struct VideoStreamAdapter::RestrictionsOrStateVisitor {
+ Adaptation operator()(const RestrictionsWithCounters& r) const {
+ return Adaptation(adaptation_validation_id, r.restrictions, r.counters,
+ input_state, min_pixel_limit_reached());
+ }
+ Adaptation operator()(const Adaptation::Status& status) const {
+ RTC_DCHECK_NE(status, Adaptation::Status::kValid);
+ return Adaptation(adaptation_validation_id, status, input_state,
+ min_pixel_limit_reached());
+ }
+ bool min_pixel_limit_reached() const {
+ return input_state.frame_size_pixels().has_value() &&
+ GetLowerResolutionThan(input_state.frame_size_pixels().value()) <
+ input_state.min_pixels_per_frame();
+ }
+
+ const int adaptation_validation_id;
+ const VideoStreamInputState& input_state;
+};
+
+Adaptation VideoStreamAdapter::RestrictionsOrStateToAdaptation(
+ VideoStreamAdapter::RestrictionsOrState step_or_state,
+ const VideoStreamInputState& input_state) const {
+ RTC_DCHECK(!step_or_state.valueless_by_exception());
+ return absl::visit(
+ RestrictionsOrStateVisitor{adaptation_validation_id_, input_state},
+ step_or_state);
+}
+
+Adaptation VideoStreamAdapter::GetAdaptationUp(
+ const VideoStreamInputState& input_state,
+ rtc::scoped_refptr<Resource> resource) const {
+ RestrictionsOrState step = GetAdaptationUpStep(input_state);
+ // If an adaptation proposed, check with the constraints that it is ok.
+ if (absl::holds_alternative<RestrictionsWithCounters>(step)) {
+ RestrictionsWithCounters restrictions =
+ absl::get<RestrictionsWithCounters>(step);
+ for (const auto* constraint : adaptation_constraints_) {
+ if (!constraint->IsAdaptationUpAllowed(
+ input_state, current_restrictions_.restrictions,
+ restrictions.restrictions, resource)) {
+ RTC_LOG(INFO) << "Not adapting up because constraint \""
+ << constraint->Name() << "\" disallowed it";
+ step = Adaptation::Status::kRejectedByConstraint;
+ }
+ }
+ }
+ return RestrictionsOrStateToAdaptation(step, input_state);
+}
+
+Adaptation VideoStreamAdapter::GetAdaptationUp(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(resource);
+ VideoStreamInputState input_state = input_state_provider_->InputState();
++adaptation_validation_id_;
- input_state_ = input_state;
- source_restrictor_->set_min_pixels_per_frame(
- input_state_.min_pixels_per_frame());
+ Adaptation adaptation = GetAdaptationUp(input_state, resource);
+ return adaptation;
}
-Adaptation VideoStreamAdapter::GetAdaptationUp() const {
- RTC_DCHECK_NE(degradation_preference_, DegradationPreference::DISABLED);
- RTC_DCHECK(input_state_.HasInputFrameSizeAndFramesPerSecond());
+VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep(
+ const VideoStreamInputState& input_state) const {
+ if (!HasSufficientInputForAdaptation(input_state)) {
+ return Adaptation::Status::kInsufficientInput;
+ }
// Don't adapt if we're awaiting a previous adaptation to have an effect.
- bool last_adaptation_was_up =
- last_adaptation_request_ &&
- last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp;
- if (last_adaptation_was_up &&
+ if (awaiting_frame_size_change_ &&
+ awaiting_frame_size_change_->pixels_increased &&
degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE &&
- input_state_.frame_size_pixels().value() <=
- last_adaptation_request_->input_pixel_count_) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kAwaitingPreviousAdaptation);
+ input_state.frame_size_pixels().value() <=
+ awaiting_frame_size_change_->frame_size_pixels) {
+ return Adaptation::Status::kAwaitingPreviousAdaptation;
}
// Maybe propose targets based on degradation preference.
switch (degradation_preference_) {
case DegradationPreference::BALANCED: {
// Attempt to increase target frame rate.
- int target_fps =
- balanced_settings_.MaxFps(input_state_.video_codec_type(),
- input_state_.frame_size_pixels().value());
- if (source_restrictor_->CanIncreaseFrameRateTo(target_fps)) {
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kIncreaseFrameRate,
- target_fps));
+ RestrictionsOrState increase_frame_rate =
+ IncreaseFramerate(input_state, current_restrictions_);
+ if (absl::holds_alternative<RestrictionsWithCounters>(
+ increase_frame_rate)) {
+ return increase_frame_rate;
}
- // Scale up resolution.
+ // else, increase resolution.
ABSL_FALLTHROUGH_INTENDED;
}
case DegradationPreference::MAINTAIN_FRAMERATE: {
// Attempt to increase pixel count.
- int target_pixels = input_state_.frame_size_pixels().value();
- if (source_restrictor_->adaptation_counters().resolution_adaptations ==
- 1) {
- RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting.";
- target_pixels = std::numeric_limits<int>::max();
- }
- target_pixels = GetHigherResolutionThan(target_pixels);
- if (!source_restrictor_->CanIncreaseResolutionTo(target_pixels)) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached);
- }
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kIncreaseResolution,
- target_pixels));
+ return IncreaseResolution(input_state, current_restrictions_);
}
case DegradationPreference::MAINTAIN_RESOLUTION: {
// Scale up framerate.
- int target_fps = input_state_.frames_per_second();
- if (source_restrictor_->adaptation_counters().fps_adaptations == 1) {
- RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting.";
- target_fps = std::numeric_limits<int>::max();
- }
- target_fps = GetHigherFrameRateThan(target_fps);
- if (!source_restrictor_->CanIncreaseFrameRateTo(target_fps)) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached);
- }
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kIncreaseFrameRate,
- target_fps));
+ return IncreaseFramerate(input_state, current_restrictions_);
}
case DegradationPreference::DISABLED:
- RTC_NOTREACHED();
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached);
+ return Adaptation::Status::kAdaptationDisabled;
}
}
-Adaptation VideoStreamAdapter::GetAdaptationDown() const {
- RTC_DCHECK_NE(degradation_preference_, DegradationPreference::DISABLED);
- RTC_DCHECK(input_state_.HasInputFrameSizeAndFramesPerSecond());
- // Don't adapt adaptation is disabled.
- bool last_adaptation_was_down =
- last_adaptation_request_ &&
- last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown;
- // Don't adapt if we're awaiting a previous adaptation to have an effect.
- if (last_adaptation_was_down &&
+Adaptation VideoStreamAdapter::GetAdaptationDown() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ VideoStreamInputState input_state = input_state_provider_->InputState();
+ ++adaptation_validation_id_;
+ return RestrictionsOrStateToAdaptation(GetAdaptationDownStep(input_state),
+ input_state);
+}
+
+VideoStreamAdapter::RestrictionsOrState
+VideoStreamAdapter::GetAdaptationDownStep(
+ const VideoStreamInputState& input_state) const {
+ if (!HasSufficientInputForAdaptation(input_state)) {
+ return Adaptation::Status::kInsufficientInput;
+ }
+ // Don't adapt if we're awaiting a previous adaptation to have an effect or
+ // if we switched degradation preference.
+ if (awaiting_frame_size_change_ &&
+ !awaiting_frame_size_change_->pixels_increased &&
degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE &&
- input_state_.frame_size_pixels().value() >=
- last_adaptation_request_->input_pixel_count_) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kAwaitingPreviousAdaptation);
+ input_state.frame_size_pixels().value() >=
+ awaiting_frame_size_change_->frame_size_pixels) {
+ return Adaptation::Status::kAwaitingPreviousAdaptation;
}
-
// Maybe propose targets based on degradation preference.
switch (degradation_preference_) {
case DegradationPreference::BALANCED: {
// Try scale down framerate, if lower.
- int target_fps =
- balanced_settings_.MinFps(input_state_.video_codec_type(),
- input_state_.frame_size_pixels().value());
- if (source_restrictor_->CanDecreaseFrameRateTo(target_fps)) {
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kDecreaseFrameRate,
- target_fps));
+ RestrictionsOrState decrease_frame_rate =
+ DecreaseFramerate(input_state, current_restrictions_);
+ if (absl::holds_alternative<RestrictionsWithCounters>(
+ decrease_frame_rate)) {
+ return decrease_frame_rate;
}
- // Scale down resolution.
+ // else, decrease resolution.
ABSL_FALLTHROUGH_INTENDED;
}
case DegradationPreference::MAINTAIN_FRAMERATE: {
- // Scale down resolution.
- int target_pixels =
- GetLowerResolutionThan(input_state_.frame_size_pixels().value());
- bool min_pixel_limit_reached =
- target_pixels < source_restrictor_->min_pixels_per_frame();
- if (!source_restrictor_->CanDecreaseResolutionTo(target_pixels)) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached,
- min_pixel_limit_reached);
- }
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kDecreaseResolution,
- target_pixels),
- min_pixel_limit_reached);
+ return DecreaseResolution(input_state, current_restrictions_);
}
case DegradationPreference::MAINTAIN_RESOLUTION: {
- int target_fps = GetLowerFrameRateThan(input_state_.frames_per_second());
- if (!source_restrictor_->CanDecreaseFrameRateTo(target_fps)) {
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached);
- }
- return Adaptation(
- adaptation_validation_id_,
- Adaptation::Step(Adaptation::StepType::kDecreaseFrameRate,
- target_fps));
+ return DecreaseFramerate(input_state, current_restrictions_);
+ }
+ case DegradationPreference::DISABLED:
+ return Adaptation::Status::kAdaptationDisabled;
+ }
+}
+
+VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) {
+ int target_pixels =
+ GetLowerResolutionThan(input_state.frame_size_pixels().value());
+ if (!CanDecreaseResolutionTo(target_pixels, input_state,
+ current_restrictions.restrictions)) {
+ return Adaptation::Status::kLimitReached;
+ }
+ RestrictionsWithCounters new_restrictions = current_restrictions;
+ RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " << target_pixels;
+ new_restrictions.restrictions.set_max_pixels_per_frame(
+ target_pixels != std::numeric_limits<int>::max()
+ ? absl::optional<size_t>(target_pixels)
+ : absl::nullopt);
+ new_restrictions.restrictions.set_target_pixels_per_frame(absl::nullopt);
+ ++new_restrictions.counters.resolution_adaptations;
+ return new_restrictions;
+}
+
+VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseFramerate(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) const {
+ int max_frame_rate;
+ if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) {
+ max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second());
+ } else if (degradation_preference_ == DegradationPreference::BALANCED) {
+ max_frame_rate =
+ balanced_settings_.MinFps(input_state.video_codec_type(),
+ input_state.frame_size_pixels().value());
+ } else {
+ RTC_NOTREACHED();
+ max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second());
+ }
+ if (!CanDecreaseFrameRateTo(max_frame_rate,
+ current_restrictions.restrictions)) {
+ return Adaptation::Status::kLimitReached;
+ }
+ RestrictionsWithCounters new_restrictions = current_restrictions;
+ max_frame_rate = std::max(kMinFrameRateFps, max_frame_rate);
+ RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate;
+ new_restrictions.restrictions.set_max_frame_rate(
+ max_frame_rate != std::numeric_limits<int>::max()
+ ? absl::optional<double>(max_frame_rate)
+ : absl::nullopt);
+ ++new_restrictions.counters.fps_adaptations;
+ return new_restrictions;
+}
+
+VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseResolution(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) {
+ int target_pixels = input_state.frame_size_pixels().value();
+ if (current_restrictions.counters.resolution_adaptations == 1) {
+ RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting.";
+ target_pixels = std::numeric_limits<int>::max();
+ }
+ target_pixels = GetHigherResolutionThan(target_pixels);
+ if (!CanIncreaseResolutionTo(target_pixels,
+ current_restrictions.restrictions)) {
+ return Adaptation::Status::kLimitReached;
+ }
+ int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels);
+ RestrictionsWithCounters new_restrictions = current_restrictions;
+ RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: "
+ << max_pixels_wanted;
+ new_restrictions.restrictions.set_max_pixels_per_frame(
+ max_pixels_wanted != std::numeric_limits<int>::max()
+ ? absl::optional<size_t>(max_pixels_wanted)
+ : absl::nullopt);
+ new_restrictions.restrictions.set_target_pixels_per_frame(
+ max_pixels_wanted != std::numeric_limits<int>::max()
+ ? absl::optional<size_t>(target_pixels)
+ : absl::nullopt);
+ --new_restrictions.counters.resolution_adaptations;
+ RTC_DCHECK_GE(new_restrictions.counters.resolution_adaptations, 0);
+ return new_restrictions;
+}
+
+VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseFramerate(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) const {
+ int max_frame_rate;
+ if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) {
+ max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second());
+ } else if (degradation_preference_ == DegradationPreference::BALANCED) {
+ max_frame_rate =
+ balanced_settings_.MaxFps(input_state.video_codec_type(),
+ input_state.frame_size_pixels().value());
+ // In BALANCED, the max_frame_rate must be checked before proceeding. This
+ // is because the MaxFps might be the current Fps and so the balanced
+ // settings may want to scale up the resolution.=
+ if (!CanIncreaseFrameRateTo(max_frame_rate,
+ current_restrictions.restrictions)) {
+ return Adaptation::Status::kLimitReached;
}
+ } else {
+ RTC_NOTREACHED();
+ max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second());
+ }
+ if (current_restrictions.counters.fps_adaptations == 1) {
+ RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting.";
+ max_frame_rate = std::numeric_limits<int>::max();
+ }
+ if (!CanIncreaseFrameRateTo(max_frame_rate,
+ current_restrictions.restrictions)) {
+ return Adaptation::Status::kLimitReached;
+ }
+ RTC_LOG(LS_INFO) << "Scaling up framerate: " << max_frame_rate;
+ RestrictionsWithCounters new_restrictions = current_restrictions;
+ new_restrictions.restrictions.set_max_frame_rate(
+ max_frame_rate != std::numeric_limits<int>::max()
+ ? absl::optional<double>(max_frame_rate)
+ : absl::nullopt);
+ --new_restrictions.counters.fps_adaptations;
+ RTC_DCHECK_GE(new_restrictions.counters.fps_adaptations, 0);
+ return new_restrictions;
+}
+
+Adaptation VideoStreamAdapter::GetAdaptDownResolution() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ VideoStreamInputState input_state = input_state_provider_->InputState();
+ switch (degradation_preference_) {
case DegradationPreference::DISABLED:
+ return RestrictionsOrStateToAdaptation(
+ Adaptation::Status::kAdaptationDisabled, input_state);
+ case DegradationPreference::MAINTAIN_RESOLUTION:
+ return RestrictionsOrStateToAdaptation(Adaptation::Status::kLimitReached,
+ input_state);
+ case DegradationPreference::MAINTAIN_FRAMERATE:
+ return GetAdaptationDown();
+ case DegradationPreference::BALANCED: {
+ return RestrictionsOrStateToAdaptation(
+ GetAdaptDownResolutionStepForBalanced(input_state), input_state);
+ }
+ default:
RTC_NOTREACHED();
- return Adaptation(adaptation_validation_id_,
- Adaptation::Status::kLimitReached);
}
}
-VideoSourceRestrictions VideoStreamAdapter::PeekNextRestrictions(
- const Adaptation& adaptation) const {
- RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
- if (adaptation.status() != Adaptation::Status::kValid)
- return source_restrictor_->source_restrictions();
- VideoSourceRestrictor restrictor_copy = *source_restrictor_;
- restrictor_copy.ApplyAdaptationStep(adaptation.step(),
- degradation_preference_);
- return restrictor_copy.source_restrictions();
+VideoStreamAdapter::RestrictionsOrState
+VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced(
+ const VideoStreamInputState& input_state) const {
+ // Adapt twice if the first adaptation did not decrease resolution.
+ auto first_step = GetAdaptationDownStep(input_state);
+ if (!absl::holds_alternative<RestrictionsWithCounters>(first_step)) {
+ return first_step;
+ }
+ auto first_restrictions = absl::get<RestrictionsWithCounters>(first_step);
+ if (first_restrictions.counters.resolution_adaptations >
+ current_restrictions_.counters.resolution_adaptations) {
+ return first_step;
+ }
+ // We didn't decrease resolution so force it; amend a resolution resuction
+ // to the existing framerate reduction in |first_restrictions|.
+ auto second_step = DecreaseResolution(input_state, first_restrictions);
+ if (absl::holds_alternative<RestrictionsWithCounters>(second_step)) {
+ return second_step;
+ }
+ // If the second step was not successful then settle for the first one.
+ return first_step;
}
-void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation) {
+void VideoStreamAdapter::ApplyAdaptation(
+ const Adaptation& adaptation,
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
if (adaptation.status() != Adaptation::Status::kValid)
return;
// Remember the input pixels and fps of this adaptation. Used to avoid
// adapting again before this adaptation has had an effect.
- last_adaptation_request_.emplace(AdaptationRequest{
- input_state_.frame_size_pixels().value(),
- input_state_.frames_per_second(),
- AdaptationRequest::GetModeFromAdaptationAction(adaptation.step().type)});
- // Adapt!
- source_restrictor_->ApplyAdaptationStep(adaptation.step(),
- degradation_preference_);
+ if (DidIncreaseResolution(current_restrictions_.restrictions,
+ adaptation.restrictions())) {
+ awaiting_frame_size_change_.emplace(
+ true, adaptation.input_state().frame_size_pixels().value());
+ } else if (DidDecreaseResolution(current_restrictions_.restrictions,
+ adaptation.restrictions())) {
+ awaiting_frame_size_change_.emplace(
+ false, adaptation.input_state().frame_size_pixels().value());
+ } else {
+ awaiting_frame_size_change_ = absl::nullopt;
+ }
+ current_restrictions_ = {adaptation.restrictions(), adaptation.counters()};
+ BroadcastVideoRestrictionsUpdate(adaptation.input_state(), resource);
+}
+
+Adaptation VideoStreamAdapter::GetAdaptationTo(
+ const VideoAdaptationCounters& counters,
+ const VideoSourceRestrictions& restrictions) {
+ // Adapts up/down from the current levels so counters are equal.
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ VideoStreamInputState input_state = input_state_provider_->InputState();
+ return Adaptation(adaptation_validation_id_, restrictions, counters,
+ input_state, false);
+}
+
+void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate(
+ const VideoStreamInputState& input_state,
+ const rtc::scoped_refptr<Resource>& resource) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ VideoSourceRestrictions filtered = FilterRestrictionsByDegradationPreference(
+ source_restrictions(), degradation_preference_);
+ if (last_filtered_restrictions_ == filtered) {
+ return;
+ }
+ for (auto* restrictions_listener : restrictions_listeners_) {
+ restrictions_listener->OnVideoSourceRestrictionsUpdated(
+ filtered, current_restrictions_.counters, resource,
+ source_restrictions());
+ }
+ for (auto* adaptation_listener : adaptation_listeners_) {
+ adaptation_listener->OnAdaptationApplied(
+ input_state, last_video_source_restrictions_,
+ current_restrictions_.restrictions, resource);
+ }
+ last_video_source_restrictions_ = current_restrictions_.restrictions;
+ last_filtered_restrictions_ = filtered;
}
+bool VideoStreamAdapter::HasSufficientInputForAdaptation(
+ const VideoStreamInputState& input_state) const {
+ return input_state.HasInputFrameSizeAndFramesPerSecond() &&
+ (degradation_preference_ !=
+ DegradationPreference::MAINTAIN_RESOLUTION ||
+ input_state.frames_per_second() >= kMinFrameRateFps);
+}
+
+VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange(
+ bool pixels_increased,
+ int frame_size_pixels)
+ : pixels_increased(pixels_increased),
+ frame_size_pixels(frame_size_pixels) {}
+
} // namespace webrtc
diff --git a/call/adaptation/video_stream_adapter.h b/call/adaptation/video_stream_adapter.h
index a2dea157bb..27699e6aa8 100644
--- a/call/adaptation/video_stream_adapter.h
+++ b/call/adaptation/video_stream_adapter.h
@@ -12,18 +12,42 @@
#define CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_
#include <memory>
+#include <utility>
+#include <vector>
#include "absl/types/optional.h"
+#include "absl/types/variant.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
#include "api/video/video_adaptation_counters.h"
-#include "call/adaptation/resource.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
+#include "call/adaptation/degradation_preference_provider.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state.h"
+#include "call/adaptation/video_stream_input_state_provider.h"
#include "modules/video_coding/utility/quality_scaler.h"
#include "rtc_base/experiments/balanced_degradation_settings.h"
+#include "rtc_base/thread_annotations.h"
namespace webrtc {
+// The listener is responsible for carrying out the reconfiguration of the video
+// source such that the VideoSourceRestrictions are fulfilled.
+class VideoSourceRestrictionsListener {
+ public:
+ virtual ~VideoSourceRestrictionsListener();
+
+ // The |restrictions| are filtered by degradation preference but not the
+ // |adaptation_counters|, which are currently only reported for legacy stats
+ // calculation purposes.
+ virtual void OnVideoSourceRestrictionsUpdated(
+ VideoSourceRestrictions restrictions,
+ const VideoAdaptationCounters& adaptation_counters,
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) = 0;
+};
+
class VideoStreamAdapter;
extern const int kMinFrameRateFps;
@@ -32,15 +56,11 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference(
VideoSourceRestrictions source_restrictions,
DegradationPreference degradation_preference);
-VideoAdaptationCounters FilterVideoAdaptationCountersByDegradationPreference(
- VideoAdaptationCounters counters,
- DegradationPreference degradation_preference);
-
int GetHigherResolutionThan(int pixel_count);
-// Represents one step that the VideoStreamAdapter can take when adapting the
-// VideoSourceRestrictions up or down. Or, if adaptation is not valid, provides
-// a Status code indicating the reason for not adapting.
+// Either represents the next VideoSourceRestrictions the VideoStreamAdapter
+// will take, or provides a Status code indicating the reason for not adapting
+// if the adaptation is not valid.
class Adaptation final {
public:
enum class Status {
@@ -54,51 +74,49 @@ class Adaptation final {
// adaptation has not yet been reflected in the input resolution or frame
// rate; adaptation is refused to avoid "double-adapting".
kAwaitingPreviousAdaptation,
+ // Not enough input.
+ kInsufficientInput,
+ // Adaptation disabled via degradation preference.
+ kAdaptationDisabled,
+ // Adaptation up was rejected by a VideoAdaptationConstraint.
+ kRejectedByConstraint,
};
- // The status of this Adaptation. To find out how this Adaptation affects
- // VideoSourceRestrictions, see VideoStreamAdapter::PeekNextRestrictions().
+ static const char* StatusToString(Status status);
+
Status status() const;
+ const VideoStreamInputState& input_state() const;
+ const VideoSourceRestrictions& restrictions() const;
+ const VideoAdaptationCounters& counters() const;
// Used for stats reporting.
bool min_pixel_limit_reached() const;
private:
- // The adapter needs to know about step type and step target in order to
- // construct and perform an Adaptation, which is a detail we do not want to
- // expose to the public interface.
friend class VideoStreamAdapter;
- enum class StepType {
- kIncreaseResolution,
- kDecreaseResolution,
- kIncreaseFrameRate,
- kDecreaseFrameRate,
- };
-
- struct Step {
- Step(StepType type, int target);
- const StepType type;
- const int target; // Pixel or frame rate depending on |type|.
- };
-
- // Constructs with a valid adaptation Step. Status is kValid.
- Adaptation(int validation_id, Step step);
- Adaptation(int validation_id, Step step, bool min_pixel_limit_reached);
+ // Constructs with a valid adaptation. Status is kValid.
+ Adaptation(int validation_id,
+ VideoSourceRestrictions restrictions,
+ VideoAdaptationCounters counters,
+ VideoStreamInputState input_state,
+ bool min_pixel_limit_reached);
// Constructor when adaptation is not valid. Status MUST NOT be kValid.
- Adaptation(int validation_id, Status invalid_status);
Adaptation(int validation_id,
Status invalid_status,
+ VideoStreamInputState input_state,
bool min_pixel_limit_reached);
- const Step& step() const; // Only callable if |status_| is kValid.
-
// An Adaptation can become invalidated if the state of VideoStreamAdapter is
// modified before the Adaptation is applied. To guard against this, this ID
// has to match VideoStreamAdapter::adaptation_validation_id_ when applied.
+ // TODO(https://crbug.com/webrtc/11700): Remove the validation_id_.
const int validation_id_;
const Status status_;
- const absl::optional<Step> step_; // Only present if |status_| is kValid.
const bool min_pixel_limit_reached_;
+ // Input state when adaptation was made.
+ const VideoStreamInputState input_state_;
+ const VideoSourceRestrictions restrictions_;
+ const VideoAdaptationCounters counters_;
};
// Owns the VideoSourceRestriction for a single stream and is responsible for
@@ -109,77 +127,144 @@ class Adaptation final {
// 3. Modify the stream's restrictions in one of the valid ways.
class VideoStreamAdapter {
public:
- VideoStreamAdapter();
+ explicit VideoStreamAdapter(
+ VideoStreamInputStateProvider* input_state_provider);
~VideoStreamAdapter();
VideoSourceRestrictions source_restrictions() const;
const VideoAdaptationCounters& adaptation_counters() const;
- // TODO(hbos): Can we get rid of any external dependencies on
- // BalancedDegradationPreference? How the adaptor generates possible next
- // steps for adaptation should be an implementation detail. Can the relevant
- // information be inferred from AdaptationTargetOrReason?
- const BalancedDegradationSettings& balanced_settings() const;
void ClearRestrictions();
+ void AddRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener);
+ void RemoveRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener);
+ void AddAdaptationListener(AdaptationListener* adaptation_listener);
+ void RemoveAdaptationListener(AdaptationListener* adaptation_listener);
+ void AddAdaptationConstraint(AdaptationConstraint* adaptation_constraint);
+ void RemoveAdaptationConstraint(AdaptationConstraint* adaptation_constraint);
+
// TODO(hbos): Setting the degradation preference should not clear
// restrictions! This is not defined in the spec and is unexpected, there is a
// tiny risk that people would discover and rely on this behavior.
void SetDegradationPreference(DegradationPreference degradation_preference);
- // The adaptaiton logic depends on these inputs.
- void SetInput(VideoStreamInputState input_state);
// Returns an adaptation that we are guaranteed to be able to apply, or a
// status code indicating the reason why we cannot adapt.
- Adaptation GetAdaptationUp() const;
- Adaptation GetAdaptationDown() const;
- // Returns the restrictions that result from applying the adaptation, without
- // actually applying it. If the adaptation is not valid, current restrictions
- // are returned.
- VideoSourceRestrictions PeekNextRestrictions(
- const Adaptation& adaptation) const;
- // Updates source_restrictions() based according to the Adaptation.
- void ApplyAdaptation(const Adaptation& adaptation);
+ // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the
+ // AdaptationConstraint resources. Remove this parameter when it's removed.
+ Adaptation GetAdaptationUp(rtc::scoped_refptr<Resource> resource);
+ Adaptation GetAdaptationDown();
+ Adaptation GetAdaptationTo(const VideoAdaptationCounters& counters,
+ const VideoSourceRestrictions& restrictions);
+ // Tries to adapt the resolution one step. This is used for initial frame
+ // dropping. Does nothing if the degradation preference is not BALANCED or
+ // MAINTAIN_FRAMERATE. In the case of BALANCED, it will try twice to reduce
+ // the resolution. If it fails twice it gives up.
+ Adaptation GetAdaptDownResolution();
- private:
- class VideoSourceRestrictor;
-
- // The input frame rate and resolution at the time of an adaptation in the
- // direction described by |mode_| (up or down).
- // TODO(https://crbug.com/webrtc/11393): Can this be renamed? Can this be
- // merged with AdaptationTarget?
- struct AdaptationRequest {
- // The pixel count produced by the source at the time of the adaptation.
- int input_pixel_count_;
- // Framerate received from the source at the time of the adaptation.
- int framerate_fps_;
- // Indicates if request was to adapt up or down.
- enum class Mode { kAdaptUp, kAdaptDown } mode_;
-
- // This is a static method rather than an anonymous namespace function due
- // to namespace visiblity.
- static Mode GetModeFromAdaptationAction(Adaptation::StepType step_type);
+ // Updates source_restrictions() the Adaptation.
+ void ApplyAdaptation(const Adaptation& adaptation,
+ rtc::scoped_refptr<Resource> resource);
+
+ struct RestrictionsWithCounters {
+ VideoSourceRestrictions restrictions;
+ VideoAdaptationCounters counters;
};
- // Owner and modifier of the VideoSourceRestriction of this stream adaptor.
- const std::unique_ptr<VideoSourceRestrictor> source_restrictor_;
+ private:
+ void BroadcastVideoRestrictionsUpdate(
+ const VideoStreamInputState& input_state,
+ const rtc::scoped_refptr<Resource>& resource);
+
+ bool HasSufficientInputForAdaptation(const VideoStreamInputState& input_state)
+ const RTC_RUN_ON(&sequence_checker_);
+
+ using RestrictionsOrState =
+ absl::variant<RestrictionsWithCounters, Adaptation::Status>;
+ RestrictionsOrState GetAdaptationUpStep(
+ const VideoStreamInputState& input_state) const
+ RTC_RUN_ON(&sequence_checker_);
+ RestrictionsOrState GetAdaptationDownStep(
+ const VideoStreamInputState& input_state) const
+ RTC_RUN_ON(&sequence_checker_);
+ RestrictionsOrState GetAdaptDownResolutionStepForBalanced(
+ const VideoStreamInputState& input_state) const
+ RTC_RUN_ON(&sequence_checker_);
+
+ // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the
+ // AdaptationConstraint resources. Remove this parameter when it's removed.
+ Adaptation GetAdaptationUp(const VideoStreamInputState& input_state,
+ rtc::scoped_refptr<Resource> resource) const
+ RTC_RUN_ON(&sequence_checker_);
+ Adaptation GetAdaptationDown(const VideoStreamInputState& input_state) const
+ RTC_RUN_ON(&sequence_checker_);
+
+ static RestrictionsOrState DecreaseResolution(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions);
+ static RestrictionsOrState IncreaseResolution(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions);
+ // Framerate methods are member functions because they need internal state
+ // if the degradation preference is BALANCED.
+ RestrictionsOrState DecreaseFramerate(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) const
+ RTC_RUN_ON(&sequence_checker_);
+ RestrictionsOrState IncreaseFramerate(
+ const VideoStreamInputState& input_state,
+ const RestrictionsWithCounters& current_restrictions) const
+ RTC_RUN_ON(&sequence_checker_);
+
+ struct RestrictionsOrStateVisitor;
+ Adaptation RestrictionsOrStateToAdaptation(
+ RestrictionsOrState step_or_state,
+ const VideoStreamInputState& input_state) const
+ RTC_RUN_ON(&sequence_checker_);
+
+ SequenceChecker sequence_checker_ RTC_GUARDED_BY(&sequence_checker_);
+ // Gets the input state which is the basis of all adaptations.
+ // Thread safe.
+ VideoStreamInputStateProvider* input_state_provider_;
// Decides the next adaptation target in DegradationPreference::BALANCED.
const BalancedDegradationSettings balanced_settings_;
// To guard against applying adaptations that have become invalidated, an
// Adaptation that is applied has to have a matching validation ID.
- int adaptation_validation_id_;
+ int adaptation_validation_id_ RTC_GUARDED_BY(&sequence_checker_);
// When deciding the next target up or down, different strategies are used
// depending on the DegradationPreference.
// https://w3c.github.io/mst-content-hint/#dom-rtcdegradationpreference
- DegradationPreference degradation_preference_;
- VideoStreamInputState input_state_;
- // The input frame rate, resolution and adaptation direction of the last
- // ApplyAdaptationTarget(). Used to avoid adapting twice if a recent
- // adaptation has not had an effect on the input frame rate or resolution yet.
+ DegradationPreference degradation_preference_
+ RTC_GUARDED_BY(&sequence_checker_);
+ // Used to avoid adapting twice. Stores the resolution at the time of the last
+ // adaptation.
// TODO(hbos): Can we implement a more general "cooldown" mechanism of
// resources intead? If we already have adapted it seems like we should wait
// a while before adapting again, so that we are not acting on usage
// measurements that are made obsolete/unreliable by an "ongoing" adaptation.
- absl::optional<AdaptationRequest> last_adaptation_request_;
+ struct AwaitingFrameSizeChange {
+ AwaitingFrameSizeChange(bool pixels_increased, int frame_size);
+ const bool pixels_increased;
+ const int frame_size_pixels;
+ };
+ absl::optional<AwaitingFrameSizeChange> awaiting_frame_size_change_
+ RTC_GUARDED_BY(&sequence_checker_);
+ // The previous restrictions value. Starts as unrestricted.
+ VideoSourceRestrictions last_video_source_restrictions_
+ RTC_GUARDED_BY(&sequence_checker_);
+ VideoSourceRestrictions last_filtered_restrictions_
+ RTC_GUARDED_BY(&sequence_checker_);
+
+ std::vector<VideoSourceRestrictionsListener*> restrictions_listeners_
+ RTC_GUARDED_BY(&sequence_checker_);
+ std::vector<AdaptationListener*> adaptation_listeners_
+ RTC_GUARDED_BY(&sequence_checker_);
+ std::vector<AdaptationConstraint*> adaptation_constraints_
+ RTC_GUARDED_BY(&sequence_checker_);
+
+ RestrictionsWithCounters current_restrictions_
+ RTC_GUARDED_BY(&sequence_checker_);
};
} // namespace webrtc
diff --git a/call/adaptation/video_stream_adapter_unittest.cc b/call/adaptation/video_stream_adapter_unittest.cc
index 55d604e917..a6c8f6ece3 100644
--- a/call/adaptation/video_stream_adapter_unittest.cc
+++ b/call/adaptation/video_stream_adapter_unittest.cc
@@ -14,12 +14,18 @@
#include <utility>
#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_config.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/encoder_settings.h"
+#include "call/adaptation/test/fake_adaptation_listener.h"
+#include "call/adaptation/test/fake_resource.h"
#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
#include "rtc_base/string_encode.h"
#include "test/field_trial.h"
#include "test/gmock.h"
@@ -28,6 +34,11 @@
namespace webrtc {
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::SaveArg;
+
namespace {
const int kBalancedHighResolutionPixels = 1280 * 720;
@@ -49,16 +60,27 @@ std::string BalancedFieldTrialConfig() {
rtc::ToString(kBalancedHighFrameRateFps) + "/";
}
-VideoStreamInputState InputState(int input_pixels,
- int input_fps,
- int min_pixels_per_frame) {
- VideoStreamInputState input_state;
- input_state.set_has_input(true);
- input_state.set_frame_size_pixels(input_pixels);
- input_state.set_frames_per_second(input_fps);
- input_state.set_min_pixels_per_frame(min_pixels_per_frame);
- return input_state;
-}
+class FakeVideoStreamInputStateProvider : public VideoStreamInputStateProvider {
+ public:
+ FakeVideoStreamInputStateProvider()
+ : VideoStreamInputStateProvider(nullptr) {}
+ virtual ~FakeVideoStreamInputStateProvider() = default;
+
+ void SetInputState(int input_pixels,
+ int input_fps,
+ int min_pixels_per_frame) {
+ VideoStreamInputState input_state;
+ input_state.set_has_input(true);
+ input_state.set_frame_size_pixels(input_pixels);
+ input_state.set_frames_per_second(input_fps);
+ input_state.set_min_pixels_per_frame(min_pixels_per_frame);
+ fake_input_state_ = input_state;
+ }
+ VideoStreamInputState InputState() override { return fake_input_state_; }
+
+ private:
+ VideoStreamInputState fake_input_state_;
+};
// Responsible for adjusting the inputs to VideoStreamAdapter (SetInput), such
// as pixels and frame rate, according to the most recent source restrictions.
@@ -68,15 +90,16 @@ VideoStreamInputState InputState(int input_pixels,
class FakeVideoStream {
public:
FakeVideoStream(VideoStreamAdapter* adapter,
+ FakeVideoStreamInputStateProvider* provider,
int input_pixels,
int input_fps,
int min_pixels_per_frame)
: adapter_(adapter),
+ provider_(provider),
input_pixels_(input_pixels),
input_fps_(input_fps),
min_pixels_per_frame_(min_pixels_per_frame) {
- adapter_->SetInput(
- InputState(input_pixels_, input_fps_, min_pixels_per_frame_));
+ provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_);
}
int input_pixels() const { return input_pixels_; }
@@ -85,7 +108,7 @@ class FakeVideoStream {
// Performs ApplyAdaptation() followed by SetInput() with input pixels and
// frame rate adjusted according to the resulting restrictions.
void ApplyAdaptation(Adaptation adaptation) {
- adapter_->ApplyAdaptation(adaptation);
+ adapter_->ApplyAdaptation(adaptation, nullptr);
// Update input pixels and fps according to the resulting restrictions.
auto restrictions = adapter_->source_restrictions();
if (restrictions.target_pixels_per_frame().has_value()) {
@@ -99,201 +122,253 @@ class FakeVideoStream {
if (restrictions.max_frame_rate().has_value()) {
input_fps_ = restrictions.max_frame_rate().value();
}
- adapter_->SetInput(
- InputState(input_pixels_, input_fps_, min_pixels_per_frame_));
+ provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_);
}
private:
VideoStreamAdapter* adapter_;
+ FakeVideoStreamInputStateProvider* provider_;
int input_pixels_;
int input_fps_;
int min_pixels_per_frame_;
};
+class FakeVideoStreamAdapterListner : public VideoSourceRestrictionsListener {
+ public:
+ void OnVideoSourceRestrictionsUpdated(
+ VideoSourceRestrictions restrictions,
+ const VideoAdaptationCounters& adaptation_counters,
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) {
+ calls_++;
+ last_restrictions_ = unfiltered_restrictions;
+ }
+
+ int calls() const { return calls_; }
+
+ VideoSourceRestrictions last_restrictions() const {
+ return last_restrictions_;
+ }
+
+ private:
+ int calls_ = 0;
+ VideoSourceRestrictions last_restrictions_;
+};
+
+class MockAdaptationListener : public AdaptationListener {
+ public:
+ MOCK_METHOD(void,
+ OnAdaptationApplied,
+ (const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource),
+ (override));
+};
+
+class MockAdaptationConstraint : public AdaptationConstraint {
+ public:
+ MOCK_METHOD(bool,
+ IsAdaptationUpAllowed,
+ (const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource),
+ (const, override));
+
+ // MOCK_METHOD(std::string, Name, (), (const, override));
+ std::string Name() const override { return "MockAdaptationConstraint"; }
+};
+
} // namespace
-TEST(VideoStreamAdapterTest, NoRestrictionsByDefault) {
- VideoStreamAdapter adapter;
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
+class VideoStreamAdapterTest : public ::testing::Test {
+ public:
+ VideoStreamAdapterTest()
+ : field_trials_(BalancedFieldTrialConfig()),
+ input_state_provider_(),
+ resource_(FakeResource::Create("FakeResource")),
+ adapter_(&input_state_provider_) {}
+
+ protected:
+ webrtc::test::ScopedFieldTrials field_trials_;
+ FakeVideoStreamInputStateProvider input_state_provider_;
+ rtc::scoped_refptr<Resource> resource_;
+ VideoStreamAdapter adapter_;
+};
+
+TEST_F(VideoStreamAdapterTest, NoRestrictionsByDefault) {
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
}
-TEST(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) {
+TEST_F(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) {
const int kInputPixels = 1280 * 720;
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- adapter.SetInput(InputState(kInputPixels, 30, kDefaultMinPixelsPerFrame));
- Adaptation adaptation = adapter.GetAdaptationDown();
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ input_state_provider_.SetInputState(kInputPixels, 30,
+ kDefaultMinPixelsPerFrame);
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
EXPECT_FALSE(adaptation.min_pixel_limit_reached());
- adapter.ApplyAdaptation(adaptation);
+ adapter_.ApplyAdaptation(adaptation, nullptr);
EXPECT_EQ(static_cast<size_t>((kInputPixels * 3) / 5),
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
- EXPECT_EQ(absl::nullopt, adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
+ adapter_.source_restrictions().target_pixels_per_frame());
+ EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
}
-TEST(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToLimitReached) {
+TEST_F(VideoStreamAdapterTest,
+ MaintainFramerate_DecreasesPixelsToLimitReached) {
const int kMinPixelsPerFrame = 640 * 480;
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- adapter.SetInput(InputState(kMinPixelsPerFrame + 1, 30, kMinPixelsPerFrame));
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ input_state_provider_.SetInputState(kMinPixelsPerFrame + 1, 30,
+ kMinPixelsPerFrame);
// Even though we are above kMinPixelsPerFrame, because adapting down would
// have exceeded the limit, we are said to have reached the limit already.
// This differs from the frame rate adaptation logic, which would have clamped
// to the limit in the first step and reported kLimitReached in the second
// step.
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status());
EXPECT_TRUE(adaptation.min_pixel_limit_reached());
}
-TEST(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// Go down twice, ensuring going back up is still a restricted resolution.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations);
int input_pixels = fake_stream.input_pixels();
// Go up once. The target is 5/3 and the max is 12/5 of the target.
const int target = (input_pixels * 5) / 3;
- fake_stream.ApplyAdaptation(adapter.GetAdaptationUp());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
EXPECT_EQ(static_cast<size_t>((target * 12) / 5),
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(static_cast<size_t>(target),
- adapter.source_restrictions().target_pixels_per_frame());
- EXPECT_EQ(absl::nullopt, adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
+ adapter_.source_restrictions().target_pixels_per_frame());
+ EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
}
-TEST(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToUnrestricted) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToUnrestricted) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// We are unrestricted by default and should not be able to adapt up.
EXPECT_EQ(Adaptation::Status::kLimitReached,
- adapter.GetAdaptationUp().status());
+ adapter_.GetAdaptationUp(resource_).status());
// If we go down once and then back up we should not have any restrictions.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
- fake_stream.ApplyAdaptation(adapter.GetAdaptationUp());
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
}
-TEST(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) {
+TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) {
const int kInputFps = 30;
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- adapter.SetInput(
- InputState(1280 * 720, kInputFps, kDefaultMinPixelsPerFrame));
- Adaptation adaptation = adapter.GetAdaptationDown();
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ input_state_provider_.SetInputState(1280 * 720, kInputFps,
+ kDefaultMinPixelsPerFrame);
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- adapter.ApplyAdaptation(adaptation);
+ adapter_.ApplyAdaptation(adaptation, nullptr);
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>((kInputFps * 2) / 3),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
-TEST(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToLimitReached) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, kMinFrameRateFps + 1,
- kDefaultMinPixelsPerFrame);
+TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToLimitReached) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720,
+ kMinFrameRateFps + 1, kDefaultMinPixelsPerFrame);
// If we are not yet at the limit and the next step would exceed it, the step
// is clamped such that we end up exactly on the limit.
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
EXPECT_EQ(static_cast<double>(kMinFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
// Having reached the limit, the next adaptation down is not valid.
EXPECT_EQ(Adaptation::Status::kLimitReached,
- adapter.GetAdaptationDown().status());
+ adapter_.GetAdaptationDown().status());
}
-TEST(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// Go down twice, ensuring going back up is still a restricted frame rate.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(2, adapter.adaptation_counters().fps_adaptations);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations);
int input_fps = fake_stream.input_fps();
// Go up once. The target is 3/2 of the input.
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>((input_fps * 3) / 2),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
-TEST(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToUnrestricted) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToUnrestricted) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// We are unrestricted by default and should not be able to adapt up.
EXPECT_EQ(Adaptation::Status::kLimitReached,
- adapter.GetAdaptationUp().status());
+ adapter_.GetAdaptationUp(resource_).status());
// If we go down once and then back up we should not have any restrictions.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
- fake_stream.ApplyAdaptation(adapter.GetAdaptationUp());
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
-}
-
-TEST(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) {
- webrtc::test::ScopedFieldTrials balanced_field_trials(
- BalancedFieldTrialConfig());
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::BALANCED);
- adapter.SetInput(InputState(kBalancedMediumResolutionPixels,
- kBalancedHighFrameRateFps,
- kDefaultMinPixelsPerFrame));
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
+}
+
+TEST_F(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) {
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ input_state_provider_.SetInputState(kBalancedMediumResolutionPixels,
+ kBalancedHighFrameRateFps,
+ kDefaultMinPixelsPerFrame);
// If our frame rate is higher than the frame rate associated with our
// resolution we should try to adapt to the frame rate associated with our
// resolution: kBalancedMediumFrameRateFps.
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- adapter.ApplyAdaptation(adaptation);
+ adapter_.ApplyAdaptation(adaptation, nullptr);
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>(kBalancedMediumFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(0, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
-}
-
-TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) {
- webrtc::test::ScopedFieldTrials balanced_field_trials(
- BalancedFieldTrialConfig());
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::BALANCED);
- FakeVideoStream fake_stream(&adapter, kBalancedHighResolutionPixels,
- kBalancedHighFrameRateFps,
- kDefaultMinPixelsPerFrame);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) {
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ FakeVideoStream fake_stream(
+ &adapter_, &input_state_provider_, kBalancedHighResolutionPixels,
+ kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame);
// If we are not below the current resolution's frame rate limit, we should
// adapt resolution according to "maintain-framerate" logic (three fifths).
//
@@ -303,35 +378,35 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) {
// does prevent the source from going higher, though, so it's technically not
// a NO-OP.
{
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
}
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>(kBalancedHighFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(0, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
// Verify "maintain-framerate" logic the second time we adapt: Frame rate
// restrictions remains the same and resolution goes down.
{
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
}
constexpr size_t kReducedPixelsFirstStep =
static_cast<size_t>((kBalancedHighResolutionPixels * 3) / 5);
EXPECT_EQ(kReducedPixelsFirstStep,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>(kBalancedHighFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
// If we adapt again, because the balanced settings' proposed frame rate is
// still kBalancedHighFrameRateFps, "maintain-framerate" will trigger again.
static_assert(kReducedPixelsFirstStep > kBalancedMediumResolutionPixels,
@@ -339,18 +414,18 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) {
"balanced setting resolution");
constexpr size_t kReducedPixelsSecondStep = (kReducedPixelsFirstStep * 3) / 5;
{
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
}
EXPECT_EQ(kReducedPixelsSecondStep,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
EXPECT_EQ(absl::nullopt,
- adapter.source_restrictions().target_pixels_per_frame());
+ adapter_.source_restrictions().target_pixels_per_frame());
EXPECT_EQ(static_cast<double>(kBalancedHighFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
// Testing when to adapt frame rate and when to adapt resolution is quite
@@ -360,14 +435,11 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) {
// adapt up we don't do it in the reverse order. Instead we always try to adapt
// frame rate first according to balanced settings' configs and only when the
// frame rate is already achieved do we adjust the resolution.
-TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) {
- webrtc::test::ScopedFieldTrials balanced_field_trials(
- BalancedFieldTrialConfig());
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::BALANCED);
- FakeVideoStream fake_stream(&adapter, kBalancedHighResolutionPixels,
- kBalancedHighFrameRateFps,
- kDefaultMinPixelsPerFrame);
+TEST_F(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) {
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ FakeVideoStream fake_stream(
+ &adapter_, &input_state_provider_, kBalancedHighResolutionPixels,
+ kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame);
// The desired starting point of this test is having adapted frame rate twice.
// This requires performing a number of adaptations.
constexpr size_t kReducedPixelsFirstStep =
@@ -385,41 +457,41 @@ TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) {
"settings' medium pixel configuration");
// The first adaptation should affect the frame rate: See
// Balanced_DecreaseResolution for explanation why.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(static_cast<double>(kBalancedHighFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
+ adapter_.source_restrictions().max_frame_rate());
// The next three adaptations affects the resolution, because we have to reach
// kBalancedMediumResolutionPixels before a lower frame rate is considered by
// BalancedDegradationSettings. The number three is derived from the
// static_asserts above.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(kReducedPixelsFirstStep,
- adapter.source_restrictions().max_pixels_per_frame());
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ adapter_.source_restrictions().max_pixels_per_frame());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(kReducedPixelsSecondStep,
- adapter.source_restrictions().max_pixels_per_frame());
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ adapter_.source_restrictions().max_pixels_per_frame());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(kReducedPixelsThirdStep,
- adapter.source_restrictions().max_pixels_per_frame());
+ adapter_.source_restrictions().max_pixels_per_frame());
// Thus, the next adaptation will reduce frame rate to
// kBalancedMediumFrameRateFps.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(static_cast<double>(kBalancedMediumFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(3, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(2, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations);
// Adapt up!
// While our resolution is in the medium-range, the frame rate associated with
// the next resolution configuration up ("high") is kBalancedHighFrameRateFps
// and "balanced" prefers adapting frame rate if not already applied.
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
EXPECT_EQ(static_cast<double>(kBalancedHighFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(3, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
// Now that we have already achieved the next frame rate up, we act according
// to "maintain-framerate". We go back up in resolution. Due to rounding
@@ -429,63 +501,60 @@ TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) {
constexpr size_t kReducedPixelsSecondStepUp =
(kReducedPixelsThirdStep * 5) / 3;
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
EXPECT_EQ(kReducedPixelsSecondStepUp,
- adapter.source_restrictions().target_pixels_per_frame());
- EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().target_pixels_per_frame());
+ EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
// Now that our resolution is back in the high-range, the next frame rate to
// try out is "unlimited".
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(absl::nullopt, adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(0, adapter.adaptation_counters().fps_adaptations);
+ EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations);
}
// Now only adapting resolution remains.
constexpr size_t kReducedPixelsFirstStepUp =
(kReducedPixelsSecondStepUp * 5) / 3;
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
EXPECT_EQ(kReducedPixelsFirstStepUp,
- adapter.source_restrictions().target_pixels_per_frame());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
- EXPECT_EQ(0, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().target_pixels_per_frame());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations);
}
// The last step up should make us entirely unrestricted.
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
}
}
-TEST(VideoStreamAdapterTest, Balanced_LimitReached) {
- webrtc::test::ScopedFieldTrials balanced_field_trials(
- BalancedFieldTrialConfig());
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::BALANCED);
- FakeVideoStream fake_stream(&adapter, kBalancedLowResolutionPixels,
- kBalancedLowFrameRateFps,
- kDefaultMinPixelsPerFrame);
+TEST_F(VideoStreamAdapterTest, Balanced_LimitReached) {
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ FakeVideoStream fake_stream(
+ &adapter_, &input_state_provider_, kBalancedLowResolutionPixels,
+ kBalancedLowFrameRateFps, kDefaultMinPixelsPerFrame);
// Attempting to adapt up while unrestricted should result in kLimitReached.
EXPECT_EQ(Adaptation::Status::kLimitReached,
- adapter.GetAdaptationUp().status());
+ adapter_.GetAdaptationUp(resource_).status());
// Adapting down once result in restricted frame rate, in this case we reach
// the lowest possible frame rate immediately: kBalancedLowFrameRateFps.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
EXPECT_EQ(static_cast<double>(kBalancedLowFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
// Any further adaptation must follow "maintain-framerate" rules (these are
// covered in more depth by the MaintainFramerate tests). This test does not
// assert exactly how resolution is adjusted, only that resolution always
@@ -494,117 +563,410 @@ TEST(VideoStreamAdapterTest, Balanced_LimitReached) {
bool did_reach_limit = false;
// If we have not reached the limit within 5 adaptations something is wrong...
for (int i = 0; i < 5; i++) {
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
if (adaptation.status() == Adaptation::Status::kLimitReached) {
did_reach_limit = true;
break;
}
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_LT(adapter.source_restrictions().max_pixels_per_frame().value(),
+ EXPECT_LT(adapter_.source_restrictions().max_pixels_per_frame().value(),
previous_resolution);
previous_resolution =
- adapter.source_restrictions().max_pixels_per_frame().value();
+ adapter_.source_restrictions().max_pixels_per_frame().value();
}
EXPECT_TRUE(did_reach_limit);
// Frame rate restrictions are the same as before.
EXPECT_EQ(static_cast<double>(kBalancedLowFrameRateFps),
- adapter.source_restrictions().max_frame_rate());
- EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations);
+ adapter_.source_restrictions().max_frame_rate());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
}
// kAwaitingPreviousAdaptation is only supported in "maintain-framerate".
-TEST(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationDown) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame));
+TEST_F(VideoStreamAdapterTest,
+ MaintainFramerate_AwaitingPreviousAdaptationDown) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
// Adapt down once, but don't update the input.
- adapter.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr);
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
{
// Having performed the adaptation, but not updated the input based on the
// new restrictions, adapting again in the same direction will not work.
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation,
adaptation.status());
}
}
// kAwaitingPreviousAdaptation is only supported in "maintain-framerate".
-TEST(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationUp) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+TEST_F(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationUp) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// Perform two adaptation down so that adapting up twice is possible.
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations);
// Adapt up once, but don't update the input.
- adapter.ApplyAdaptation(adapter.GetAdaptationUp());
- EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations);
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(resource_), nullptr);
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
{
// Having performed the adaptation, but not updated the input based on the
// new restrictions, adapting again in the same direction will not work.
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation,
adaptation.status());
}
}
-TEST(VideoStreamAdapterTest, PeekNextRestrictions) {
- VideoStreamAdapter adapter;
+TEST_F(VideoStreamAdapterTest,
+ MaintainResolution_AdaptsUpAfterSwitchingDegradationPreference) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down in fps for later.
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+ EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations);
+
+ // We should be able to adapt in framerate one last time after the change of
+ // degradation preference.
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ MaintainFramerate_AdaptsUpAfterSwitchingDegradationPreference) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down in resolution for later.
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+ EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations);
+
+ // We should be able to adapt in framerate one last time after the change of
+ // degradation preference.
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_));
+ EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ PendingResolutionIncreaseAllowsAdaptUpAfterSwitchToMaintainResolution) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt fps down so we can adapt up later in the test.
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ // Apply adaptation up but don't update input.
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(resource_), nullptr);
+ EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation,
+ adapter_.GetAdaptationUp(resource_).status());
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+}
+
+TEST_F(VideoStreamAdapterTest,
+ MaintainFramerate_AdaptsDownAfterSwitchingDegradationPreference) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down once, should change FPS.
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ // Adaptation down should apply after the degradation prefs change.
+ Adaptation adaptation = adapter_.GetAdaptationDown();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ fake_stream.ApplyAdaptation(adaptation);
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ MaintainResolution_AdaptsDownAfterSwitchingDegradationPreference) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down once, should change FPS.
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown());
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ Adaptation adaptation = adapter_.GetAdaptationDown();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ fake_stream.ApplyAdaptation(adaptation);
+
+ EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations);
+ EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations);
+}
+
+TEST_F(
+ VideoStreamAdapterTest,
+ PendingResolutionDecreaseAllowsAdaptDownAfterSwitchToMaintainResolution) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Apply adaptation but don't update the input.
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr);
+ EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation,
+ adapter_.GetAdaptationDown().status());
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ Adaptation adaptation = adapter_.GetAdaptationDown();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+}
+
+TEST_F(VideoStreamAdapterTest, RestrictionBroadcasted) {
+ FakeVideoStreamAdapterListner listener;
+ adapter_.AddRestrictionsListener(&listener);
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Not broadcast on invalid ApplyAdaptation.
+ {
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
+ adapter_.ApplyAdaptation(adaptation, nullptr);
+ EXPECT_EQ(0, listener.calls());
+ }
+
+ // Broadcast on ApplyAdaptation.
+ {
+ Adaptation adaptation = adapter_.GetAdaptationDown();
+ fake_stream.ApplyAdaptation(adaptation);
+ EXPECT_EQ(1, listener.calls());
+ EXPECT_EQ(adaptation.restrictions(), listener.last_restrictions());
+ }
+
+ // Broadcast on ClearRestrictions().
+ adapter_.ClearRestrictions();
+ EXPECT_EQ(2, listener.calls());
+ EXPECT_EQ(VideoSourceRestrictions(), listener.last_restrictions());
+}
+
+TEST_F(VideoStreamAdapterTest, AdaptationHasNextRestrcitions) {
// Any non-disabled DegradationPreference will do.
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- FakeVideoStream fake_stream(&adapter, 1280 * 720, 30,
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
kDefaultMinPixelsPerFrame);
// When adaptation is not possible.
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status());
- EXPECT_EQ(adapter.PeekNextRestrictions(adaptation),
- adapter.source_restrictions());
+ EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adaptation.counters().Total());
}
// When we adapt down.
{
- Adaptation adaptation = adapter.GetAdaptationDown();
+ Adaptation adaptation = adapter_.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- VideoSourceRestrictions next_restrictions =
- adapter.PeekNextRestrictions(adaptation);
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(next_restrictions, adapter.source_restrictions());
+ EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters());
}
// When we adapt up.
{
- Adaptation adaptation = adapter.GetAdaptationUp();
+ Adaptation adaptation = adapter_.GetAdaptationUp(resource_);
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- VideoSourceRestrictions next_restrictions =
- adapter.PeekNextRestrictions(adaptation);
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(next_restrictions, adapter.source_restrictions());
+ EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters());
}
}
-TEST(VideoStreamAdapterTest,
- SetDegradationPreferenceToOrFromBalancedClearsRestrictions) {
- VideoStreamAdapter adapter;
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame));
- adapter.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_NE(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_NE(0, adapter.adaptation_counters().Total());
+TEST_F(VideoStreamAdapterTest,
+ SetDegradationPreferenceToOrFromBalancedClearsRestrictions) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr);
+ EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_NE(0, adapter_.adaptation_counters().Total());
// Changing from non-balanced to balanced clears the restrictions.
- adapter.SetDegradationPreference(DegradationPreference::BALANCED);
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
// Apply adaptation again.
- adapter.ApplyAdaptation(adapter.GetAdaptationDown());
- EXPECT_NE(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_NE(0, adapter.adaptation_counters().Total());
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr);
+ EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_NE(0, adapter_.adaptation_counters().Total());
// Changing from balanced to non-balanced clears the restrictions.
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions());
- EXPECT_EQ(0, adapter.adaptation_counters().Total());
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions());
+ EXPECT_EQ(0, adapter_.adaptation_counters().Total());
+}
+
+TEST_F(VideoStreamAdapterTest,
+ GetAdaptDownResolutionAdaptsResolutionInMaintainFramerate) {
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+
+ auto adaptation = adapter_.GetAdaptDownResolution();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ EXPECT_EQ(1, adaptation.counters().resolution_adaptations);
+ EXPECT_EQ(0, adaptation.counters().fps_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ GetAdaptDownResolutionReturnsWithStatusInDisabledAndMaintainResolution) {
+ adapter_.SetDegradationPreference(DegradationPreference::DISABLED);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ EXPECT_EQ(Adaptation::Status::kAdaptationDisabled,
+ adapter_.GetAdaptDownResolution().status());
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
+ EXPECT_EQ(Adaptation::Status::kLimitReached,
+ adapter_.GetAdaptDownResolution().status());
+}
+
+TEST_F(VideoStreamAdapterTest,
+ GetAdaptDownResolutionAdaptsFpsAndResolutionInBalanced) {
+ // Note: This test depends on BALANCED implementation, but with current
+ // implementation and input state settings, BALANCED will adapt resolution and
+ // frame rate once.
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+
+ auto adaptation = adapter_.GetAdaptDownResolution();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ EXPECT_EQ(1, adaptation.counters().resolution_adaptations);
+ EXPECT_EQ(1, adaptation.counters().fps_adaptations);
+}
+
+TEST_F(
+ VideoStreamAdapterTest,
+ GetAdaptDownResolutionAdaptsOnlyResolutionIfFpsAlreadyAdapterInBalanced) {
+ // Note: This test depends on BALANCED implementation, but with current
+ // implementation and input state settings, BALANCED will adapt resolution
+ // only.
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ input_state_provider_.SetInputState(1280 * 720, 5, kDefaultMinPixelsPerFrame);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+
+ auto first_adaptation = adapter_.GetAdaptationDown();
+ fake_stream.ApplyAdaptation(first_adaptation);
+
+ auto adaptation = adapter_.GetAdaptDownResolution();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ EXPECT_EQ(1, adaptation.counters().resolution_adaptations);
+ EXPECT_EQ(first_adaptation.counters().fps_adaptations,
+ adaptation.counters().fps_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ GetAdaptDownResolutionAdaptsOnlyFpsIfResolutionLowInBalanced) {
+ // Note: This test depends on BALANCED implementation, but with current
+ // implementation and input state settings, BALANCED will adapt resolution
+ // only.
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED);
+ input_state_provider_.SetInputState(kDefaultMinPixelsPerFrame, 30,
+ kDefaultMinPixelsPerFrame);
+
+ auto adaptation = adapter_.GetAdaptDownResolution();
+ EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
+ EXPECT_EQ(0, adaptation.counters().resolution_adaptations);
+ EXPECT_EQ(1, adaptation.counters().fps_adaptations);
+}
+
+TEST_F(VideoStreamAdapterTest,
+ AdaptationDisabledStatusAlwaysWhenDegradationPreferenceDisabled) {
+ adapter_.SetDegradationPreference(DegradationPreference::DISABLED);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ EXPECT_EQ(Adaptation::Status::kAdaptationDisabled,
+ adapter_.GetAdaptationDown().status());
+ EXPECT_EQ(Adaptation::Status::kAdaptationDisabled,
+ adapter_.GetAdaptationUp(resource_).status());
+ EXPECT_EQ(Adaptation::Status::kAdaptationDisabled,
+ adapter_.GetAdaptDownResolution().status());
+}
+
+TEST_F(VideoStreamAdapterTest, AdaptationListenerReceivesSignalOnAdaptation) {
+ testing::StrictMock<MockAdaptationListener> adaptation_listener;
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ adapter_.AddAdaptationListener(&adaptation_listener);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ VideoSourceRestrictions restrictions_before;
+ VideoSourceRestrictions restrictions_after;
+ EXPECT_CALL(adaptation_listener, OnAdaptationApplied)
+ .WillOnce(DoAll(SaveArg<1>(&restrictions_before),
+ SaveArg<2>(&restrictions_after)));
+ auto adaptation = adapter_.GetAdaptationDown();
+ adapter_.ApplyAdaptation(adaptation, nullptr);
+ EXPECT_EQ(VideoSourceRestrictions(), restrictions_before);
+ EXPECT_EQ(adaptation.restrictions(), restrictions_after);
+
+ // Clean up.
+ adapter_.RemoveAdaptationListener(&adaptation_listener);
+}
+
+TEST_F(VideoStreamAdapterTest, AdaptationConstraintAllowsAdaptationsUp) {
+ testing::StrictMock<MockAdaptationConstraint> adaptation_constraint;
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ adapter_.AddAdaptationConstraint(&adaptation_constraint);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down once so we can adapt up later.
+ auto first_adaptation = adapter_.GetAdaptationDown();
+ fake_stream.ApplyAdaptation(first_adaptation);
+
+ EXPECT_CALL(
+ adaptation_constraint,
+ IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _, resource_))
+ .WillOnce(Return(true));
+ EXPECT_EQ(Adaptation::Status::kValid,
+ adapter_.GetAdaptationUp(resource_).status());
+ adapter_.RemoveAdaptationConstraint(&adaptation_constraint);
+}
+
+TEST_F(VideoStreamAdapterTest, AdaptationConstraintDisallowsAdaptationsUp) {
+ testing::StrictMock<MockAdaptationConstraint> adaptation_constraint;
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
+ adapter_.AddAdaptationConstraint(&adaptation_constraint);
+ input_state_provider_.SetInputState(1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30,
+ kDefaultMinPixelsPerFrame);
+ // Adapt down once so we can adapt up later.
+ auto first_adaptation = adapter_.GetAdaptationDown();
+ fake_stream.ApplyAdaptation(first_adaptation);
+
+ EXPECT_CALL(
+ adaptation_constraint,
+ IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _, resource_))
+ .WillOnce(Return(false));
+ EXPECT_EQ(Adaptation::Status::kRejectedByConstraint,
+ adapter_.GetAdaptationUp(resource_).status());
+ adapter_.RemoveAdaptationConstraint(&adaptation_constraint);
}
// Death tests.
@@ -614,21 +976,23 @@ TEST(VideoStreamAdapterTest,
TEST(VideoStreamAdapterDeathTest,
SetDegradationPreferenceInvalidatesAdaptations) {
- VideoStreamAdapter adapter;
+ FakeVideoStreamInputStateProvider input_state_provider;
+ VideoStreamAdapter adapter(&input_state_provider);
adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE);
- adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame));
+ input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame);
Adaptation adaptation = adapter.GetAdaptationDown();
adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- EXPECT_DEATH(adapter.ApplyAdaptation(adaptation), "");
+ EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), "");
}
-TEST(VideoStreamAdapterDeathTest, SetInputInvalidatesAdaptations) {
- VideoStreamAdapter adapter;
+TEST(VideoStreamAdapterDeathTest, AdaptDownInvalidatesAdaptations) {
+ FakeVideoStreamInputStateProvider input_state_provider;
+ VideoStreamAdapter adapter(&input_state_provider);
adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION);
- adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame));
+ input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame);
Adaptation adaptation = adapter.GetAdaptationDown();
- adapter.SetInput(InputState(1280 * 720, 31, kDefaultMinPixelsPerFrame));
- EXPECT_DEATH(adapter.PeekNextRestrictions(adaptation), "");
+ adapter.GetAdaptationDown();
+ EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), "");
}
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
diff --git a/call/adaptation/video_stream_input_state_provider.cc b/call/adaptation/video_stream_input_state_provider.cc
index eac30bbfac..3c0a7e3fa2 100644
--- a/call/adaptation/video_stream_input_state_provider.cc
+++ b/call/adaptation/video_stream_input_state_provider.cc
@@ -16,20 +16,22 @@ VideoStreamInputStateProvider::VideoStreamInputStateProvider(
VideoStreamEncoderObserver* frame_rate_provider)
: frame_rate_provider_(frame_rate_provider) {}
+VideoStreamInputStateProvider::~VideoStreamInputStateProvider() {}
+
void VideoStreamInputStateProvider::OnHasInputChanged(bool has_input) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
input_state_.set_has_input(has_input);
}
void VideoStreamInputStateProvider::OnFrameSizeObserved(int frame_size_pixels) {
RTC_DCHECK_GT(frame_size_pixels, 0);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
input_state_.set_frame_size_pixels(frame_size_pixels);
}
void VideoStreamInputStateProvider::OnEncoderSettingsChanged(
EncoderSettings encoder_settings) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
input_state_.set_video_codec_type(
encoder_settings.encoder_config().codec_type);
input_state_.set_min_pixels_per_frame(
@@ -39,7 +41,7 @@ void VideoStreamInputStateProvider::OnEncoderSettingsChanged(
VideoStreamInputState VideoStreamInputStateProvider::InputState() {
// GetInputFrameRate() is thread-safe.
int input_fps = frame_rate_provider_->GetInputFrameRate();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
input_state_.set_frames_per_second(input_fps);
return input_state_;
}
diff --git a/call/adaptation/video_stream_input_state_provider.h b/call/adaptation/video_stream_input_state_provider.h
index 7093e97fdd..f4a3e0bfa0 100644
--- a/call/adaptation/video_stream_input_state_provider.h
+++ b/call/adaptation/video_stream_input_state_provider.h
@@ -14,7 +14,7 @@
#include "api/video/video_stream_encoder_observer.h"
#include "call/adaptation/encoder_settings.h"
#include "call/adaptation/video_stream_input_state.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -22,17 +22,18 @@ class VideoStreamInputStateProvider {
public:
VideoStreamInputStateProvider(
VideoStreamEncoderObserver* frame_rate_provider);
+ virtual ~VideoStreamInputStateProvider();
void OnHasInputChanged(bool has_input);
void OnFrameSizeObserved(int frame_size_pixels);
void OnEncoderSettingsChanged(EncoderSettings encoder_settings);
- VideoStreamInputState InputState();
+ virtual VideoStreamInputState InputState();
private:
- mutable rtc::CriticalSection crit_;
+ Mutex mutex_;
VideoStreamEncoderObserver* const frame_rate_provider_;
- VideoStreamInputState input_state_ RTC_GUARDED_BY(crit_);
+ VideoStreamInputState input_state_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/call/audio_send_stream.cc b/call/audio_send_stream.cc
index ddcba031a7..765ece7eb9 100644
--- a/call/audio_send_stream.cc
+++ b/call/audio_send_stream.cc
@@ -75,6 +75,8 @@ std::string AudioSendStream::Config::SendCodecSpec::ToString() const {
ss << ", transport_cc_enabled: " << (transport_cc_enabled ? "true" : "false");
ss << ", cng_payload_type: "
<< (cng_payload_type ? rtc::ToString(*cng_payload_type) : "<unset>");
+ ss << ", red_payload_type: "
+ << (red_payload_type ? rtc::ToString(*red_payload_type) : "<unset>");
ss << ", payload_type: " << payload_type;
ss << ", format: " << rtc::ToString(format);
ss << '}';
diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h
index 86cea38938..d21dff4889 100644
--- a/call/audio_send_stream.h
+++ b/call/audio_send_stream.h
@@ -140,6 +140,7 @@ class AudioSendStream : public AudioSender {
bool nack_enabled = false;
bool transport_cc_enabled = false;
absl::optional<int> cng_payload_type;
+ absl::optional<int> red_payload_type;
// If unset, use the encoder's default target bitrate.
absl::optional<int> target_bitrate_bps;
};
diff --git a/call/bitrate_allocator_unittest.cc b/call/bitrate_allocator_unittest.cc
index 1479a4714a..00fb236948 100644
--- a/call/bitrate_allocator_unittest.cc
+++ b/call/bitrate_allocator_unittest.cc
@@ -47,7 +47,10 @@ auto AllocationLimitsEq(uint32_t min_allocatable_rate_bps,
class MockLimitObserver : public BitrateAllocator::LimitObserver {
public:
- MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits));
+ MOCK_METHOD(void,
+ OnAllocationLimitsChanged,
+ (BitrateAllocationLimits),
+ (override));
};
class TestBitrateObserver : public BitrateAllocatorObserver {
diff --git a/call/bitrate_estimator_tests.cc b/call/bitrate_estimator_tests.cc
index 50da12bbdf..cd052dc331 100644
--- a/call/bitrate_estimator_tests.cc
+++ b/call/bitrate_estimator_tests.cc
@@ -19,6 +19,7 @@
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/thread_annotations.h"
#include "test/call_test.h"
@@ -49,7 +50,7 @@ class LogObserver {
class Callback : public rtc::LogSink {
public:
void OnLogMessage(const std::string& message) override {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
// Ignore log lines that are due to missing AST extensions, these are
// logged when we switch back from AST to TOF until the wrapping bitrate
// estimator gives up on using AST.
@@ -78,15 +79,15 @@ class LogObserver {
bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeoutMs); }
void PushExpectedLogLine(const std::string& expected_log_line) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
expected_log_lines_.push_back(expected_log_line);
}
private:
typedef std::list<std::string> Strings;
- rtc::CriticalSection crit_sect_;
- Strings received_log_lines_ RTC_GUARDED_BY(crit_sect_);
- Strings expected_log_lines_ RTC_GUARDED_BY(crit_sect_);
+ Mutex mutex_;
+ Strings received_log_lines_ RTC_GUARDED_BY(mutex_);
+ Strings expected_log_lines_ RTC_GUARDED_BY(mutex_);
rtc::Event done_;
};
diff --git a/call/call.cc b/call/call.cc
index 4068db9f00..ace83bee9f 100644
--- a/call/call.cc
+++ b/call/call.cc
@@ -25,6 +25,7 @@
#include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
#include "audio/audio_state.h"
+#include "call/adaptation/broadcast_resource_listener.h"
#include "call/bitrate_allocator.h"
#include "call/flexfec_receive_stream_impl.h"
#include "call/receive_time_calculator.h"
@@ -49,8 +50,8 @@
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/synchronization/rw_lock_wrapper.h"
#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
@@ -168,6 +169,47 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() {
namespace internal {
+// Wraps an injected resource in a BroadcastResourceListener and handles adding
+// and removing adapter resources to individual VideoSendStreams.
+class ResourceVideoSendStreamForwarder {
+ public:
+ ResourceVideoSendStreamForwarder(
+ rtc::scoped_refptr<webrtc::Resource> resource)
+ : broadcast_resource_listener_(resource) {
+ broadcast_resource_listener_.StartListening();
+ }
+ ~ResourceVideoSendStreamForwarder() {
+ RTC_DCHECK(adapter_resources_.empty());
+ broadcast_resource_listener_.StopListening();
+ }
+
+ rtc::scoped_refptr<webrtc::Resource> Resource() const {
+ return broadcast_resource_listener_.SourceResource();
+ }
+
+ void OnCreateVideoSendStream(VideoSendStream* video_send_stream) {
+ RTC_DCHECK(adapter_resources_.find(video_send_stream) ==
+ adapter_resources_.end());
+ auto adapter_resource =
+ broadcast_resource_listener_.CreateAdapterResource();
+ video_send_stream->AddAdaptationResource(adapter_resource);
+ adapter_resources_.insert(
+ std::make_pair(video_send_stream, adapter_resource));
+ }
+
+ void OnDestroyVideoSendStream(VideoSendStream* video_send_stream) {
+ auto it = adapter_resources_.find(video_send_stream);
+ RTC_DCHECK(it != adapter_resources_.end());
+ broadcast_resource_listener_.RemoveAdapterResource(it->second);
+ adapter_resources_.erase(it);
+ }
+
+ private:
+ BroadcastResourceListener broadcast_resource_listener_;
+ std::map<VideoSendStream*, rtc::scoped_refptr<webrtc::Resource>>
+ adapter_resources_;
+};
+
class Call final : public webrtc::Call,
public PacketReceiver,
public RecoveredPacketReceiver,
@@ -177,7 +219,7 @@ class Call final : public webrtc::Call,
Call(Clock* clock,
const Call::Config& config,
std::unique_ptr<RtpTransportControllerSendInterface> transport_send,
- std::unique_ptr<ProcessThread> module_process_thread,
+ rtc::scoped_refptr<SharedModuleThread> module_process_thread,
TaskQueueFactory* task_queue_factory);
~Call() override;
@@ -212,6 +254,8 @@ class Call final : public webrtc::Call,
void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
RtpTransportControllerSendInterface* GetTransportControllerSend() override;
Stats GetStats() const override;
@@ -243,54 +287,54 @@ class Call final : public webrtc::Call,
private:
DeliveryStatus DeliverRtcp(MediaType media_type,
const uint8_t* packet,
- size_t length);
+ size_t length)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
DeliveryStatus DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us);
+ int64_t packet_time_us)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void ConfigureSync(const std::string& sync_group)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet,
MediaType media_type)
- RTC_SHARED_LOCKS_REQUIRED(receive_crit_);
+ RTC_SHARED_LOCKS_REQUIRED(worker_thread_);
void UpdateSendHistograms(Timestamp first_sent_packet)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&bitrate_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void UpdateReceiveHistograms();
void UpdateHistograms();
void UpdateAggregateNetworkState();
void RegisterRateObserver();
- rtc::TaskQueue* network_queue() const {
+ rtc::TaskQueue* send_transport_queue() const {
return transport_send_ptr_->GetWorkerQueue();
}
Clock* const clock_;
TaskQueueFactory* const task_queue_factory_;
+ TaskQueueBase* const worker_thread_;
const int num_cpu_cores_;
- const std::unique_ptr<ProcessThread> module_process_thread_;
+ const rtc::scoped_refptr<SharedModuleThread> module_process_thread_;
const std::unique_ptr<CallStats> call_stats_;
const std::unique_ptr<BitrateAllocator> bitrate_allocator_;
Call::Config config_;
- SequenceChecker configuration_sequence_checker_;
- SequenceChecker worker_sequence_checker_;
NetworkState audio_network_state_;
NetworkState video_network_state_;
- bool aggregate_network_up_ RTC_GUARDED_BY(configuration_sequence_checker_);
+ bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_);
- std::unique_ptr<RWLockWrapper> receive_crit_;
// Audio, Video, and FlexFEC receive streams are owned by the client that
// creates them.
std::set<AudioReceiveStream*> audio_receive_streams_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::set<VideoReceiveStream2*> video_receive_streams_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::map<std::string, AudioReceiveStream*> sync_stream_mapping_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
// TODO(nisse): Should eventually be injected at creation,
// with a single object in the bundled case.
@@ -324,25 +368,26 @@ class Call final : public webrtc::Call,
const bool use_send_side_bwe;
};
std::map<uint32_t, ReceiveRtpConfig> receive_rtp_config_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
- std::unique_ptr<RWLockWrapper> send_crit_;
// Audio and Video send streams are owned by the client that creates them.
std::map<uint32_t, AudioSendStream*> audio_send_ssrcs_
- RTC_GUARDED_BY(send_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::map<uint32_t, VideoSendStream*> video_send_ssrcs_
- RTC_GUARDED_BY(send_crit_);
- std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(send_crit_);
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(worker_thread_);
+
+ // Each forwarder wraps an adaptation resource that was added to the call.
+ std::vector<std::unique_ptr<ResourceVideoSendStreamForwarder>>
+ adaptation_resource_forwarders_ RTC_GUARDED_BY(worker_thread_);
using RtpStateMap = std::map<uint32_t, RtpState>;
- RtpStateMap suspended_audio_send_ssrcs_
- RTC_GUARDED_BY(configuration_sequence_checker_);
- RtpStateMap suspended_video_send_ssrcs_
- RTC_GUARDED_BY(configuration_sequence_checker_);
+ RtpStateMap suspended_audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_);
+ RtpStateMap suspended_video_send_ssrcs_ RTC_GUARDED_BY(worker_thread_);
using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>;
RtpPayloadStateMap suspended_video_payload_states_
- RTC_GUARDED_BY(configuration_sequence_checker_);
+ RTC_GUARDED_BY(worker_thread_);
webrtc::RtcEventLog* event_log_;
@@ -358,17 +403,14 @@ class Call final : public webrtc::Call,
absl::optional<int64_t> first_received_rtp_video_ms_;
absl::optional<int64_t> last_received_rtp_video_ms_;
- rtc::CriticalSection last_bandwidth_bps_crit_;
- uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(&last_bandwidth_bps_crit_);
+ uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(worker_thread_);
// TODO(holmer): Remove this lock once BitrateController no longer calls
// OnNetworkChanged from multiple threads.
- rtc::CriticalSection bitrate_crit_;
- uint32_t min_allocated_send_bitrate_bps_
- RTC_GUARDED_BY(&worker_sequence_checker_);
- uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(&bitrate_crit_);
+ uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
+ uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
AvgCounter estimated_send_bitrate_kbps_counter_
- RTC_GUARDED_BY(&bitrate_crit_);
- AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(&bitrate_crit_);
+ RTC_GUARDED_BY(worker_thread_);
+ AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(worker_thread_);
ReceiveSideCongestionController receive_side_cc_;
@@ -377,6 +419,11 @@ class Call final : public webrtc::Call,
const std::unique_ptr<SendDelayStats> video_send_delay_stats_;
const int64_t start_ms_;
+ // Note that |task_safety_| needs to be at a greater scope than the task queue
+ // owned by |transport_send_| since calls might arrive on the network thread
+ // while Call is being deleted and the task queue is being torn down.
+ ScopedTaskSafety task_safety_;
+
// Caches transport_send_.get(), to avoid racing with destructor.
// Note that this is declared before transport_send_ to ensure that it is not
// invalidated until no more tasks can be running on the transport_send_ task
@@ -386,8 +433,8 @@ class Call final : public webrtc::Call,
// last ensures that it is destroyed first and any running tasks are finished.
std::unique_ptr<RtpTransportControllerSendInterface> transport_send_;
- bool is_target_rate_observer_registered_
- RTC_GUARDED_BY(&configuration_sequence_checker_) = false;
+ bool is_target_rate_observer_registered_ RTC_GUARDED_BY(worker_thread_) =
+ false;
RTC_DISALLOW_COPY_AND_ASSIGN(Call);
};
@@ -407,14 +454,21 @@ std::string Call::Stats::ToString(int64_t time_ms) const {
}
Call* Call::Create(const Call::Config& config) {
- return Create(config, Clock::GetRealTimeClock(),
- ProcessThread::Create("ModuleProcessThread"),
+ rtc::scoped_refptr<SharedModuleThread> call_thread =
+ SharedModuleThread::Create(ProcessThread::Create("ModuleProcessThread"),
+ nullptr);
+ return Create(config, std::move(call_thread));
+}
+
+Call* Call::Create(const Call::Config& config,
+ rtc::scoped_refptr<SharedModuleThread> call_thread) {
+ return Create(config, Clock::GetRealTimeClock(), std::move(call_thread),
ProcessThread::Create("PacerThread"));
}
Call* Call::Create(const Call::Config& config,
Clock* clock,
- std::unique_ptr<ProcessThread> call_thread,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread) {
RTC_DCHECK(config.task_queue_factory);
return new internal::Call(
@@ -426,6 +480,98 @@ Call* Call::Create(const Call::Config& config,
std::move(call_thread), config.task_queue_factory);
}
+class SharedModuleThread::Impl {
+ public:
+ Impl(std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining)
+ : module_thread_(std::move(process_thread)),
+ on_one_ref_remaining_(std::move(on_one_ref_remaining)) {}
+
+ void EnsureStarted() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (started_)
+ return;
+ started_ = true;
+ module_thread_->Start();
+ }
+
+ ProcessThread* process_thread() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return module_thread_.get();
+ }
+
+ void AddRef() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ ++ref_count_;
+ }
+
+ rtc::RefCountReleaseStatus Release() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ --ref_count_;
+
+ if (ref_count_ == 0) {
+ module_thread_->Stop();
+ return rtc::RefCountReleaseStatus::kDroppedLastRef;
+ }
+
+ if (ref_count_ == 1 && on_one_ref_remaining_) {
+ auto moved_fn = std::move(on_one_ref_remaining_);
+ // NOTE: after this function returns, chances are that |this| has been
+ // deleted - do not touch any member variables.
+ // If the owner of the last reference implements a lambda that releases
+ // that last reference inside of the callback (which is legal according
+ // to this implementation), we will recursively enter Release() above,
+ // call Stop() and release the last reference.
+ moved_fn();
+ }
+
+ return rtc::RefCountReleaseStatus::kOtherRefsRemained;
+ }
+
+ private:
+ SequenceChecker sequence_checker_;
+ mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0;
+ std::unique_ptr<ProcessThread> const module_thread_;
+ std::function<void()> const on_one_ref_remaining_;
+ bool started_ = false;
+};
+
+SharedModuleThread::SharedModuleThread(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining)
+ : impl_(std::make_unique<Impl>(std::move(process_thread),
+ std::move(on_one_ref_remaining))) {}
+
+SharedModuleThread::~SharedModuleThread() = default;
+
+// static
+
+rtc::scoped_refptr<SharedModuleThread> SharedModuleThread::Create(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining) {
+ return new SharedModuleThread(std::move(process_thread),
+ std::move(on_one_ref_remaining));
+}
+
+void SharedModuleThread::EnsureStarted() {
+ impl_->EnsureStarted();
+}
+
+ProcessThread* SharedModuleThread::process_thread() {
+ return impl_->process_thread();
+}
+
+void SharedModuleThread::AddRef() const {
+ impl_->AddRef();
+}
+
+rtc::RefCountReleaseStatus SharedModuleThread::Release() const {
+ auto ret = impl_->Release();
+ if (ret == rtc::RefCountReleaseStatus::kDroppedLastRef)
+ delete this;
+ return ret;
+}
+
// This method here to avoid subclasses has to implement this method.
// Call perf test will use Internal::Call::CreateVideoSendStream() to inject
// FecController.
@@ -441,20 +587,19 @@ namespace internal {
Call::Call(Clock* clock,
const Call::Config& config,
std::unique_ptr<RtpTransportControllerSendInterface> transport_send,
- std::unique_ptr<ProcessThread> module_process_thread,
+ rtc::scoped_refptr<SharedModuleThread> module_process_thread,
TaskQueueFactory* task_queue_factory)
: clock_(clock),
task_queue_factory_(task_queue_factory),
+ worker_thread_(GetCurrentTaskQueueOrThread()),
num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
module_process_thread_(std::move(module_process_thread)),
- call_stats_(new CallStats(clock_, GetCurrentTaskQueueOrThread())),
+ call_stats_(new CallStats(clock_, worker_thread_)),
bitrate_allocator_(new BitrateAllocator(this)),
config_(config),
audio_network_state_(kNetworkDown),
video_network_state_(kNetworkDown),
aggregate_network_up_(false),
- receive_crit_(RWLockWrapper::CreateRWLock()),
- send_crit_(RWLockWrapper::CreateRWLock()),
event_log_(config.event_log),
received_bytes_per_second_counter_(clock_, nullptr, true),
received_audio_bytes_per_second_counter_(clock_, nullptr, true),
@@ -473,17 +618,18 @@ Call::Call(Clock* clock,
transport_send_(std::move(transport_send)) {
RTC_DCHECK(config.event_log != nullptr);
RTC_DCHECK(config.trials != nullptr);
- worker_sequence_checker_.Detach();
+ RTC_DCHECK(worker_thread_->IsCurrent());
call_stats_->RegisterStatsObserver(&receive_side_cc_);
- module_process_thread_->RegisterModule(
+ module_process_thread_->process_thread()->RegisterModule(
receive_side_cc_.GetRemoteBitrateEstimator(true), RTC_FROM_HERE);
- module_process_thread_->RegisterModule(&receive_side_cc_, RTC_FROM_HERE);
+ module_process_thread_->process_thread()->RegisterModule(&receive_side_cc_,
+ RTC_FROM_HERE);
}
Call::~Call() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_CHECK(audio_send_ssrcs_.empty());
RTC_CHECK(video_send_ssrcs_.empty());
@@ -491,10 +637,9 @@ Call::~Call() {
RTC_CHECK(audio_receive_streams_.empty());
RTC_CHECK(video_receive_streams_.empty());
- module_process_thread_->Stop();
- module_process_thread_->DeRegisterModule(
+ module_process_thread_->process_thread()->DeRegisterModule(
receive_side_cc_.GetRemoteBitrateEstimator(true));
- module_process_thread_->DeRegisterModule(&receive_side_cc_);
+ module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_);
call_stats_->DeregisterStatsObserver(&receive_side_cc_);
absl::optional<Timestamp> first_sent_packet_ms =
@@ -503,7 +648,6 @@ Call::~Call() {
// Only update histograms after process threads have been shut down, so that
// they won't try to concurrently update stats.
if (first_sent_packet_ms) {
- rtc::CritScope lock(&bitrate_crit_);
UpdateSendHistograms(*first_sent_packet_ms);
}
@@ -512,7 +656,7 @@ Call::~Call() {
}
void Call::RegisterRateObserver() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
if (is_target_rate_observer_registered_)
return;
@@ -523,11 +667,11 @@ void Call::RegisterRateObserver() {
// off being kicked off on request rather than in the ctor.
transport_send_ptr_->RegisterTargetTransferRateObserver(this);
- module_process_thread_->Start();
+ module_process_thread_->EnsureStarted();
}
void Call::SetClientBitratePreferences(const BitrateSettings& preferences) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
GetTransportControllerSend()->SetClientBitratePreferences(preferences);
}
@@ -609,14 +753,14 @@ void Call::UpdateReceiveHistograms() {
}
PacketReceiver* Call::Receiver() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
return this;
}
webrtc::AudioSendStream* Call::CreateAudioSendStream(
const webrtc::AudioSendStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
@@ -632,30 +776,26 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream(
AudioSendStream* send_stream = new AudioSendStream(
clock_, config, config_.audio_state, task_queue_factory_,
- module_process_thread_.get(), transport_send_ptr_,
+ module_process_thread_->process_thread(), transport_send_ptr_,
bitrate_allocator_.get(), event_log_, call_stats_->AsRtcpRttStats(),
suspended_rtp_state);
- {
- WriteLockScoped write_lock(*send_crit_);
- RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) ==
- audio_send_ssrcs_.end());
- audio_send_ssrcs_[config.rtp.ssrc] = send_stream;
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == config.rtp.ssrc) {
- stream->AssociateSendStream(send_stream);
- }
+ RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) ==
+ audio_send_ssrcs_.end());
+ audio_send_ssrcs_[config.rtp.ssrc] = send_stream;
+
+ for (AudioReceiveStream* stream : audio_receive_streams_) {
+ if (stream->config().rtp.local_ssrc == config.rtp.ssrc) {
+ stream->AssociateSendStream(send_stream);
}
}
+
UpdateAggregateNetworkState();
return send_stream;
}
void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyAudioSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(send_stream != nullptr);
send_stream->Stop();
@@ -664,19 +804,16 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
webrtc::internal::AudioSendStream* audio_send_stream =
static_cast<webrtc::internal::AudioSendStream*>(send_stream);
suspended_audio_send_ssrcs_[ssrc] = audio_send_stream->GetRtpState();
- {
- WriteLockScoped write_lock(*send_crit_);
- size_t num_deleted = audio_send_ssrcs_.erase(ssrc);
- RTC_DCHECK_EQ(1, num_deleted);
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == ssrc) {
- stream->AssociateSendStream(nullptr);
- }
+
+ size_t num_deleted = audio_send_ssrcs_.erase(ssrc);
+ RTC_DCHECK_EQ(1, num_deleted);
+
+ for (AudioReceiveStream* stream : audio_receive_streams_) {
+ if (stream->config().rtp.local_ssrc == ssrc) {
+ stream->AssociateSendStream(nullptr);
}
}
+
UpdateAggregateNetworkState();
delete send_stream;
}
@@ -684,29 +821,25 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
event_log_->Log(std::make_unique<RtcEventAudioReceiveStreamConfig>(
CreateRtcLogStreamConfig(config)));
AudioReceiveStream* receive_stream = new AudioReceiveStream(
clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(),
- module_process_thread_.get(), config_.neteq_factory, config,
+ module_process_thread_->process_thread(), config_.neteq_factory, config,
config_.audio_state, event_log_);
- {
- WriteLockScoped write_lock(*receive_crit_);
- receive_rtp_config_.emplace(config.rtp.remote_ssrc,
- ReceiveRtpConfig(config));
- audio_receive_streams_.insert(receive_stream);
- ConfigureSync(config.sync_group);
- }
- {
- ReadLockScoped read_lock(*send_crit_);
- auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc);
- if (it != audio_send_ssrcs_.end()) {
- receive_stream->AssociateSendStream(it->second);
- }
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
+ audio_receive_streams_.insert(receive_stream);
+
+ ConfigureSync(config.sync_group);
+
+ auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc);
+ if (it != audio_send_ssrcs_.end()) {
+ receive_stream->AssociateSendStream(it->second);
}
+
UpdateAggregateNetworkState();
return receive_stream;
}
@@ -714,26 +847,24 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
void Call::DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
webrtc::internal::AudioReceiveStream* audio_receive_stream =
static_cast<webrtc::internal::AudioReceiveStream*>(receive_stream);
- {
- WriteLockScoped write_lock(*receive_crit_);
- const AudioReceiveStream::Config& config = audio_receive_stream->config();
- uint32_t ssrc = config.rtp.remote_ssrc;
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
- ->RemoveStream(ssrc);
- audio_receive_streams_.erase(audio_receive_stream);
- const std::string& sync_group = audio_receive_stream->config().sync_group;
- const auto it = sync_stream_mapping_.find(sync_group);
- if (it != sync_stream_mapping_.end() &&
- it->second == audio_receive_stream) {
- sync_stream_mapping_.erase(it);
- ConfigureSync(sync_group);
- }
- receive_rtp_config_.erase(ssrc);
+
+ const AudioReceiveStream::Config& config = audio_receive_stream->config();
+ uint32_t ssrc = config.rtp.remote_ssrc;
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ ->RemoveStream(ssrc);
+ audio_receive_streams_.erase(audio_receive_stream);
+ const std::string& sync_group = audio_receive_stream->config().sync_group;
+ const auto it = sync_stream_mapping_.find(sync_group);
+ if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) {
+ sync_stream_mapping_.erase(it);
+ ConfigureSync(sync_group);
}
+ receive_rtp_config_.erase(ssrc);
+
UpdateAggregateNetworkState();
delete audio_receive_stream;
}
@@ -744,7 +875,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
VideoEncoderConfig encoder_config,
std::unique_ptr<FecController> fec_controller) {
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
@@ -761,20 +892,22 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
std::vector<uint32_t> ssrcs = config.rtp.ssrcs;
VideoSendStream* send_stream = new VideoSendStream(
- clock_, num_cpu_cores_, module_process_thread_.get(), task_queue_factory_,
- call_stats_->AsRtcpRttStats(), transport_send_ptr_,
+ clock_, num_cpu_cores_, module_process_thread_->process_thread(),
+ task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_,
bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_,
std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_,
suspended_video_payload_states_, std::move(fec_controller));
- {
- WriteLockScoped write_lock(*send_crit_);
- for (uint32_t ssrc : ssrcs) {
- RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
- video_send_ssrcs_[ssrc] = send_stream;
- }
- video_send_streams_.insert(send_stream);
+ for (uint32_t ssrc : ssrcs) {
+ RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
+ video_send_ssrcs_[ssrc] = send_stream;
}
+ video_send_streams_.insert(send_stream);
+ // Forward resources that were previously added to the call to the new stream.
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
+ resource_forwarder->OnCreateVideoSendStream(send_stream);
+ }
+
UpdateAggregateNetworkState();
return send_stream;
@@ -797,24 +930,27 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoSendStream");
RTC_DCHECK(send_stream != nullptr);
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
send_stream->Stop();
VideoSendStream* send_stream_impl = nullptr;
- {
- WriteLockScoped write_lock(*send_crit_);
- auto it = video_send_ssrcs_.begin();
- while (it != video_send_ssrcs_.end()) {
- if (it->second == static_cast<VideoSendStream*>(send_stream)) {
- send_stream_impl = it->second;
- video_send_ssrcs_.erase(it++);
- } else {
- ++it;
- }
+
+ auto it = video_send_ssrcs_.begin();
+ while (it != video_send_ssrcs_.end()) {
+ if (it->second == static_cast<VideoSendStream*>(send_stream)) {
+ send_stream_impl = it->second;
+ video_send_ssrcs_.erase(it++);
+ } else {
+ ++it;
}
- video_send_streams_.erase(send_stream_impl);
}
+ // Stop forwarding resources to the stream being destroyed.
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
+ resource_forwarder->OnDestroyVideoSendStream(send_stream_impl);
+ }
+ video_send_streams_.erase(send_stream_impl);
+
RTC_CHECK(send_stream_impl != nullptr);
VideoSendStream::RtpStateMap rtp_states;
@@ -835,7 +971,7 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
webrtc::VideoReceiveStream::Config configuration) {
TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
receive_side_cc_.SetSendPeriodicFeedback(
SendPeriodicFeedback(configuration.rtp.extensions));
@@ -847,25 +983,21 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
VideoReceiveStream2* receive_stream = new VideoReceiveStream2(
task_queue_factory_, current, &video_receiver_controller_, num_cpu_cores_,
transport_send_ptr_->packet_router(), std::move(configuration),
- module_process_thread_.get(), call_stats_.get(), clock_,
+ module_process_thread_->process_thread(), call_stats_.get(), clock_,
new VCMTiming(clock_));
const webrtc::VideoReceiveStream::Config& config = receive_stream->config();
- {
- WriteLockScoped write_lock(*receive_crit_);
- if (config.rtp.rtx_ssrc) {
- // We record identical config for the rtx stream as for the main
- // stream. Since the transport_send_cc negotiation is per payload
- // type, we may get an incorrect value for the rtx stream, but
- // that is unlikely to matter in practice.
- receive_rtp_config_.emplace(config.rtp.rtx_ssrc,
- ReceiveRtpConfig(config));
- }
- receive_rtp_config_.emplace(config.rtp.remote_ssrc,
- ReceiveRtpConfig(config));
- video_receive_streams_.insert(receive_stream);
- ConfigureSync(config.sync_group);
+ if (config.rtp.rtx_ssrc) {
+ // We record identical config for the rtx stream as for the main
+ // stream. Since the transport_send_cc negotiation is per payload
+ // type, we may get an incorrect value for the rtx stream, but
+ // that is unlikely to matter in practice.
+ receive_rtp_config_.emplace(config.rtp.rtx_ssrc, ReceiveRtpConfig(config));
}
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
+ video_receive_streams_.insert(receive_stream);
+ ConfigureSync(config.sync_group);
+
receive_stream->SignalNetworkState(video_network_state_);
UpdateAggregateNetworkState();
event_log_->Log(std::make_unique<RtcEventVideoReceiveStreamConfig>(
@@ -876,22 +1008,20 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
void Call::DestroyVideoReceiveStream(
webrtc::VideoReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
VideoReceiveStream2* receive_stream_impl =
static_cast<VideoReceiveStream2*>(receive_stream);
const VideoReceiveStream::Config& config = receive_stream_impl->config();
- {
- WriteLockScoped write_lock(*receive_crit_);
- // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
- // separate SSRC there can be either one or two.
- receive_rtp_config_.erase(config.rtp.remote_ssrc);
- if (config.rtp.rtx_ssrc) {
- receive_rtp_config_.erase(config.rtp.rtx_ssrc);
- }
- video_receive_streams_.erase(receive_stream_impl);
- ConfigureSync(config.sync_group);
+
+ // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
+ // separate SSRC there can be either one or two.
+ receive_rtp_config_.erase(config.rtp.remote_ssrc);
+ if (config.rtp.rtx_ssrc) {
+ receive_rtp_config_.erase(config.rtp.rtx_ssrc);
}
+ video_receive_streams_.erase(receive_stream_impl);
+ ConfigureSync(config.sync_group);
receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
->RemoveStream(config.rtp.remote_ssrc);
@@ -903,30 +1033,25 @@ void Call::DestroyVideoReceiveStream(
FlexfecReceiveStream* Call::CreateFlexfecReceiveStream(
const FlexfecReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateFlexfecReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RecoveredPacketReceiver* recovered_packet_receiver = this;
FlexfecReceiveStreamImpl* receive_stream;
- {
- WriteLockScoped write_lock(*receive_crit_);
- // Unlike the video and audio receive streams,
- // FlexfecReceiveStream implements RtpPacketSinkInterface itself,
- // and hence its constructor passes its |this| pointer to
- // video_receiver_controller_->CreateStream(). Calling the
- // constructor while holding |receive_crit_| ensures that we don't
- // call OnRtpPacket until the constructor is finished and the
- // object is in a valid state.
- // TODO(nisse): Fix constructor so that it can be moved outside of
- // this locked scope.
- receive_stream = new FlexfecReceiveStreamImpl(
- clock_, &video_receiver_controller_, config, recovered_packet_receiver,
- call_stats_->AsRtcpRttStats(), module_process_thread_.get());
-
- RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) ==
- receive_rtp_config_.end());
- receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config));
- }
+
+ // Unlike the video and audio receive streams, FlexfecReceiveStream implements
+ // RtpPacketSinkInterface itself, and hence its constructor passes its |this|
+ // pointer to video_receiver_controller_->CreateStream(). Calling the
+ // constructor while on the worker thread ensures that we don't call
+ // OnRtpPacket until the constructor is finished and the object is
+ // in a valid state, since OnRtpPacket runs on the same thread.
+ receive_stream = new FlexfecReceiveStreamImpl(
+ clock_, &video_receiver_controller_, config, recovered_packet_receiver,
+ call_stats_->AsRtcpRttStats(), module_process_thread_->process_thread());
+
+ RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) ==
+ receive_rtp_config_.end());
+ receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config));
// TODO(brandtr): Store config in RtcEventLog here.
@@ -935,39 +1060,37 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream(
void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyFlexfecReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
- {
- WriteLockScoped write_lock(*receive_crit_);
-
- const FlexfecReceiveStream::Config& config = receive_stream->GetConfig();
- uint32_t ssrc = config.remote_ssrc;
- receive_rtp_config_.erase(ssrc);
+ const FlexfecReceiveStream::Config& config = receive_stream->GetConfig();
+ uint32_t ssrc = config.remote_ssrc;
+ receive_rtp_config_.erase(ssrc);
- // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be
- // destroyed.
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
- ->RemoveStream(ssrc);
- }
+ // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be
+ // destroyed.
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ ->RemoveStream(ssrc);
delete receive_stream;
}
+void Call::AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ adaptation_resource_forwarders_.push_back(
+ std::make_unique<ResourceVideoSendStreamForwarder>(resource));
+ const auto& resource_forwarder = adaptation_resource_forwarders_.back();
+ for (VideoSendStream* send_stream : video_send_streams_) {
+ resource_forwarder->OnCreateVideoSendStream(send_stream);
+ }
+}
+
RtpTransportControllerSendInterface* Call::GetTransportControllerSend() {
return transport_send_ptr_;
}
Call::Stats Call::GetStats() const {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
-
- // TODO(tommi): The following stats are managed on the process thread:
- // - pacer_delay_ms (PacedSender::Process)
- // - rtt_ms
- // - recv_bandwidth_bps
- // These are delivered on the network TQ:
- // - send_bandwidth_bps (see OnTargetTransferRate)
- // - max_padding_bitrate_bps (see OnAllocationLimitsChanged)
+ RTC_DCHECK_RUN_ON(worker_thread_);
Stats stats;
// TODO(srte): It is unclear if we only want to report queues if network is
@@ -983,22 +1106,14 @@ Call::Stats Call::GetStats() const {
receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate(
&ssrcs, &recv_bandwidth);
stats.recv_bandwidth_bps = recv_bandwidth;
-
- {
- rtc::CritScope cs(&last_bandwidth_bps_crit_);
- stats.send_bandwidth_bps = last_bandwidth_bps_;
- }
-
- {
- rtc::CritScope cs(&bitrate_crit_);
- stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_;
- }
+ stats.send_bandwidth_bps = last_bandwidth_bps_;
+ stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_;
return stats;
}
void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
switch (media) {
case MediaType::AUDIO:
audio_network_state_ = state;
@@ -1013,40 +1128,25 @@ void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
}
UpdateAggregateNetworkState();
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) {
- video_receive_stream->SignalNetworkState(video_network_state_);
- }
+ for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) {
+ video_receive_stream->SignalNetworkState(video_network_state_);
}
}
void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) {
- ReadLockScoped read_lock(*send_crit_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
for (auto& kv : audio_send_ssrcs_) {
kv.second->SetTransportOverhead(transport_overhead_per_packet);
}
}
void Call::UpdateAggregateNetworkState() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
- bool have_audio = false;
- bool have_video = false;
- {
- ReadLockScoped read_lock(*send_crit_);
- if (!audio_send_ssrcs_.empty())
- have_audio = true;
- if (!video_send_ssrcs_.empty())
- have_video = true;
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- if (!audio_receive_streams_.empty())
- have_audio = true;
- if (!video_receive_streams_.empty())
- have_video = true;
- }
+ bool have_audio =
+ !audio_send_ssrcs_.empty() || !audio_receive_streams_.empty();
+ bool have_video =
+ !video_send_ssrcs_.empty() || !video_receive_streams_.empty();
bool aggregate_network_up =
((have_video && video_network_state_ == kNetworkUp) ||
@@ -1073,61 +1173,50 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
}
void Call::OnStartRateUpdate(DataRate start_rate) {
- RTC_DCHECK(network_queue()->IsCurrent());
+ RTC_DCHECK_RUN_ON(send_transport_queue());
bitrate_allocator_->UpdateStartRate(start_rate.bps<uint32_t>());
}
void Call::OnTargetTransferRate(TargetTransferRate msg) {
- RTC_DCHECK(network_queue()->IsCurrent());
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- {
- rtc::CritScope cs(&last_bandwidth_bps_crit_);
- last_bandwidth_bps_ = msg.target_rate.bps();
- }
+ RTC_DCHECK_RUN_ON(send_transport_queue());
uint32_t target_bitrate_bps = msg.target_rate.bps();
// For controlling the rate of feedback messages.
receive_side_cc_.OnBitrateChanged(target_bitrate_bps);
bitrate_allocator_->OnNetworkEstimateChanged(msg);
- // Ignore updates if bitrate is zero (the aggregate network state is down).
- if (target_bitrate_bps == 0) {
- rtc::CritScope lock(&bitrate_crit_);
- estimated_send_bitrate_kbps_counter_.ProcessAndPause();
- pacer_bitrate_kbps_counter_.ProcessAndPause();
- return;
- }
-
- bool sending_video;
- {
- ReadLockScoped read_lock(*send_crit_);
- sending_video = !video_send_streams_.empty();
- }
+ worker_thread_->PostTask(
+ ToQueuedTask(task_safety_, [this, target_bitrate_bps]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ last_bandwidth_bps_ = target_bitrate_bps;
+
+ // Ignore updates if bitrate is zero (the aggregate network state is
+ // down) or if we're not sending video.
+ if (target_bitrate_bps == 0 || video_send_streams_.empty()) {
+ estimated_send_bitrate_kbps_counter_.ProcessAndPause();
+ pacer_bitrate_kbps_counter_.ProcessAndPause();
+ return;
+ }
- rtc::CritScope lock(&bitrate_crit_);
- if (!sending_video) {
- // Do not update the stats if we are not sending video.
- estimated_send_bitrate_kbps_counter_.ProcessAndPause();
- pacer_bitrate_kbps_counter_.ProcessAndPause();
- return;
- }
- estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
- // Pacer bitrate may be higher than bitrate estimate if enforcing min bitrate.
- uint32_t pacer_bitrate_bps =
- std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
- pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
+ estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
+ // Pacer bitrate may be higher than bitrate estimate if enforcing min
+ // bitrate.
+ uint32_t pacer_bitrate_bps =
+ std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
+ pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
+ }));
}
void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) {
- RTC_DCHECK(network_queue()->IsCurrent());
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(send_transport_queue());
transport_send_ptr_->SetAllocatedSendBitrateLimits(limits);
- min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
-
- rtc::CritScope lock(&bitrate_crit_);
- configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps();
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, limits]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
+ configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps();
+ }));
}
void Call::ConfigureSync(const std::string& sync_group) {
@@ -1194,28 +1283,24 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
}
bool rtcp_delivered = false;
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- ReadLockScoped read_lock(*receive_crit_);
for (VideoReceiveStream2* stream : video_receive_streams_) {
if (stream->DeliverRtcp(packet, length))
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- ReadLockScoped read_lock(*receive_crit_);
for (AudioReceiveStream* stream : audio_receive_streams_) {
stream->DeliverRtcp(packet, length);
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- ReadLockScoped read_lock(*send_crit_);
for (VideoSendStream* stream : video_send_streams_) {
stream->DeliverRtcp(packet, length);
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- ReadLockScoped read_lock(*send_crit_);
for (auto& kv : audio_send_ssrcs_) {
kv.second->DeliverRtcp(packet, length);
rtcp_delivered = true;
@@ -1259,17 +1344,15 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO ||
is_keep_alive_packet);
- ReadLockScoped read_lock(*receive_crit_);
auto it = receive_rtp_config_.find(parsed_packet.Ssrc());
if (it == receive_rtp_config_.end()) {
RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc "
<< parsed_packet.Ssrc();
// Destruction of the receive stream, including deregistering from the
- // RtpDemuxer, is not protected by the |receive_crit_| lock. But
- // deregistering in the |receive_rtp_config_| map is protected by that lock.
- // So by not passing the packet on to demuxing in this case, we prevent
- // incoming packets to be passed on via the demuxer to a receive stream
- // which is being torned down.
+ // RtpDemuxer, is not protected by the |worker_thread_|.
+ // But deregistering in the |receive_rtp_config_| map is. So by not passing
+ // the packet on to demuxing in this case, we prevent incoming packets to be
+ // passed on via the demuxer to a receive stream which is being torned down.
return DELIVERY_UNKNOWN_SSRC;
}
@@ -1315,7 +1398,8 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket(
MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+
if (IsRtcp(packet.cdata(), packet.size()))
return DeliverRtcp(media_type, packet.cdata(), packet.size());
@@ -1323,20 +1407,20 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket(
}
void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
RtpPacketReceived parsed_packet;
if (!parsed_packet.Parse(packet, length))
return;
parsed_packet.set_recovered(true);
- ReadLockScoped read_lock(*receive_crit_);
auto it = receive_rtp_config_.find(parsed_packet.Ssrc());
if (it == receive_rtp_config_.end()) {
RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc "
<< parsed_packet.Ssrc();
// Destruction of the receive stream, including deregistering from the
- // RtpDemuxer, is not protected by the |receive_crit_| lock. But
- // deregistering in the |receive_rtp_config_| map is protected by that lock.
+ // RtpDemuxer, is not protected by the |worker_thread_|.
+ // But deregistering in the |receive_rtp_config_| map is.
// So by not passing the packet on to demuxing in this case, we prevent
// incoming packets to be passed on via the demuxer to a receive stream
// which is being torn down.
diff --git a/call/call.h b/call/call.h
index 77cd3d2690..75272248c4 100644
--- a/call/call.h
+++ b/call/call.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/media_types.h"
#include "call/audio_receive_stream.h"
#include "call/audio_send_stream.h"
@@ -28,9 +29,41 @@
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/network/sent_packet.h"
#include "rtc_base/network_route.h"
+#include "rtc_base/ref_count.h"
namespace webrtc {
+// A restricted way to share the module process thread across multiple instances
+// of Call that are constructed on the same worker thread (which is what the
+// peer connection factory guarantees).
+// SharedModuleThread supports a callback that is issued when only one reference
+// remains, which is used to indicate to the original owner that the thread may
+// be discarded.
+class SharedModuleThread : public rtc::RefCountInterface {
+ protected:
+ SharedModuleThread(std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining);
+ friend class rtc::scoped_refptr<SharedModuleThread>;
+ ~SharedModuleThread() override;
+
+ public:
+ // Allows injection of an externally created process thread.
+ static rtc::scoped_refptr<SharedModuleThread> Create(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining);
+
+ void EnsureStarted();
+
+ ProcessThread* process_thread();
+
+ private:
+ void AddRef() const override;
+ rtc::RefCountReleaseStatus Release() const override;
+
+ class Impl;
+ mutable std::unique_ptr<Impl> impl_;
+};
+
// A Call instance can contain several send and/or receive streams. All streams
// are assumed to have the same remote endpoint and will share bitrate estimates
// etc.
@@ -50,8 +83,10 @@ class Call {
static Call* Create(const Call::Config& config);
static Call* Create(const Call::Config& config,
+ rtc::scoped_refptr<SharedModuleThread> call_thread);
+ static Call* Create(const Call::Config& config,
Clock* clock,
- std::unique_ptr<ProcessThread> call_thread,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread);
virtual AudioSendStream* CreateAudioSendStream(
@@ -86,6 +121,11 @@ class Call {
virtual void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) = 0;
+ // When a resource is overused, the Call will try to reduce the load on the
+ // sysem, for example by reducing the resolution or frame rate of encoded
+ // streams.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+
// All received RTP and RTCP packets for the call should be inserted to this
// PacketReceiver. The PacketReceiver pointer is valid as long as the
// Call instance exists.
diff --git a/call/call_factory.cc b/call/call_factory.cc
index 6b4f419742..cc02c02835 100644
--- a/call/call_factory.cc
+++ b/call/call_factory.cc
@@ -70,7 +70,12 @@ absl::optional<webrtc::BuiltInNetworkBehaviorConfig> ParseDegradationConfig(
}
} // namespace
+CallFactory::CallFactory() {
+ call_thread_.Detach();
+}
+
Call* CallFactory::CreateCall(const Call::Config& config) {
+ RTC_DCHECK_RUN_ON(&call_thread_);
absl::optional<webrtc::BuiltInNetworkBehaviorConfig> send_degradation_config =
ParseDegradationConfig(true);
absl::optional<webrtc::BuiltInNetworkBehaviorConfig>
@@ -82,7 +87,15 @@ Call* CallFactory::CreateCall(const Call::Config& config) {
config.task_queue_factory);
}
- return Call::Create(config);
+ if (!module_thread_) {
+ module_thread_ = SharedModuleThread::Create(
+ ProcessThread::Create("SharedModThread"), [this]() {
+ RTC_DCHECK_RUN_ON(&call_thread_);
+ module_thread_ = nullptr;
+ });
+ }
+
+ return Call::Create(config, module_thread_);
}
std::unique_ptr<CallFactoryInterface> CreateCallFactory() {
diff --git a/call/call_factory.h b/call/call_factory.h
index f0d695c915..65c0b6532a 100644
--- a/call/call_factory.h
+++ b/call/call_factory.h
@@ -14,13 +14,22 @@
#include "api/call/call_factory_interface.h"
#include "call/call.h"
#include "call/call_config.h"
+#include "rtc_base/synchronization/sequence_checker.h"
namespace webrtc {
class CallFactory : public CallFactoryInterface {
+ public:
+ CallFactory();
+
+ private:
~CallFactory() override {}
Call* CreateCall(const CallConfig& config) override;
+
+ SequenceChecker call_thread_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_
+ RTC_GUARDED_BY(call_thread_);
};
} // namespace webrtc
diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc
index 123be7da4c..9214ae5d14 100644
--- a/call/call_perf_tests.cc
+++ b/call/call_perf_tests.cc
@@ -29,6 +29,7 @@
#include "modules/audio_mixer/audio_mixer_impl.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
@@ -409,7 +410,7 @@ void CallPerfTest::TestCaptureNtpTime(
}
void OnFrame(const VideoFrame& video_frame) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (video_frame.ntp_time_ms() <= 0) {
// Haven't got enough RTCP SR in order to calculate the capture ntp
// time.
@@ -445,7 +446,7 @@ void CallPerfTest::TestCaptureNtpTime(
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -488,7 +489,7 @@ void CallPerfTest::TestCaptureNtpTime(
time_offset_ms_list_, "ms", true);
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const BuiltInNetworkBehaviorConfig net_config_;
Clock* const clock_;
int threshold_ms_;
@@ -499,7 +500,7 @@ void CallPerfTest::TestCaptureNtpTime(
bool rtp_start_timestamp_set_;
uint32_t rtp_start_timestamp_;
typedef std::map<uint32_t, uint32_t> FrameCaptureTimeList;
- FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&crit_);
+ FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&mutex_);
std::vector<double> time_offset_ms_list_;
} test(net_config, threshold_ms, start_time_ms, run_time_ms);
diff --git a/call/call_unittest.cc b/call/call_unittest.cc
index 8afcf25121..e165107d98 100644
--- a/call/call_unittest.cc
+++ b/call/call_unittest.cc
@@ -20,13 +20,17 @@
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/mock_audio_mixer.h"
+#include "api/test/video/function_video_encoder_factory.h"
#include "api/transport/field_trial_based_config.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
+#include "call/adaptation/test/fake_resource.h"
+#include "call/adaptation/test/mock_resource_listener.h"
#include "call/audio_state.h"
#include "modules/audio_device/include/mock_audio_device.h"
#include "modules/audio_processing/include/mock_audio_processing.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "test/fake_encoder.h"
#include "test/gtest.h"
#include "test/mock_audio_decoder_factory.h"
@@ -35,6 +39,10 @@
namespace {
+using ::testing::_;
+using ::testing::Contains;
+using ::testing::StrictMock;
+
struct CallHelper {
explicit CallHelper(bool use_null_audio_processing) {
task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory();
@@ -67,6 +75,20 @@ struct CallHelper {
namespace webrtc {
+namespace {
+
+rtc::scoped_refptr<Resource> FindResourceWhoseNameContains(
+ const std::vector<rtc::scoped_refptr<Resource>>& resources,
+ const std::string& name_contains) {
+ for (const auto& resource : resources) {
+ if (resource->Name().find(name_contains) != std::string::npos)
+ return resource;
+ }
+ return nullptr;
+}
+
+} // namespace
+
TEST(CallTest, ConstructDestruct) {
for (bool use_null_audio_processing : {false, true}) {
CallHelper call(use_null_audio_processing);
@@ -321,8 +343,186 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
EXPECT_EQ(rtp_state1.capture_time_ms, rtp_state2.capture_time_ms);
EXPECT_EQ(rtp_state1.last_timestamp_time_ms,
rtp_state2.last_timestamp_time_ms);
- EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent);
}
}
+TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) {
+ CallHelper call(true);
+ // Create a VideoSendStream.
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() {
+ return std::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
+ });
+ auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory();
+ MockTransport send_transport;
+ VideoSendStream::Config config(&send_transport);
+ config.rtp.payload_type = 110;
+ config.rtp.ssrcs = {42};
+ config.encoder_settings.encoder_factory = &fake_encoder_factory;
+ config.encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory.get();
+ VideoEncoderConfig encoder_config;
+ encoder_config.max_bitrate_bps = 1337;
+ VideoSendStream* stream1 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream1, nullptr);
+ config.rtp.ssrcs = {43};
+ VideoSendStream* stream2 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream2, nullptr);
+ // Add a fake resource.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ call->AddAdaptationResource(fake_resource);
+ // An adapter resource mirroring the |fake_resource| should now be present on
+ // both streams.
+ auto injected_resource1 = FindResourceWhoseNameContains(
+ stream1->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource1);
+ auto injected_resource2 = FindResourceWhoseNameContains(
+ stream2->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource2);
+ // Overwrite the real resource listeners with mock ones to verify the signal
+ // gets through.
+ injected_resource1->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener1;
+ EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource1, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ injected_resource1->SetResourceListener(&resource_listener1);
+ injected_resource2->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener2;
+ EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource2, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ injected_resource2->SetResourceListener(&resource_listener2);
+ // The kOveruse signal should get to our resource listeners.
+ fake_resource->SetUsageState(ResourceUsageState::kOveruse);
+ call->DestroyVideoSendStream(stream1);
+ call->DestroyVideoSendStream(stream2);
+}
+
+TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) {
+ CallHelper call(true);
+ // Add a fake resource.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ call->AddAdaptationResource(fake_resource);
+ // Create a VideoSendStream.
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() {
+ return std::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
+ });
+ auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory();
+ MockTransport send_transport;
+ VideoSendStream::Config config(&send_transport);
+ config.rtp.payload_type = 110;
+ config.rtp.ssrcs = {42};
+ config.encoder_settings.encoder_factory = &fake_encoder_factory;
+ config.encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory.get();
+ VideoEncoderConfig encoder_config;
+ encoder_config.max_bitrate_bps = 1337;
+ VideoSendStream* stream1 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream1, nullptr);
+ config.rtp.ssrcs = {43};
+ VideoSendStream* stream2 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream2, nullptr);
+ // An adapter resource mirroring the |fake_resource| should be present on both
+ // streams.
+ auto injected_resource1 = FindResourceWhoseNameContains(
+ stream1->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource1);
+ auto injected_resource2 = FindResourceWhoseNameContains(
+ stream2->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource2);
+ // Overwrite the real resource listeners with mock ones to verify the signal
+ // gets through.
+ injected_resource1->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener1;
+ EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource1, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ injected_resource1->SetResourceListener(&resource_listener1);
+ injected_resource2->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener2;
+ EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource2, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ injected_resource2->SetResourceListener(&resource_listener2);
+ // The kUnderuse signal should get to our resource listeners.
+ fake_resource->SetUsageState(ResourceUsageState::kUnderuse);
+ call->DestroyVideoSendStream(stream1);
+ call->DestroyVideoSendStream(stream2);
+}
+
+TEST(CallTest, SharedModuleThread) {
+ class SharedModuleThreadUser : public Module {
+ public:
+ SharedModuleThreadUser(ProcessThread* expected_thread,
+ rtc::scoped_refptr<SharedModuleThread> thread)
+ : expected_thread_(expected_thread), thread_(std::move(thread)) {
+ thread_->EnsureStarted();
+ thread_->process_thread()->RegisterModule(this, RTC_FROM_HERE);
+ }
+
+ ~SharedModuleThreadUser() override {
+ thread_->process_thread()->DeRegisterModule(this);
+ EXPECT_TRUE(thread_was_checked_);
+ }
+
+ private:
+ int64_t TimeUntilNextProcess() override { return 1000; }
+ void Process() override {}
+ void ProcessThreadAttached(ProcessThread* process_thread) override {
+ if (!process_thread) {
+ // Being detached.
+ return;
+ }
+ EXPECT_EQ(process_thread, expected_thread_);
+ thread_was_checked_ = true;
+ }
+
+ bool thread_was_checked_ = false;
+ ProcessThread* const expected_thread_;
+ rtc::scoped_refptr<SharedModuleThread> thread_;
+ };
+
+ // Create our test instance and pass a lambda to it that gets executed when
+ // the reference count goes back to 1 - meaning |shared| again is the only
+ // reference, which means we can free the variable and deallocate the thread.
+ rtc::scoped_refptr<SharedModuleThread> shared;
+ shared =
+ SharedModuleThread::Create(ProcessThread::Create("MySharedProcessThread"),
+ [&shared]() { shared = nullptr; });
+ ProcessThread* process_thread = shared->process_thread();
+
+ ASSERT_TRUE(shared.get());
+
+ {
+ // Create a couple of users of the thread.
+ // These instances are in a separate scope to trigger the callback to our
+ // lambda, which will run when these go out of scope.
+ SharedModuleThreadUser user1(process_thread, shared);
+ SharedModuleThreadUser user2(process_thread, shared);
+ }
+
+ // The thread should now have been stopped and freed.
+ EXPECT_FALSE(shared);
+}
+
} // namespace webrtc
diff --git a/call/degraded_call.cc b/call/degraded_call.cc
index 9c8d2be508..007e0af360 100644
--- a/call/degraded_call.cc
+++ b/call/degraded_call.cc
@@ -245,6 +245,11 @@ void DegradedCall::DestroyFlexfecReceiveStream(
call_->DestroyFlexfecReceiveStream(receive_stream);
}
+void DegradedCall::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ call_->AddAdaptationResource(std::move(resource));
+}
+
PacketReceiver* DegradedCall::Receiver() {
if (receive_config_) {
return this;
diff --git a/call/degraded_call.h b/call/degraded_call.h
index 49230ca1ed..ac072b7159 100644
--- a/call/degraded_call.h
+++ b/call/degraded_call.h
@@ -77,6 +77,8 @@ class DegradedCall : public Call, private PacketReceiver {
void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
PacketReceiver* Receiver() override;
RtpTransportControllerSendInterface* GetTransportControllerSend() override;
diff --git a/call/fake_network_pipe.cc b/call/fake_network_pipe.cc
index 8844700e67..324a7bd793 100644
--- a/call/fake_network_pipe.cc
+++ b/call/fake_network_pipe.cc
@@ -122,17 +122,17 @@ FakeNetworkPipe::~FakeNetworkPipe() {
}
void FakeNetworkPipe::SetReceiver(PacketReceiver* receiver) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
receiver_ = receiver;
}
void FakeNetworkPipe::AddActiveTransport(Transport* transport) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
active_transports_[transport]++;
}
void FakeNetworkPipe::RemoveActiveTransport(Transport* transport) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
auto it = active_transports_.find(transport);
RTC_CHECK(it != active_transports_.end());
if (--(it->second) == 0) {
@@ -186,7 +186,7 @@ PacketReceiver::DeliveryStatus FakeNetworkPipe::DeliverPacket(
}
void FakeNetworkPipe::SetClockOffset(int64_t offset_ms) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
clock_offset_ms_ = offset_ms;
}
@@ -198,7 +198,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet,
bool is_rtcp,
MediaType media_type,
absl::optional<int64_t> packet_time_us) {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
int64_t time_now_us = clock_->TimeInMicroseconds();
return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us,
time_now_us, options, is_rtcp, media_type,
@@ -209,7 +209,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet,
absl::optional<PacketOptions> options,
bool is_rtcp,
Transport* transport) {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
int64_t time_now_us = clock_->TimeInMicroseconds();
return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us,
time_now_us, options, is_rtcp,
@@ -233,7 +233,7 @@ bool FakeNetworkPipe::EnqueuePacket(NetworkPacket&& net_packet) {
}
float FakeNetworkPipe::PercentageLoss() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
if (sent_packets_ == 0)
return 0;
@@ -242,7 +242,7 @@ float FakeNetworkPipe::PercentageLoss() {
}
int FakeNetworkPipe::AverageDelay() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
if (sent_packets_ == 0)
return 0;
@@ -251,12 +251,12 @@ int FakeNetworkPipe::AverageDelay() {
}
size_t FakeNetworkPipe::DroppedPackets() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
return dropped_packets_;
}
size_t FakeNetworkPipe::SentPackets() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
return sent_packets_;
}
@@ -264,7 +264,7 @@ void FakeNetworkPipe::Process() {
int64_t time_now_us;
std::queue<NetworkPacket> packets_to_deliver;
{
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
time_now_us = clock_->TimeInMicroseconds();
if (time_now_us - last_log_time_us_ > kLogIntervalMs * 1000) {
int64_t queueing_delay_us = 0;
@@ -318,7 +318,7 @@ void FakeNetworkPipe::Process() {
}
}
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
while (!packets_to_deliver.empty()) {
NetworkPacket packet = std::move(packets_to_deliver.front());
packets_to_deliver.pop();
@@ -354,7 +354,7 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) {
}
absl::optional<int64_t> FakeNetworkPipe::TimeUntilNextProcess() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
absl::optional<int64_t> delivery_us = network_behavior_->NextDeliveryTimeUs();
if (delivery_us) {
int64_t delay_us = *delivery_us - clock_->TimeInMicroseconds();
@@ -364,17 +364,17 @@ absl::optional<int64_t> FakeNetworkPipe::TimeUntilNextProcess() {
}
bool FakeNetworkPipe::HasReceiver() const {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
return receiver_ != nullptr;
}
void FakeNetworkPipe::DeliverPacketWithLock(NetworkPacket* packet) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
DeliverNetworkPacket(packet);
}
void FakeNetworkPipe::ResetStats() {
- rtc::CritScope crit(&process_lock_);
+ MutexLock lock(&process_lock_);
dropped_packets_ = 0;
sent_packets_ = 0;
total_packet_delay_us_ = 0;
diff --git a/call/fake_network_pipe.h b/call/fake_network_pipe.h
index 24340a2f29..1e5bb513bf 100644
--- a/call/fake_network_pipe.h
+++ b/call/fake_network_pipe.h
@@ -24,7 +24,7 @@
#include "call/call.h"
#include "call/simulated_packet_receiver.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -204,14 +204,14 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface {
Clock* const clock_;
// |config_lock| guards the mostly constant things like the callbacks.
- rtc::CriticalSection config_lock_;
+ mutable Mutex config_lock_;
const std::unique_ptr<NetworkBehaviorInterface> network_behavior_;
PacketReceiver* receiver_ RTC_GUARDED_BY(config_lock_);
Transport* const global_transport_;
// |process_lock| guards the data structures involved in delay and loss
// processes, such as the packet queues.
- rtc::CriticalSection process_lock_;
+ Mutex process_lock_;
// Packets are added at the back of the deque, this makes the deque ordered
// by increasing send time. The common case when removing packets from the
// deque is removing early packets, which will be close to the front of the
diff --git a/call/fake_network_pipe_unittest.cc b/call/fake_network_pipe_unittest.cc
index 9c4a3bf755..852a427222 100644
--- a/call/fake_network_pipe_unittest.cc
+++ b/call/fake_network_pipe_unittest.cc
@@ -24,8 +24,10 @@ namespace webrtc {
class MockReceiver : public PacketReceiver {
public:
- MOCK_METHOD3(DeliverPacket,
- DeliveryStatus(MediaType, rtc::CopyOnWriteBuffer, int64_t));
+ MOCK_METHOD(DeliveryStatus,
+ DeliverPacket,
+ (MediaType, rtc::CopyOnWriteBuffer, int64_t),
+ (override));
virtual ~MockReceiver() = default;
};
diff --git a/call/flexfec_receive_stream_impl.cc b/call/flexfec_receive_stream_impl.cc
index 40005efe83..e629bca347 100644
--- a/call/flexfec_receive_stream_impl.cc
+++ b/call/flexfec_receive_stream_impl.cc
@@ -22,7 +22,6 @@
#include "call/rtp_stream_receiver_controller_interface.h"
#include "modules/rtp_rtcp/include/flexfec_receiver.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
@@ -119,12 +118,12 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
recovered_packet_receiver));
}
-std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
Clock* clock,
ReceiveStatistics* receive_statistics,
const FlexfecReceiveStreamImpl::Config& config,
RtcpRttStats* rtt_stats) {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.receiver_only = true;
configuration.clock = clock;
@@ -132,7 +131,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
configuration.outgoing_transport = config.rtcp_send_transport;
configuration.rtt_stats = rtt_stats;
configuration.local_media_ssrc = config.local_ssrc;
- return RtpRtcp::Create(configuration);
+ return ModuleRtpRtcpImpl2::Create(configuration);
}
} // namespace
diff --git a/call/flexfec_receive_stream_impl.h b/call/flexfec_receive_stream_impl.h
index d4fdc7431a..888dae9ebd 100644
--- a/call/flexfec_receive_stream_impl.h
+++ b/call/flexfec_receive_stream_impl.h
@@ -15,6 +15,7 @@
#include "call/flexfec_receive_stream.h"
#include "call/rtp_packet_sink_interface.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -55,7 +56,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream {
// RTCP reporting.
const std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
- const std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
ProcessThread* process_thread_;
std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_;
diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc
index 64eab050cb..89fbe3dde7 100644
--- a/call/rampup_tests.cc
+++ b/call/rampup_tests.cc
@@ -362,14 +362,14 @@ void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream,
void RampUpTester::TriggerTestDone() {
RTC_DCHECK_GE(test_start_ms_, 0);
- // TODO(holmer): Add audio send stats here too when those APIs are available.
- if (!send_stream_)
- return;
-
// Stop polling stats.
// Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/
SendTask(RTC_FROM_HERE, task_queue_, [this] { pending_task_.Stop(); });
+ // TODO(holmer): Add audio send stats here too when those APIs are available.
+ if (!send_stream_)
+ return;
+
VideoSendStream::Stats send_stats = send_stream_->GetStats();
send_stream_ = nullptr; // To avoid dereferencing a bad pointer.
diff --git a/call/rtcp_demuxer.cc b/call/rtcp_demuxer.cc
deleted file mode 100644
index 738109fa43..0000000000
--- a/call/rtcp_demuxer.cc
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/rtcp_demuxer.h"
-
-#include <stddef.h>
-
-#include <algorithm>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "api/rtp_headers.h"
-#include "call/rtcp_packet_sink_interface.h"
-#include "call/rtp_rtcp_demuxer_helper.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-
-RtcpDemuxer::RtcpDemuxer() = default;
-
-RtcpDemuxer::~RtcpDemuxer() {
- RTC_DCHECK(ssrc_sinks_.empty());
- RTC_DCHECK(rsid_sinks_.empty());
- RTC_DCHECK(broadcast_sinks_.empty());
-}
-
-void RtcpDemuxer::AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink) {
- RTC_DCHECK(sink);
- RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink));
- RTC_DCHECK(!MultimapAssociationExists(ssrc_sinks_, sender_ssrc, sink));
- ssrc_sinks_.emplace(sender_ssrc, sink);
-}
-
-void RtcpDemuxer::AddSink(const std::string& rsid,
- RtcpPacketSinkInterface* sink) {
- RTC_DCHECK(IsLegalRsidName(rsid));
- RTC_DCHECK(sink);
- RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink));
- RTC_DCHECK(!MultimapAssociationExists(rsid_sinks_, rsid, sink));
- rsid_sinks_.emplace(rsid, sink);
-}
-
-void RtcpDemuxer::AddBroadcastSink(RtcpPacketSinkInterface* sink) {
- RTC_DCHECK(sink);
- RTC_DCHECK(!MultimapHasValue(ssrc_sinks_, sink));
- RTC_DCHECK(!MultimapHasValue(rsid_sinks_, sink));
- RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink));
- broadcast_sinks_.push_back(sink);
-}
-
-void RtcpDemuxer::RemoveSink(const RtcpPacketSinkInterface* sink) {
- RTC_DCHECK(sink);
- size_t removal_count = RemoveFromMultimapByValue(&ssrc_sinks_, sink) +
- RemoveFromMultimapByValue(&rsid_sinks_, sink);
- RTC_DCHECK_GT(removal_count, 0);
-}
-
-void RtcpDemuxer::RemoveBroadcastSink(const RtcpPacketSinkInterface* sink) {
- RTC_DCHECK(sink);
- auto it = std::find(broadcast_sinks_.begin(), broadcast_sinks_.end(), sink);
- RTC_DCHECK(it != broadcast_sinks_.end());
- broadcast_sinks_.erase(it);
-}
-
-void RtcpDemuxer::OnRtcpPacket(rtc::ArrayView<const uint8_t> packet) {
- // Perform sender-SSRC-based demuxing for packets with a sender-SSRC.
- absl::optional<uint32_t> sender_ssrc = ParseRtcpPacketSenderSsrc(packet);
- if (sender_ssrc) {
- auto it_range = ssrc_sinks_.equal_range(*sender_ssrc);
- for (auto it = it_range.first; it != it_range.second; ++it) {
- it->second->OnRtcpPacket(packet);
- }
- }
-
- // All packets, even those without a sender-SSRC, are broadcast to sinks
- // which listen to broadcasts.
- for (RtcpPacketSinkInterface* sink : broadcast_sinks_) {
- sink->OnRtcpPacket(packet);
- }
-}
-
-void RtcpDemuxer::OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) {
- // Record the new SSRC association for all of the sinks that were associated
- // with the RSID.
- auto it_range = rsid_sinks_.equal_range(rsid);
- for (auto it = it_range.first; it != it_range.second; ++it) {
- RtcpPacketSinkInterface* sink = it->second;
- // Watch out for pre-existing SSRC-based associations.
- if (!MultimapAssociationExists(ssrc_sinks_, ssrc, sink)) {
- AddSink(ssrc, sink);
- }
- }
-
- // RSIDs are uniquely associated with SSRCs; no need to keep in memory
- // the RSID-to-sink association of resolved RSIDs.
- rsid_sinks_.erase(it_range.first, it_range.second);
-}
-
-} // namespace webrtc
diff --git a/call/rtcp_demuxer.h b/call/rtcp_demuxer.h
deleted file mode 100644
index 494e0cea4b..0000000000
--- a/call/rtcp_demuxer.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef CALL_RTCP_DEMUXER_H_
-#define CALL_RTCP_DEMUXER_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "api/array_view.h"
-#include "call/ssrc_binding_observer.h"
-
-namespace webrtc {
-
-class RtcpPacketSinkInterface;
-
-// This class represents the RTCP demuxing, for a single RTP session (i.e., one
-// SSRC space, see RFC 7656). It isn't thread aware, leaving responsibility of
-// multithreading issues to the user of this class.
-class RtcpDemuxer : public SsrcBindingObserver {
- public:
- RtcpDemuxer();
- ~RtcpDemuxer() override;
-
- // Registers a sink. The sink will be notified of incoming RTCP packets with
- // that sender-SSRC. The same sink can be registered for multiple SSRCs, and
- // the same SSRC can have multiple sinks. Null pointer is not allowed.
- // Sinks may be associated with both an SSRC and an RSID.
- // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both.
- void AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink);
-
- // Registers a sink. Once the RSID is resolved to an SSRC, the sink will be
- // notified of all RTCP packets with that sender-SSRC.
- // The same sink can be registered for multiple RSIDs, and
- // the same RSID can have multiple sinks. Null pointer is not allowed.
- // Sinks may be associated with both an SSRC and an RSID.
- // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both.
- void AddSink(const std::string& rsid, RtcpPacketSinkInterface* sink);
-
- // Registers a sink. The sink will be notified of any incoming RTCP packet.
- // Null pointer is not allowed.
- // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both.
- void AddBroadcastSink(RtcpPacketSinkInterface* sink);
-
- // Undo previous AddSink() calls with the given sink.
- void RemoveSink(const RtcpPacketSinkInterface* sink);
-
- // Undo AddBroadcastSink().
- void RemoveBroadcastSink(const RtcpPacketSinkInterface* sink);
-
- // Process a new RTCP packet and forward it to the appropriate sinks.
- void OnRtcpPacket(rtc::ArrayView<const uint8_t> packet);
-
- // Implement SsrcBindingObserver - become notified whenever RSIDs resolve to
- // an SSRC.
- void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) override;
-
- // TODO(eladalon): Add the ability to resolve RSIDs and inform observers,
- // like in the RtpDemuxer case, once the relevant standard is finalized.
-
- private:
- // Records the association SSRCs to sinks.
- std::multimap<uint32_t, RtcpPacketSinkInterface*> ssrc_sinks_;
-
- // Records the association RSIDs to sinks.
- std::multimap<std::string, RtcpPacketSinkInterface*> rsid_sinks_;
-
- // Sinks which will receive notifications of all incoming RTCP packets.
- // Additional/removal of sinks is expected to be significantly less frequent
- // than RTCP message reception; container chosen for iteration performance.
- std::vector<RtcpPacketSinkInterface*> broadcast_sinks_;
-};
-
-} // namespace webrtc
-
-#endif // CALL_RTCP_DEMUXER_H_
diff --git a/call/rtcp_demuxer_unittest.cc b/call/rtcp_demuxer_unittest.cc
deleted file mode 100644
index 23c305c900..0000000000
--- a/call/rtcp_demuxer_unittest.cc
+++ /dev/null
@@ -1,505 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/rtcp_demuxer.h"
-
-#include <stddef.h>
-
-#include <set>
-
-#include "api/rtp_headers.h"
-#include "call/rtcp_packet_sink_interface.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
-#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
-#include "rtc_base/arraysize.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/checks.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-namespace {
-
-using ::testing::_;
-using ::testing::AtLeast;
-using ::testing::ElementsAreArray;
-using ::testing::InSequence;
-using ::testing::Matcher;
-using ::testing::NiceMock;
-
-class MockRtcpPacketSink : public RtcpPacketSinkInterface {
- public:
- MOCK_METHOD1(OnRtcpPacket, void(rtc::ArrayView<const uint8_t>));
-};
-
-class RtcpDemuxerTest : public ::testing::Test {
- protected:
- ~RtcpDemuxerTest() {
- for (auto* sink : sinks_to_tear_down_) {
- demuxer_.RemoveSink(sink);
- }
- for (auto* sink : broadcast_sinks_to_tear_down_) {
- demuxer_.RemoveBroadcastSink(sink);
- }
- }
-
- void AddSsrcSink(uint32_t ssrc, RtcpPacketSinkInterface* sink) {
- demuxer_.AddSink(ssrc, sink);
- sinks_to_tear_down_.insert(sink);
- }
-
- void AddRsidSink(const std::string& rsid, RtcpPacketSinkInterface* sink) {
- demuxer_.AddSink(rsid, sink);
- sinks_to_tear_down_.insert(sink);
- }
-
- void RemoveSink(RtcpPacketSinkInterface* sink) {
- sinks_to_tear_down_.erase(sink);
- demuxer_.RemoveSink(sink);
- }
-
- void AddBroadcastSink(RtcpPacketSinkInterface* sink) {
- demuxer_.AddBroadcastSink(sink);
- broadcast_sinks_to_tear_down_.insert(sink);
- }
-
- void RemoveBroadcastSink(RtcpPacketSinkInterface* sink) {
- broadcast_sinks_to_tear_down_.erase(sink);
- demuxer_.RemoveBroadcastSink(sink);
- }
-
- RtcpDemuxer demuxer_;
- std::set<RtcpPacketSinkInterface*> sinks_to_tear_down_;
- std::set<RtcpPacketSinkInterface*> broadcast_sinks_to_tear_down_;
-};
-
-// Produces a packet buffer representing an RTCP packet with a given SSRC,
-// as it would look when sent over the wire.
-// |distinguishing_string| allows different RTCP packets with the same SSRC
-// to be distinguished. How this is set into the actual packet is
-// unimportant, and depends on which RTCP message we choose to use.
-rtc::Buffer CreateRtcpPacket(uint32_t ssrc,
- const std::string& distinguishing_string = "") {
- rtcp::Bye packet;
- packet.SetSenderSsrc(ssrc);
- if (distinguishing_string != "") {
- // Actual way we use |distinguishing_string| is unimportant, so long
- // as it ends up in the packet.
- packet.SetReason(distinguishing_string);
- }
- return packet.Build();
-}
-
-static Matcher<rtc::ArrayView<const uint8_t>> SamePacketAs(
- const rtc::Buffer& other) {
- return ElementsAreArray(other.cbegin(), other.cend());
-}
-
-} // namespace
-
-TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnCorrectSinkBySsrc) {
- constexpr uint32_t ssrcs[] = {101, 202, 303};
- MockRtcpPacketSink sinks[arraysize(ssrcs)];
- for (size_t i = 0; i < arraysize(ssrcs); i++) {
- AddSsrcSink(ssrcs[i], &sinks[i]);
- }
-
- for (size_t i = 0; i < arraysize(ssrcs); i++) {
- auto packet = CreateRtcpPacket(ssrcs[i]);
- EXPECT_CALL(sinks[i], OnRtcpPacket(SamePacketAs(packet))).Times(1);
- demuxer_.OnRtcpPacket(packet);
- }
-}
-
-TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnResolvedRsidSink) {
- // Set up some RSID sinks.
- const std::string rsids[] = {"a", "b", "c"};
- MockRtcpPacketSink sinks[arraysize(rsids)];
- for (size_t i = 0; i < arraysize(rsids); i++) {
- AddRsidSink(rsids[i], &sinks[i]);
- }
-
- // Only resolve one of the sinks.
- constexpr size_t resolved_sink_index = 0;
- constexpr uint32_t ssrc = 345;
- demuxer_.OnSsrcBoundToRsid(rsids[resolved_sink_index], ssrc);
-
- // The resolved sink gets notifications of RTCP messages with its SSRC.
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sinks[resolved_sink_index], OnRtcpPacket(SamePacketAs(packet)))
- .Times(1);
-
- // RTCP received; expected calls triggered.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest,
- SingleCallbackAfterResolutionOfAnRsidToAlreadyRegisteredSsrc) {
- // Associate a sink with an SSRC.
- MockRtcpPacketSink sink;
- constexpr uint32_t ssrc = 999;
- AddSsrcSink(ssrc, &sink);
-
- // Associate the same sink with an RSID.
- const std::string rsid = "r";
- AddRsidSink(rsid, &sink);
-
- // Resolve the RSID to the aforementioned SSRC.
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc);
-
- // OnRtcpPacket still called only a single time for messages with this SSRC.
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest,
- OnRtcpPacketCalledOnAllBroadcastSinksForAllRtcpPackets) {
- MockRtcpPacketSink sinks[3];
- for (MockRtcpPacketSink& sink : sinks) {
- AddBroadcastSink(&sink);
- }
-
- constexpr uint32_t ssrc = 747;
- auto packet = CreateRtcpPacket(ssrc);
-
- for (MockRtcpPacketSink& sink : sinks) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- }
-
- // RTCP received; expected calls triggered.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToNonBroadcastSink) {
- constexpr uint32_t ssrc = 101;
- MockRtcpPacketSink sink;
- AddSsrcSink(ssrc, &sink);
-
- std::vector<rtc::Buffer> packets;
- for (size_t i = 0; i < 5; i++) {
- packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i)));
- }
-
- InSequence sequence;
- for (const auto& packet : packets) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- }
-
- for (const auto& packet : packets) {
- demuxer_.OnRtcpPacket(packet);
- }
-}
-
-TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToBroadcastSink) {
- MockRtcpPacketSink sink;
- AddBroadcastSink(&sink);
-
- std::vector<rtc::Buffer> packets;
- for (size_t i = 0; i < 5; i++) {
- constexpr uint32_t ssrc = 101;
- packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i)));
- }
-
- InSequence sequence;
- for (const auto& packet : packets) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- }
-
- for (const auto& packet : packets) {
- demuxer_.OnRtcpPacket(packet);
- }
-}
-
-TEST_F(RtcpDemuxerTest, MultipleSinksMappedToSameSsrc) {
- MockRtcpPacketSink sinks[3];
- constexpr uint32_t ssrc = 404;
- for (auto& sink : sinks) {
- AddSsrcSink(ssrc, &sink);
- }
-
- // Reception of an RTCP packet associated with the shared SSRC triggers the
- // callback on all of the sinks associated with it.
- auto packet = CreateRtcpPacket(ssrc);
- for (auto& sink : sinks) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet)));
- }
-
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, SinkMappedToMultipleSsrcs) {
- constexpr uint32_t ssrcs[] = {404, 505, 606};
- MockRtcpPacketSink sink;
- for (uint32_t ssrc : ssrcs) {
- AddSsrcSink(ssrc, &sink);
- }
-
- // The sink which is associated with multiple SSRCs gets the callback
- // triggered for each of those SSRCs.
- for (uint32_t ssrc : ssrcs) {
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet)));
- demuxer_.OnRtcpPacket(packet);
- }
-}
-
-TEST_F(RtcpDemuxerTest, MultipleRsidsOnSameSink) {
- // Sink associated with multiple sinks.
- MockRtcpPacketSink sink;
- const std::string rsids[] = {"a", "b", "c"};
- for (const auto& rsid : rsids) {
- AddRsidSink(rsid, &sink);
- }
-
- // RSIDs resolved to SSRCs.
- uint32_t ssrcs[arraysize(rsids)];
- for (size_t i = 0; i < arraysize(rsids); i++) {
- ssrcs[i] = 1000 + static_cast<uint32_t>(i);
- demuxer_.OnSsrcBoundToRsid(rsids[i], ssrcs[i]);
- }
-
- // Set up packets to match those RSIDs/SSRCs.
- std::vector<rtc::Buffer> packets;
- for (size_t i = 0; i < arraysize(rsids); i++) {
- packets.push_back(CreateRtcpPacket(ssrcs[i]));
- }
-
- // The sink expects to receive all of the packets.
- for (const auto& packet : packets) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- }
-
- // Packet demuxed correctly; OnRtcpPacket() triggered on sink.
- for (const auto& packet : packets) {
- demuxer_.OnRtcpPacket(packet);
- }
-}
-
-TEST_F(RtcpDemuxerTest, RsidUsedByMultipleSinks) {
- MockRtcpPacketSink sinks[3];
- const std::string shared_rsid = "a";
-
- for (MockRtcpPacketSink& sink : sinks) {
- AddRsidSink(shared_rsid, &sink);
- }
-
- constexpr uint32_t shared_ssrc = 888;
- demuxer_.OnSsrcBoundToRsid(shared_rsid, shared_ssrc);
-
- auto packet = CreateRtcpPacket(shared_ssrc);
-
- for (MockRtcpPacketSink& sink : sinks) {
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1);
- }
-
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedBeforeFirstPacket) {
- constexpr uint32_t ssrc = 404;
- MockRtcpPacketSink sink;
- AddSsrcSink(ssrc, &sink);
-
- RemoveSink(&sink);
-
- // The removed sink does not get callbacks.
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedAfterFirstPacket) {
- constexpr uint32_t ssrc = 404;
- NiceMock<MockRtcpPacketSink> sink;
- AddSsrcSink(ssrc, &sink);
-
- auto before_packet = CreateRtcpPacket(ssrc);
- demuxer_.OnRtcpPacket(before_packet);
-
- RemoveSink(&sink);
-
- // The removed sink does not get callbacks.
- auto after_packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(after_packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedBeforeRsidResolution) {
- const std::string rsid = "a";
- constexpr uint32_t ssrc = 404;
- MockRtcpPacketSink sink;
- AddRsidSink(rsid, &sink);
-
- // Removal before resolution.
- RemoveSink(&sink);
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc);
-
- // The removed sink does not get callbacks.
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedAfterRsidResolution) {
- const std::string rsid = "a";
- constexpr uint32_t ssrc = 404;
- MockRtcpPacketSink sink;
- AddRsidSink(rsid, &sink);
-
- // Removal after resolution.
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc);
- RemoveSink(&sink);
-
- // The removed sink does not get callbacks.
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedBeforeFirstPacket) {
- MockRtcpPacketSink sink;
- AddBroadcastSink(&sink);
-
- RemoveBroadcastSink(&sink);
-
- // The removed sink does not get callbacks.
- constexpr uint32_t ssrc = 404;
- auto packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(packet);
-}
-
-TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedAfterFirstPacket) {
- NiceMock<MockRtcpPacketSink> sink;
- AddBroadcastSink(&sink);
-
- constexpr uint32_t ssrc = 404;
- auto before_packet = CreateRtcpPacket(ssrc);
- demuxer_.OnRtcpPacket(before_packet);
-
- RemoveBroadcastSink(&sink);
-
- // The removed sink does not get callbacks.
- auto after_packet = CreateRtcpPacket(ssrc);
- EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called.
- demuxer_.OnRtcpPacket(after_packet);
-}
-
-// The RSID to SSRC mapping should be one-to-one. If we end up receiving
-// two (or more) packets with the same SSRC, but different RSIDs, we guarantee
-// remembering the first one; no guarantees are made about further associations.
-TEST_F(RtcpDemuxerTest, FirstResolutionOfRsidNotForgotten) {
- MockRtcpPacketSink sink;
- const std::string rsid = "a";
- AddRsidSink(rsid, &sink);
-
- constexpr uint32_t ssrc_a = 111; // First resolution - guaranteed effective.
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc_a);
-
- constexpr uint32_t ssrc_b = 222; // Second resolution - no guarantees.
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc_b);
-
- auto packet_a = CreateRtcpPacket(ssrc_a);
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_a))).Times(1);
- demuxer_.OnRtcpPacket(packet_a);
-
- auto packet_b = CreateRtcpPacket(ssrc_b);
- EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_b))).Times(AtLeast(0));
- demuxer_.OnRtcpPacket(packet_b);
-}
-
-#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-
-TEST_F(RtcpDemuxerTest, RepeatedSsrcToSinkAssociationsDisallowed) {
- MockRtcpPacketSink sink;
-
- constexpr uint32_t ssrc = 101;
- AddSsrcSink(ssrc, &sink);
- EXPECT_DEATH(AddSsrcSink(ssrc, &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RepeatedRsidToSinkAssociationsDisallowed) {
- MockRtcpPacketSink sink;
-
- const std::string rsid = "z";
- AddRsidSink(rsid, &sink);
- EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RepeatedBroadcastSinkRegistrationDisallowed) {
- MockRtcpPacketSink sink;
-
- AddBroadcastSink(&sink);
- EXPECT_DEATH(AddBroadcastSink(&sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) {
- MockRtcpPacketSink sink;
-
- constexpr uint32_t ssrc = 101;
- AddSsrcSink(ssrc, &sink);
- EXPECT_DEATH(AddBroadcastSink(&sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) {
- MockRtcpPacketSink sink;
-
- const std::string rsid = "z";
- AddRsidSink(rsid, &sink);
- EXPECT_DEATH(AddBroadcastSink(&sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) {
- MockRtcpPacketSink sink;
-
- AddBroadcastSink(&sink);
- constexpr uint32_t ssrc = 101;
- EXPECT_DEATH(AddSsrcSink(ssrc, &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) {
- MockRtcpPacketSink sink;
-
- AddBroadcastSink(&sink);
- const std::string rsid = "j";
- EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, MayNotCallRemoveSinkOnNeverAddedSink) {
- MockRtcpPacketSink sink;
- EXPECT_DEATH(RemoveSink(&sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, MayNotCallRemoveBroadcastSinkOnNeverAddedSink) {
- MockRtcpPacketSink sink;
- EXPECT_DEATH(RemoveBroadcastSink(&sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RsidMustBeNonEmpty) {
- MockRtcpPacketSink sink;
- EXPECT_DEATH(AddRsidSink("", &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RsidMustBeAlphaNumeric) {
- MockRtcpPacketSink sink;
- EXPECT_DEATH(AddRsidSink("a_3", &sink), "");
-}
-
-TEST_F(RtcpDemuxerTest, RsidMustNotExceedMaximumLength) {
- MockRtcpPacketSink sink;
- std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
- EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
-}
-
-#endif
-
-} // namespace webrtc
diff --git a/call/rtcp_packet_sink_interface.h b/call/rtcp_packet_sink_interface.h
deleted file mode 100644
index 8ea3f7d21c..0000000000
--- a/call/rtcp_packet_sink_interface.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef CALL_RTCP_PACKET_SINK_INTERFACE_H_
-#define CALL_RTCP_PACKET_SINK_INTERFACE_H_
-
-#include "api/array_view.h"
-
-namespace webrtc {
-
-// This class represents a receiver of unparsed RTCP packets.
-// TODO(eladalon): Replace this by demuxing over parsed rather than raw data.
-// Whether this should be over an entire RTCP packet, or over RTCP blocks,
-// is still under discussion.
-class RtcpPacketSinkInterface {
- public:
- virtual ~RtcpPacketSinkInterface() = default;
- virtual void OnRtcpPacket(rtc::ArrayView<const uint8_t> packet) = 0;
-};
-
-} // namespace webrtc
-
-#endif // CALL_RTCP_PACKET_SINK_INTERFACE_H_
diff --git a/call/rtp_demuxer.cc b/call/rtp_demuxer.cc
index 14725cf023..9fc4ba1c16 100644
--- a/call/rtp_demuxer.cc
+++ b/call/rtp_demuxer.cc
@@ -11,8 +11,6 @@
#include "call/rtp_demuxer.h"
#include "call/rtp_packet_sink_interface.h"
-#include "call/rtp_rtcp_demuxer_helper.h"
-#include "call/ssrc_binding_observer.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "rtc_base/checks.h"
@@ -20,10 +18,60 @@
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
+namespace {
+
+template <typename Container, typename Value>
+size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) {
+ size_t count = 0;
+ for (auto it = multimap->begin(); it != multimap->end();) {
+ if (it->second == value) {
+ it = multimap->erase(it);
+ ++count;
+ } else {
+ ++it;
+ }
+ }
+ return count;
+}
+
+template <typename Map, typename Value>
+size_t RemoveFromMapByValue(Map* map, const Value& value) {
+ size_t count = 0;
+ for (auto it = map->begin(); it != map->end();) {
+ if (it->second == value) {
+ it = map->erase(it);
+ ++count;
+ } else {
+ ++it;
+ }
+ }
+ return count;
+}
+
+} // namespace
RtpDemuxerCriteria::RtpDemuxerCriteria() = default;
RtpDemuxerCriteria::~RtpDemuxerCriteria() = default;
+std::string RtpDemuxerCriteria::ToString() const {
+ rtc::StringBuilder sb;
+ sb << "{mid: " << (mid.empty() ? "<empty>" : mid)
+ << ", rsid: " << (rsid.empty() ? "<empty>" : rsid) << ", ssrcs: [";
+
+ for (auto ssrc : ssrcs) {
+ sb << ssrc << ", ";
+ }
+
+ sb << "], payload_types = [";
+
+ for (auto pt : payload_types) {
+ sb << pt << ", ";
+ }
+
+ sb << "]}";
+ return sb.Release();
+}
+
// static
std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) {
rtc::StringBuilder sb;
@@ -51,7 +99,6 @@ RtpDemuxer::~RtpDemuxer() {
RTC_DCHECK(sinks_by_pt_.empty());
RTC_DCHECK(sink_by_mid_and_rsid_.empty());
RTC_DCHECK(sink_by_rsid_.empty());
- RTC_DCHECK(ssrc_binding_observers_.empty());
}
bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria,
@@ -66,6 +113,8 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria,
// criteria because new sinks are created according to user-specified SDP and
// we do not want to crash due to a data validation error.
if (CriteriaWouldConflict(criteria)) {
+ RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink
+ << " due conflicting criteria " << criteria.ToString();
return false;
}
@@ -92,6 +141,9 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria,
RefreshKnownMids();
+ RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria "
+ << criteria.ToString();
+
return true;
}
@@ -105,25 +157,40 @@ bool RtpDemuxer::CriteriaWouldConflict(
// Adding this criteria would cause one of these rules to be shadowed, so
// reject this new criteria.
if (known_mids_.find(criteria.mid) != known_mids_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with known mid";
return true;
}
} else {
// If the exact rule already exists, then reject this duplicate.
- if (sink_by_mid_and_rsid_.find(std::make_pair(
- criteria.mid, criteria.rsid)) != sink_by_mid_and_rsid_.end()) {
+ const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find(
+ std::make_pair(criteria.mid, criteria.rsid));
+ if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_mid_and_rsid->second
+ << " by mid+rsid binding";
return true;
}
// If there is already a sink registered for the bare MID, then this
// criteria will never receive any packets because they will just be
// directed to that MID sink, so reject this new criteria.
- if (sink_by_mid_.find(criteria.mid) != sink_by_mid_.end()) {
+ const auto sink_by_mid = sink_by_mid_.find(criteria.mid);
+ if (sink_by_mid != sink_by_mid_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_mid->second << " by mid binding";
return true;
}
}
}
for (uint32_t ssrc : criteria.ssrcs) {
- if (sink_by_ssrc_.find(ssrc) != sink_by_ssrc_.end()) {
+ const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc);
+ if (sink_by_ssrc != sink_by_ssrc_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_ssrc->second << " binding by SSRC=" << ssrc;
return true;
}
}
@@ -168,7 +235,11 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) {
RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) +
RemoveFromMapByValue(&sink_by_rsid_, sink);
RefreshKnownMids();
- return num_removed > 0;
+ bool removed = num_removed > 0;
+ if (removed) {
+ RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings";
+ }
+ return removed;
}
bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) {
@@ -284,12 +355,7 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(const std::string& mid,
const auto it = sink_by_mid_.find(mid);
if (it != sink_by_mid_.end()) {
RtpPacketSinkInterface* sink = it->second;
- bool notify = AddSsrcSinkBinding(ssrc, sink);
- if (notify) {
- for (auto* observer : ssrc_binding_observers_) {
- observer->OnSsrcBoundToMid(mid, ssrc);
- }
- }
+ AddSsrcSinkBinding(ssrc, sink);
return sink;
}
return nullptr;
@@ -302,39 +368,22 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid(
const auto it = sink_by_mid_and_rsid_.find(std::make_pair(mid, rsid));
if (it != sink_by_mid_and_rsid_.end()) {
RtpPacketSinkInterface* sink = it->second;
- bool notify = AddSsrcSinkBinding(ssrc, sink);
- if (notify) {
- for (auto* observer : ssrc_binding_observers_) {
- observer->OnSsrcBoundToMidRsid(mid, rsid, ssrc);
- }
- }
+ AddSsrcSinkBinding(ssrc, sink);
return sink;
}
return nullptr;
}
-void RtpDemuxer::RegisterRsidResolutionObserver(SsrcBindingObserver* observer) {
- RegisterSsrcBindingObserver(observer);
-}
RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByRsid(const std::string& rsid,
uint32_t ssrc) {
const auto it = sink_by_rsid_.find(rsid);
if (it != sink_by_rsid_.end()) {
RtpPacketSinkInterface* sink = it->second;
- bool notify = AddSsrcSinkBinding(ssrc, sink);
- if (notify) {
- for (auto* observer : ssrc_binding_observers_) {
- observer->OnSsrcBoundToRsid(rsid, ssrc);
- }
- }
+ AddSsrcSinkBinding(ssrc, sink);
return sink;
}
return nullptr;
}
-void RtpDemuxer::DeregisterRsidResolutionObserver(
- const SsrcBindingObserver* observer) {
- DeregisterSsrcBindingObserver(observer);
-}
RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType(
uint8_t payload_type,
@@ -345,54 +394,33 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType(
const auto end = range.second;
if (std::next(it) == end) {
RtpPacketSinkInterface* sink = it->second;
- bool notify = AddSsrcSinkBinding(ssrc, sink);
- if (notify) {
- for (auto* observer : ssrc_binding_observers_) {
- observer->OnSsrcBoundToPayloadType(payload_type, ssrc);
- }
- }
+ AddSsrcSinkBinding(ssrc, sink);
return sink;
}
}
return nullptr;
}
-bool RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc,
+void RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc,
RtpPacketSinkInterface* sink) {
if (sink_by_ssrc_.size() >= kMaxSsrcBindings) {
RTC_LOG(LS_WARNING) << "New SSRC=" << ssrc
<< " sink binding ignored; limit of" << kMaxSsrcBindings
<< " bindings has been reached.";
- return false;
+ return;
}
auto result = sink_by_ssrc_.emplace(ssrc, sink);
auto it = result.first;
bool inserted = result.second;
if (inserted) {
- return true;
- }
- if (it->second != sink) {
+ RTC_LOG(LS_INFO) << "Added sink = " << sink
+ << " binding with SSRC=" << ssrc;
+ } else if (it->second != sink) {
+ RTC_LOG(LS_INFO) << "Updated sink = " << sink
+ << " binding with SSRC=" << ssrc;
it->second = sink;
- return true;
}
- return false;
-}
-
-void RtpDemuxer::RegisterSsrcBindingObserver(SsrcBindingObserver* observer) {
- RTC_DCHECK(observer);
- RTC_DCHECK(!ContainerHasKey(ssrc_binding_observers_, observer));
-
- ssrc_binding_observers_.push_back(observer);
-}
-
-void RtpDemuxer::DeregisterSsrcBindingObserver(
- const SsrcBindingObserver* observer) {
- RTC_DCHECK(observer);
- auto it = std::find(ssrc_binding_observers_.begin(),
- ssrc_binding_observers_.end(), observer);
- RTC_DCHECK(it != ssrc_binding_observers_.end());
- ssrc_binding_observers_.erase(it);
}
} // namespace webrtc
diff --git a/call/rtp_demuxer.h b/call/rtp_demuxer.h
index c815c47f72..3aa7e9df26 100644
--- a/call/rtp_demuxer.h
+++ b/call/rtp_demuxer.h
@@ -21,7 +21,6 @@ namespace webrtc {
class RtpPacketReceived;
class RtpPacketSinkInterface;
-class SsrcBindingObserver;
// This struct describes the criteria that will be used to match packets to a
// specific sink.
@@ -44,6 +43,9 @@ struct RtpDemuxerCriteria {
// Will match packets with any of these payload types.
std::set<uint8_t> payload_types;
+
+ // Return string representation of demux criteria to facilitate logging
+ std::string ToString() const;
};
// This class represents the RTP demuxing, for a single RTP session (i.e., one
@@ -130,17 +132,6 @@ class RtpDemuxer {
// if the packet was forwarded and false if the packet was dropped.
bool OnRtpPacket(const RtpPacketReceived& packet);
- // The Observer will be notified when an attribute (e.g., RSID, MID, etc.) is
- // bound to an SSRC.
- void RegisterSsrcBindingObserver(SsrcBindingObserver* observer);
- // Deprecated: Use the above method.
- void RegisterRsidResolutionObserver(SsrcBindingObserver* observer);
-
- // Undo a previous RegisterSsrcBindingObserver().
- void DeregisterSsrcBindingObserver(const SsrcBindingObserver* observer);
- // Deprecated: Use the above method.
- void DeregisterRsidResolutionObserver(const SsrcBindingObserver* observer);
-
// Configure whether to look at the MID header extension when demuxing
// incoming RTP packets. By default this is enabled.
void set_use_mid(bool use_mid) { use_mid_ = use_mid; }
@@ -197,14 +188,8 @@ class RtpDemuxer {
std::map<uint32_t, std::string> mid_by_ssrc_;
std::map<uint32_t, std::string> rsid_by_ssrc_;
- // Adds a binding from the SSRC to the given sink. Returns true if there was
- // not already a sink bound to the SSRC or if the sink replaced a different
- // sink. Returns false if the binding was unchanged.
- bool AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink);
-
- // Observers which will be notified when an RSID association to an SSRC is
- // resolved by this object.
- std::vector<SsrcBindingObserver*> ssrc_binding_observers_;
+ // Adds a binding from the SSRC to the given sink.
+ void AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink);
bool use_mid_ = true;
};
diff --git a/call/rtp_demuxer_unittest.cc b/call/rtp_demuxer_unittest.cc
index 86b458a0cc..a4abab73ed 100644
--- a/call/rtp_demuxer_unittest.cc
+++ b/call/rtp_demuxer_unittest.cc
@@ -14,7 +14,6 @@
#include <set>
#include <string>
-#include "call/ssrc_binding_observer.h"
#include "call/test/mock_rtp_packet_sink_interface.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
@@ -31,31 +30,15 @@ namespace {
using ::testing::_;
using ::testing::AtLeast;
-using ::testing::AtMost;
using ::testing::InSequence;
using ::testing::NiceMock;
-class MockSsrcBindingObserver : public SsrcBindingObserver {
- public:
- MOCK_METHOD2(OnSsrcBoundToRsid, void(const std::string& rsid, uint32_t ssrc));
- MOCK_METHOD2(OnSsrcBoundToMid, void(const std::string& mid, uint32_t ssrc));
- MOCK_METHOD3(OnSsrcBoundToMidRsid,
- void(const std::string& mid,
- const std::string& rsid,
- uint32_t ssrc));
- MOCK_METHOD2(OnSsrcBoundToPayloadType,
- void(uint8_t payload_type, uint32_t ssrc));
-};
-
class RtpDemuxerTest : public ::testing::Test {
protected:
~RtpDemuxerTest() {
for (auto* sink : sinks_to_tear_down_) {
demuxer_.RemoveSink(sink);
}
- for (auto* observer : observers_to_tear_down_) {
- demuxer_.DeregisterSsrcBindingObserver(observer);
- }
}
// These are convenience methods for calling demuxer.AddSink with different
@@ -103,20 +86,6 @@ class RtpDemuxerTest : public ::testing::Test {
return demuxer_.RemoveSink(sink);
}
- // These are convenience methods for calling
- // demuxer.{Register|Unregister}SsrcBindingObserver such that observers are
- // automatically removed when the test finishes.
-
- void RegisterSsrcBindingObserver(SsrcBindingObserver* observer) {
- demuxer_.RegisterSsrcBindingObserver(observer);
- observers_to_tear_down_.insert(observer);
- }
-
- void DeregisterSsrcBindingObserver(SsrcBindingObserver* observer) {
- demuxer_.DeregisterSsrcBindingObserver(observer);
- observers_to_tear_down_.erase(observer);
- }
-
// The CreatePacket* methods are helpers for creating new RTP packets with
// various attributes set. Tests should use the helper that provides the
// minimum information needed to exercise the behavior under test. Tests also
@@ -206,10 +175,11 @@ class RtpDemuxerTest : public ::testing::Test {
RtpDemuxer demuxer_;
std::set<RtpPacketSinkInterface*> sinks_to_tear_down_;
- std::set<SsrcBindingObserver*> observers_to_tear_down_;
uint16_t next_sequence_number_ = 1;
};
+class RtpDemuxerDeathTest : public RtpDemuxerTest {};
+
MATCHER_P(SamePacketAs, other, "") {
return arg.Ssrc() == other.Ssrc() &&
arg.SequenceNumber() == other.SequenceNumber();
@@ -746,73 +716,6 @@ TEST_F(RtpDemuxerTest, AssociatingByRsidAndBySsrcCannotTriggerDoubleCall) {
EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
}
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMid) {
- const std::string mid = "v";
- constexpr uint32_t ssrc = 10;
-
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyMid(mid, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- auto packet = CreatePacketWithSsrcMid(ssrc, mid);
- EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc));
- EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
-}
-
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToRsid) {
- const std::string rsid = "1";
- constexpr uint32_t ssrc = 111;
-
- // Only RSIDs which the demuxer knows may be resolved.
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyRsid(rsid, &sink);
-
- NiceMock<MockSsrcBindingObserver> rsid_resolution_observers[3];
- for (auto& observer : rsid_resolution_observers) {
- RegisterSsrcBindingObserver(&observer);
- EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1);
- }
-
- // The expected calls to OnSsrcBoundToRsid() will be triggered by this.
- auto packet = CreatePacketWithSsrcRsid(ssrc, rsid);
- EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
-}
-
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMidRsid) {
- const std::string mid = "v";
- const std::string rsid = "1";
- constexpr uint32_t ssrc = 10;
-
- NiceMock<MockRtpPacketSink> sink;
- AddSinkBothMidRsid(mid, rsid, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- auto packet = CreatePacketWithSsrcMidRsid(ssrc, mid, rsid);
- EXPECT_CALL(observer, OnSsrcBoundToMidRsid(mid, rsid, ssrc));
- EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
-}
-
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToPayloadType) {
- constexpr uint8_t payload_type = 3;
- constexpr uint32_t ssrc = 10;
-
- RtpDemuxerCriteria criteria;
- criteria.payload_types = {payload_type};
- NiceMock<MockRtpPacketSink> sink;
- AddSink(criteria, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- auto packet = CreatePacketWithSsrc(ssrc);
- packet->SetPayloadType(payload_type);
- EXPECT_CALL(observer, OnSsrcBoundToPayloadType(payload_type, ssrc));
- EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
-}
// If one sink is associated with SSRC x, and another sink with RSID y, then if
// we receive a packet with both SSRC x and RSID y, route that to only the sink
@@ -847,9 +750,6 @@ TEST_F(RtpDemuxerTest,
NiceMock<MockRtpPacketSink> rsid_sink;
AddSinkOnlyRsid(rsid, &rsid_sink);
- NiceMock<MockSsrcBindingObserver> observer;
- RegisterSsrcBindingObserver(&observer);
-
// The SSRC was mapped to an SSRC sink, but was even active (packets flowed
// over it).
auto packet = CreatePacketWithSsrcRsid(ssrc, rsid);
@@ -860,7 +760,6 @@ TEST_F(RtpDemuxerTest,
// is guaranteed.
RemoveSink(&ssrc_sink);
EXPECT_CALL(rsid_sink, OnRtpPacket(SamePacketAs(*packet))).Times(AtLeast(0));
- EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(AtLeast(0));
EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
}
@@ -1355,169 +1254,36 @@ TEST_F(RtpDemuxerTest, PacketWithMidAndUnknownRsidIsNotRoutedByPayloadType) {
EXPECT_FALSE(demuxer_.OnRtpPacket(*packet));
}
-// Observers are only notified of an SSRC binding to an RSID if we care about
-// the RSID (i.e., have a sink added for that RSID).
-TEST_F(RtpDemuxerTest, ObserversNotNotifiedOfUntrackedRsids) {
- const std::string rsid = "1";
- constexpr uint32_t ssrc = 111;
-
- MockSsrcBindingObserver rsid_resolution_observers[3];
- for (auto& observer : rsid_resolution_observers) {
- RegisterSsrcBindingObserver(&observer);
- EXPECT_CALL(observer, OnSsrcBoundToRsid(_, _)).Times(0);
- }
-
- // Since no sink is registered for this SSRC/RSID, expect the packet to not be
- // routed and no observers notified of the SSRC -> RSID binding.
- EXPECT_FALSE(demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid)));
-}
-
-// Ensure that observers are notified of SSRC bindings only once per unique
-// binding source (e.g., SSRC -> MID, SSRC -> RSID, etc.)
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidOnlyOnce) {
- const std::string mid = "v";
- constexpr uint32_t ssrc = 10;
-
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyMid(mid, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc)).Times(1);
-
- demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid));
- demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid));
-}
-
-// Ensure that when a new SSRC -> MID binding is discovered observers are also
-// notified of that, even if there has already been an SSRC bound to the MID.
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidWhenSsrcChanges) {
- const std::string mid = "v";
- constexpr uint32_t ssrc1 = 10;
- constexpr uint32_t ssrc2 = 11;
-
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyMid(mid, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- InSequence seq;
- EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc1)).Times(1);
- EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc2)).Times(1);
-
- auto p1 = CreatePacketWithSsrcMid(ssrc1, mid);
- demuxer_.OnRtpPacket(*p1);
-
- auto p2 = CreatePacketWithSsrcMid(ssrc2, mid);
- demuxer_.OnRtpPacket(*p2);
-}
-
-TEST_F(RtpDemuxerTest, DeregisteredRsidObserversNotInformedOfResolutions) {
- constexpr uint32_t ssrc = 111;
- const std::string rsid = "a";
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyRsid(rsid, &sink);
-
- // Register several, then deregister only one, to show that not all of the
- // observers had been forgotten when one was removed.
- MockSsrcBindingObserver observer_1;
- MockSsrcBindingObserver observer_2_removed;
- MockSsrcBindingObserver observer_3;
-
- RegisterSsrcBindingObserver(&observer_1);
- RegisterSsrcBindingObserver(&observer_2_removed);
- RegisterSsrcBindingObserver(&observer_3);
-
- DeregisterSsrcBindingObserver(&observer_2_removed);
-
- EXPECT_CALL(observer_1, OnSsrcBoundToRsid(rsid, ssrc)).Times(1);
- EXPECT_CALL(observer_2_removed, OnSsrcBoundToRsid(_, _)).Times(0);
- EXPECT_CALL(observer_3, OnSsrcBoundToRsid(rsid, ssrc)).Times(1);
-
- // The expected calls to OnSsrcBoundToRsid() will be triggered by this.
- demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid));
-}
-
-TEST_F(RtpDemuxerTest,
- PacketFittingBothRsidSinkAndSsrcSinkTriggersResolutionCallbacks) {
- constexpr uint32_t ssrc = 111;
- NiceMock<MockRtpPacketSink> ssrc_sink;
- AddSinkOnlySsrc(ssrc, &ssrc_sink);
-
- const std::string rsid = "a";
- NiceMock<MockRtpPacketSink> rsid_sink;
- AddSinkOnlyRsid(rsid, &rsid_sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- auto packet = CreatePacketWithSsrcRsid(ssrc, rsid);
- EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1);
- demuxer_.OnRtpPacket(*packet);
-}
-
-TEST_F(RtpDemuxerTest, MaliciousPeerCannotCauseMemoryOveruse) {
- const std::string mid = "v";
-
- NiceMock<MockRtpPacketSink> sink;
- AddSinkOnlyMid(mid, &sink);
-
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
-
- EXPECT_CALL(observer, OnSsrcBoundToMid(_, _))
- .Times(AtMost(RtpDemuxer::kMaxSsrcBindings));
-
- for (int i = 0; i < RtpDemuxer::kMaxSsrcBindings + 1; i++) {
- auto packet = CreatePacketWithSsrcMid(i, mid);
- EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
- }
-}
-
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtpDemuxerTest, CriteriaMustBeNonEmpty) {
+TEST_F(RtpDemuxerDeathTest, CriteriaMustBeNonEmpty) {
MockRtpPacketSink sink;
RtpDemuxerCriteria criteria;
EXPECT_DEATH(AddSink(criteria, &sink), "");
}
-TEST_F(RtpDemuxerTest, RsidMustBeAlphaNumeric) {
+TEST_F(RtpDemuxerDeathTest, RsidMustBeAlphaNumeric) {
MockRtpPacketSink sink;
EXPECT_DEATH(AddSinkOnlyRsid("a_3", &sink), "");
}
-TEST_F(RtpDemuxerTest, MidMustBeToken) {
+TEST_F(RtpDemuxerDeathTest, MidMustBeToken) {
MockRtpPacketSink sink;
EXPECT_DEATH(AddSinkOnlyMid("a(3)", &sink), "");
}
-TEST_F(RtpDemuxerTest, RsidMustNotExceedMaximumLength) {
+TEST_F(RtpDemuxerDeathTest, RsidMustNotExceedMaximumLength) {
MockRtpPacketSink sink;
std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
EXPECT_DEATH(AddSinkOnlyRsid(rsid, &sink), "");
}
-TEST_F(RtpDemuxerTest, MidMustNotExceedMaximumLength) {
+TEST_F(RtpDemuxerDeathTest, MidMustNotExceedMaximumLength) {
MockRtpPacketSink sink;
std::string mid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
EXPECT_DEATH(AddSinkOnlyMid(mid, &sink), "");
}
-TEST_F(RtpDemuxerTest, DoubleRegisterationOfSsrcBindingObserverDisallowed) {
- MockSsrcBindingObserver observer;
- RegisterSsrcBindingObserver(&observer);
- EXPECT_DEATH(RegisterSsrcBindingObserver(&observer), "");
-}
-
-TEST_F(RtpDemuxerTest,
- DregisterationOfNeverRegisteredSsrcBindingObserverDisallowed) {
- MockSsrcBindingObserver observer;
- EXPECT_DEATH(DeregisterSsrcBindingObserver(&observer), "");
-}
-
#endif
} // namespace
diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc
index 279eb588d7..ad979a590a 100644
--- a/call/rtp_payload_params.cc
+++ b/call/rtp_payload_params.cc
@@ -93,15 +93,6 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
h264_header.packetization_mode =
info.codecSpecific.H264.packetization_mode;
rtp->simulcastIdx = spatial_index.value_or(0);
- rtp->frame_marking.temporal_id = kNoTemporalIdx;
- if (info.codecSpecific.H264.temporal_idx != kNoTemporalIdx) {
- rtp->frame_marking.temporal_id = info.codecSpecific.H264.temporal_idx;
- rtp->frame_marking.layer_id = 0;
- rtp->frame_marking.independent_frame =
- info.codecSpecific.H264.idr_frame;
- rtp->frame_marking.base_layer_sync =
- info.codecSpecific.H264.base_layer_sync;
- }
return;
}
case kVideoCodecMultiplex:
@@ -139,10 +130,7 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
: ssrc_(ssrc),
generic_picture_id_experiment_(
absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"),
- "Enabled")),
- generic_descriptor_experiment_(
- !absl::StartsWith(trials.Lookup("WebRTC-GenericDescriptor"),
- "Disabled")) {
+ "Enabled")) {
for (auto& spatial_layer : last_shared_frame_id_)
spatial_layer.fill(-1);
@@ -186,9 +174,8 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
SetCodecSpecific(&rtp_video_header, first_frame_in_picture);
- if (generic_descriptor_experiment_)
- SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
- &rtp_video_header);
+ SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
+ &rtp_video_header);
return rtp_video_header;
}
@@ -237,14 +224,6 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header,
vp9_header.tl0_pic_idx = state_.tl0_pic_idx;
}
}
- if (rtp_video_header->codec == kVideoCodecH264) {
- if (rtp_video_header->frame_marking.temporal_id != kNoTemporalIdx) {
- if (rtp_video_header->frame_marking.temporal_id == 0) {
- ++state_.tl0_pic_idx;
- }
- rtp_video_header->frame_marking.tl0_pic_idx = state_.tl0_pic_idx;
- }
- }
if (generic_picture_id_experiment_ &&
rtp_video_header->codec == kVideoCodecGeneric) {
rtp_video_header->video_type_header.emplace<RTPVideoHeaderLegacyGeneric>()
@@ -261,9 +240,12 @@ RtpPayloadParams::GenericDescriptorFromFrameInfo(
generic.frame_id = frame_id;
generic.dependencies = dependencies_calculator_.FromBuffersUsage(
frame_type, frame_id, frame_info.encoder_buffers);
+ generic.chain_diffs =
+ chains_calculator_.From(frame_id, frame_info.part_of_chain);
generic.spatial_index = frame_info.spatial_id;
generic.temporal_index = frame_info.temporal_id;
generic.decode_target_indications = frame_info.decode_target_indications;
+ generic.active_decode_targets = frame_info.active_decode_targets;
return generic;
}
@@ -273,6 +255,11 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info,
RTPVideoHeader* rtp_video_header) {
if (codec_specific_info && codec_specific_info->generic_frame_info &&
!codec_specific_info->generic_frame_info->encoder_buffers.empty()) {
+ if (is_keyframe) {
+ // Key frame resets all chains it is in.
+ chains_calculator_.Reset(
+ codec_specific_info->generic_frame_info->part_of_chain);
+ }
rtp_video_header->generic =
GenericDescriptorFromFrameInfo(*codec_specific_info->generic_frame_info,
frame_id, rtp_video_header->frame_type);
diff --git a/call/rtp_payload_params.h b/call/rtp_payload_params.h
index 13b1050378..2e0faeb5c9 100644
--- a/call/rtp_payload_params.h
+++ b/call/rtp_payload_params.h
@@ -19,6 +19,7 @@
#include "call/rtp_config.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/video_coding/chain_diff_calculator.h"
#include "modules/video_coding/frame_dependencies_calculator.h"
#include "modules/video_coding/include/video_codec_interface.h"
@@ -88,6 +89,7 @@ class RtpPayloadParams final {
RTPVideoHeader::GenericDescriptorInfo* generic);
FrameDependenciesCalculator dependencies_calculator_;
+ ChainDiffCalculator chains_calculator_;
// TODO(bugs.webrtc.org/10242): Remove once all encoder-wrappers are updated.
// Holds the last shared frame id for a given (spatial, temporal) layer.
std::array<std::array<int64_t, RtpGenericFrameDescriptor::kMaxTemporalLayers>,
@@ -112,7 +114,6 @@ class RtpPayloadParams final {
RtpPayloadState state_;
const bool generic_picture_id_experiment_;
- const bool generic_descriptor_experiment_;
};
} // namespace webrtc
#endif // CALL_RTP_PAYLOAD_PARAMS_H_
diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc
index 1045504b44..a5510b0240 100644
--- a/call/rtp_payload_params_unittest.cc
+++ b/call/rtp_payload_params_unittest.cc
@@ -32,6 +32,7 @@
using ::testing::ElementsAre;
using ::testing::IsEmpty;
+using ::testing::SizeIs;
namespace webrtc {
namespace {
@@ -147,54 +148,6 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
codec_info.codecSpecific.VP9.end_of_picture);
}
-TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_H264) {
- RtpPayloadState state;
- state.picture_id = kPictureId;
- state.tl0_pic_idx = kInitialTl0PicIdx1;
- RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig());
-
- EncodedImage encoded_image;
- CodecSpecificInfo codec_info;
- CodecSpecificInfoH264* h264info = &codec_info.codecSpecific.H264;
- codec_info.codecType = kVideoCodecH264;
- h264info->packetization_mode = H264PacketizationMode::SingleNalUnit;
- h264info->temporal_idx = kNoTemporalIdx;
-
- RTPVideoHeader header =
- params.GetRtpVideoHeader(encoded_image, &codec_info, 10);
-
- EXPECT_EQ(0, header.simulcastIdx);
- EXPECT_EQ(kVideoCodecH264, header.codec);
- const auto& h264 = absl::get<RTPVideoHeaderH264>(header.video_type_header);
- EXPECT_EQ(H264PacketizationMode::SingleNalUnit, h264.packetization_mode);
-
- // test temporal param 1
- h264info->temporal_idx = 1;
- h264info->base_layer_sync = true;
- h264info->idr_frame = false;
-
- header = params.GetRtpVideoHeader(encoded_image, &codec_info, 20);
-
- EXPECT_EQ(kVideoCodecH264, header.codec);
- EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1);
- EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx);
- EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync);
- EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame);
-
- // test temporal param 2
- h264info->temporal_idx = 0;
- h264info->base_layer_sync = false;
- h264info->idr_frame = true;
-
- header = params.GetRtpVideoHeader(encoded_image, &codec_info, 30);
-
- EXPECT_EQ(kVideoCodecH264, header.codec);
- EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1 + 1);
- EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx);
- EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync);
- EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame);
-}
-
TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) {
RtpPayloadState state;
state.picture_id = kInitialPictureId1;
@@ -349,8 +302,6 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) {
}
TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) {
- test::ScopedFieldTrials generic_picture_id(
- "WebRTC-GenericDescriptor/Enabled/");
RtpPayloadState state{};
EncodedImage encoded_image;
@@ -375,8 +326,6 @@ TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) {
}
TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
- test::ScopedFieldTrials generic_picture_id(
- "WebRTC-GenericDescriptor/Enabled/");
RtpPayloadState state;
EncodedImage encoded_image;
CodecSpecificInfo codec_info;
@@ -388,6 +337,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
GenericFrameInfo::Builder().S(1).T(0).Dtis("S").Build();
codec_info.generic_frame_info->encoder_buffers = {
{/*id=*/0, /*referenced=*/false, /*updated=*/true}};
+ codec_info.generic_frame_info->part_of_chain = {true, false};
RTPVideoHeader key_header =
params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/1);
@@ -398,12 +348,14 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
EXPECT_THAT(key_header.generic->dependencies, IsEmpty());
EXPECT_THAT(key_header.generic->decode_target_indications,
ElementsAre(DecodeTargetIndication::kSwitch));
+ EXPECT_THAT(key_header.generic->chain_diffs, SizeIs(2));
encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
codec_info.generic_frame_info =
GenericFrameInfo::Builder().S(2).T(3).Dtis("D").Build();
codec_info.generic_frame_info->encoder_buffers = {
{/*id=*/0, /*referenced=*/true, /*updated=*/false}};
+ codec_info.generic_frame_info->part_of_chain = {false, false};
RTPVideoHeader delta_header =
params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/3);
@@ -414,6 +366,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
EXPECT_THAT(delta_header.generic->dependencies, ElementsAre(1));
EXPECT_THAT(delta_header.generic->decode_target_indications,
ElementsAre(DecodeTargetIndication::kDiscardable));
+ EXPECT_THAT(delta_header.generic->chain_diffs, SizeIs(2));
}
class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
@@ -421,9 +374,7 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
enum LayerSync { kNoSync, kSync };
RtpPayloadParamsVp8ToGenericTest()
- : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"),
- state_(),
- params_(123, &state_, trials_config_) {}
+ : state_(), params_(123, &state_, trials_config_) {}
void ConvertAndCheck(int temporal_index,
int64_t shared_frame_id,
@@ -459,7 +410,6 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
}
protected:
- test::ScopedFieldTrials generic_descriptor_field_trial_;
FieldTrialBasedConfig trials_config_;
RtpPayloadState state_;
RtpPayloadParams params_;
@@ -518,9 +468,7 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test {
enum LayerSync { kNoSync, kSync };
RtpPayloadParamsH264ToGenericTest()
- : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"),
- state_(),
- params_(123, &state_, trials_config_) {}
+ : state_(), params_(123, &state_, trials_config_) {}
void ConvertAndCheck(int temporal_index,
int64_t shared_frame_id,
@@ -556,7 +504,6 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test {
}
protected:
- test::ScopedFieldTrials generic_descriptor_field_trial_;
FieldTrialBasedConfig trials_config_;
RtpPayloadState state_;
RtpPayloadParams params_;
diff --git a/call/rtp_rtcp_demuxer_helper.cc b/call/rtp_rtcp_demuxer_helper.cc
deleted file mode 100644
index 125169b077..0000000000
--- a/call/rtp_rtcp_demuxer_helper.cc
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/rtp_rtcp_demuxer_helper.h"
-
-#include "modules/rtp_rtcp/source/byte_io.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
-
-namespace webrtc {
-
-absl::optional<uint32_t> ParseRtcpPacketSenderSsrc(
- rtc::ArrayView<const uint8_t> packet) {
- rtcp::CommonHeader header;
- for (const uint8_t* next_packet = packet.begin(); next_packet < packet.end();
- next_packet = header.NextPacket()) {
- if (!header.Parse(next_packet, packet.end() - next_packet)) {
- return absl::nullopt;
- }
-
- switch (header.type()) {
- case rtcp::Bye::kPacketType:
- case rtcp::ExtendedReports::kPacketType:
- case rtcp::Psfb::kPacketType:
- case rtcp::ReceiverReport::kPacketType:
- case rtcp::Rtpfb::kPacketType:
- case rtcp::SenderReport::kPacketType: {
- // Sender SSRC at the beginning of the RTCP payload.
- if (header.payload_size_bytes() >= sizeof(uint32_t)) {
- const uint32_t ssrc_sender =
- ByteReader<uint32_t>::ReadBigEndian(header.payload());
- return ssrc_sender;
- } else {
- return absl::nullopt;
- }
- }
- }
- }
-
- return absl::nullopt;
-}
-
-} // namespace webrtc
diff --git a/call/rtp_rtcp_demuxer_helper.h b/call/rtp_rtcp_demuxer_helper.h
deleted file mode 100644
index 6134d56143..0000000000
--- a/call/rtp_rtcp_demuxer_helper.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef CALL_RTP_RTCP_DEMUXER_HELPER_H_
-#define CALL_RTP_RTCP_DEMUXER_HELPER_H_
-
-#include <stddef.h>
-#include <stdint.h>
-
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-
-namespace webrtc {
-
-// TODO(eladalon): Remove this in the next CL.
-template <typename Container>
-bool MultimapAssociationExists(const Container& multimap,
- const typename Container::key_type& key,
- const typename Container::mapped_type& val) {
- auto it_range = multimap.equal_range(key);
- using Reference = typename Container::const_reference;
- return std::any_of(it_range.first, it_range.second,
- [val](Reference elem) { return elem.second == val; });
-}
-
-template <typename Container, typename Value>
-size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) {
- size_t count = 0;
- for (auto it = multimap->begin(); it != multimap->end();) {
- if (it->second == value) {
- it = multimap->erase(it);
- ++count;
- } else {
- ++it;
- }
- }
- return count;
-}
-
-template <typename Map, typename Value>
-size_t RemoveFromMapByValue(Map* map, const Value& value) {
- size_t count = 0;
- for (auto it = map->begin(); it != map->end();) {
- if (it->second == value) {
- it = map->erase(it);
- ++count;
- } else {
- ++it;
- }
- }
- return count;
-}
-
-template <typename Container, typename Key>
-bool ContainerHasKey(const Container& c, const Key& k) {
- return std::find(c.cbegin(), c.cend(), k) != c.cend();
-}
-
-// TODO(eladalon): Remove this in the next CL.
-template <typename Container>
-bool MultimapHasValue(const Container& c,
- const typename Container::mapped_type& v) {
- auto predicate = [v](const typename Container::value_type& it) {
- return it.second == v;
- };
- return std::any_of(c.cbegin(), c.cend(), predicate);
-}
-
-template <typename Map>
-bool MapHasValue(const Map& map, const typename Map::mapped_type& value) {
- auto predicate = [value](const typename Map::value_type& it) {
- return it.second == value;
- };
- return std::any_of(map.cbegin(), map.cend(), predicate);
-}
-
-template <typename Container>
-bool MultimapHasKey(const Container& c,
- const typename Container::key_type& key) {
- auto it_range = c.equal_range(key);
- return it_range.first != it_range.second;
-}
-
-absl::optional<uint32_t> ParseRtcpPacketSenderSsrc(
- rtc::ArrayView<const uint8_t> packet);
-
-} // namespace webrtc
-
-#endif // CALL_RTP_RTCP_DEMUXER_HELPER_H_
diff --git a/call/rtp_rtcp_demuxer_helper_unittest.cc b/call/rtp_rtcp_demuxer_helper_unittest.cc
deleted file mode 100644
index 17e6617fb0..0000000000
--- a/call/rtp_rtcp_demuxer_helper_unittest.cc
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/rtp_rtcp_demuxer_helper.h"
-
-#include <string.h>
-
-#include <cstdio>
-
-#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/pli.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
-#include "rtc_base/arraysize.h"
-#include "rtc_base/buffer.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-namespace {
-constexpr uint32_t kSsrc = 8374;
-} // namespace
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ByePacket) {
- webrtc::rtcp::Bye rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest,
- ParseRtcpPacketSenderSsrc_ExtendedReportsPacket) {
- webrtc::rtcp::ExtendedReports rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_PsfbPacket) {
- webrtc::rtcp::Pli rtcp_packet; // Psfb is abstract; use a subclass.
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ReceiverReportPacket) {
- webrtc::rtcp::ReceiverReport rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_RtpfbPacket) {
- // Rtpfb is abstract; use a subclass.
- webrtc::rtcp::RapidResyncRequest rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_SenderReportPacket) {
- webrtc::rtcp::SenderReport rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_EQ(ssrc, kSsrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_MalformedRtcpPacket) {
- uint8_t garbage[100];
- memset(&garbage[0], 0, arraysize(garbage));
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(garbage);
- EXPECT_FALSE(ssrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest,
- ParseRtcpPacketSenderSsrc_RtcpMessageWithoutSenderSsrc) {
- webrtc::rtcp::ExtendedJitterReport rtcp_packet; // Has no sender SSRC.
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(raw_packet);
- EXPECT_FALSE(ssrc);
-}
-
-TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_TruncatedRtcpMessage) {
- webrtc::rtcp::Bye rtcp_packet;
- rtcp_packet.SetSenderSsrc(kSsrc);
- rtc::Buffer raw_packet = rtcp_packet.Build();
-
- constexpr size_t rtcp_length_bytes = 8;
- ASSERT_EQ(rtcp_length_bytes, raw_packet.size());
-
- absl::optional<uint32_t> ssrc = ParseRtcpPacketSenderSsrc(
- rtc::ArrayView<const uint8_t>(raw_packet.data(), rtcp_length_bytes - 1));
- EXPECT_FALSE(ssrc);
-}
-
-} // namespace webrtc
diff --git a/call/rtp_stream_receiver_controller.h b/call/rtp_stream_receiver_controller.h
index 045af3cf8d..62447aa521 100644
--- a/call/rtp_stream_receiver_controller.h
+++ b/call/rtp_stream_receiver_controller.h
@@ -14,7 +14,7 @@
#include "call/rtp_demuxer.h"
#include "call/rtp_stream_receiver_controller_interface.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
namespace webrtc {
@@ -63,7 +63,7 @@ class RtpStreamReceiverController
// to be called on the same thread, and OnRtpPacket to be called
// by a single, but possibly distinct, thread. But applications not
// using Call may have use threads differently.
- rtc::CriticalSection lock_;
+ rtc::RecursiveCriticalSection lock_;
RtpDemuxer demuxer_ RTC_GUARDED_BY(&lock_);
};
diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc
index 56c5e55ca1..9baf164a60 100644
--- a/call/rtp_transport_controller_send.cc
+++ b/call/rtp_transport_controller_send.cc
@@ -91,13 +91,16 @@ RtpTransportControllerSend::RtpTransportControllerSend(
event_log,
trials,
process_thread_.get())),
- task_queue_pacer_(use_task_queue_pacer_
- ? new TaskQueuePacedSender(clock,
- &packet_router_,
- event_log,
- trials,
- task_queue_factory)
- : nullptr),
+ task_queue_pacer_(
+ use_task_queue_pacer_
+ ? new TaskQueuePacedSender(
+ clock,
+ &packet_router_,
+ event_log,
+ trials,
+ task_queue_factory,
+ /*hold_back_window = */ PacingController::kMinSleepTime)
+ : nullptr),
observer_(nullptr),
controller_factory_override_(controller_factory),
controller_factory_fallback_(
diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc
index 8c31a848aa..fb6825e719 100644
--- a/call/rtp_video_sender.cc
+++ b/call/rtp_video_sender.cc
@@ -22,21 +22,22 @@
#include "api/video_codecs/video_codec.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "modules/pacing/packet_router.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_queue.h"
namespace webrtc {
namespace webrtc_internal_rtp_video_sender {
RtpStreamSender::RtpStreamSender(
- std::unique_ptr<RtpRtcp> rtp_rtcp,
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
std::unique_ptr<RTPSenderVideo> sender_video,
std::unique_ptr<VideoFecGenerator> fec_generator)
: rtp_rtcp(std::move(rtp_rtcp)),
@@ -196,10 +197,11 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
FrameEncryptorInterface* frame_encryptor,
const CryptoOptions& crypto_options,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+ bool use_deferred_fec,
const WebRtcKeyValueConfig& trials) {
RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0);
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = false;
configuration.receiver_only = false;
@@ -243,7 +245,9 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
std::unique_ptr<VideoFecGenerator> fec_generator =
MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials);
configuration.fec_generator = fec_generator.get();
- video_config.fec_generator = fec_generator.get();
+ if (!use_deferred_fec) {
+ video_config.fec_generator = fec_generator.get();
+ }
configuration.rtx_send_ssrc =
rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]);
@@ -252,7 +256,8 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
configuration.need_rtp_packet_infos = rtp_config.lntf.enabled;
- auto rtp_rtcp = RtpRtcp::Create(configuration);
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp(
+ ModuleRtpRtcpImpl2::Create(configuration));
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
@@ -281,6 +286,7 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead();
}
video_config.frame_transformer = frame_transformer;
+ video_config.send_transport_queue = transport->GetWorkerQueue()->Get();
auto sender_video = std::make_unique<RTPSenderVideo>(video_config);
rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video),
std::move(fec_generator));
@@ -335,6 +341,9 @@ RtpVideoSender::RtpVideoSender(
field_trials_.Lookup("WebRTC-UseEarlyLossDetection"),
"Disabled")),
has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)),
+ use_deferred_fec_(
+ absl::StartsWith(field_trials_.Lookup("WebRTC-DeferredFecGeneration"),
+ "Enabled")),
active_(false),
module_process_thread_(nullptr),
suspended_ssrcs_(std::move(suspended_ssrcs)),
@@ -353,6 +362,7 @@ RtpVideoSender::RtpVideoSender(
frame_encryptor,
crypto_options,
std::move(frame_transformer),
+ use_deferred_fec_,
field_trials_)),
rtp_config_(rtp_config),
codec_type_(GetVideoCodecType(rtp_config)),
@@ -457,15 +467,20 @@ void RtpVideoSender::DeRegisterProcessThread() {
}
void RtpVideoSender::SetActive(bool active) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (active_ == active)
return;
const std::vector<bool> active_modules(rtp_streams_.size(), active);
- SetActiveModules(active_modules);
+ SetActiveModulesLocked(active_modules);
}
void RtpVideoSender::SetActiveModules(const std::vector<bool> active_modules) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
+ return SetActiveModulesLocked(active_modules);
+}
+
+void RtpVideoSender::SetActiveModulesLocked(
+ const std::vector<bool> active_modules) {
RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size());
active_ = false;
for (size_t i = 0; i < active_modules.size(); ++i) {
@@ -480,17 +495,21 @@ void RtpVideoSender::SetActiveModules(const std::vector<bool> active_modules) {
}
bool RtpVideoSender::IsActive() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
+ return IsActiveLocked();
+}
+
+bool RtpVideoSender::IsActiveLocked() {
return active_ && !rtp_streams_.empty();
}
EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) {
+ const RTPFragmentationHeader* /*fragmentation*/) {
fec_controller_->UpdateWithEncodedData(encoded_image.size(),
encoded_image._frameType);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RTC_DCHECK(!rtp_streams_.empty());
if (!active_)
return Result(Result::ERROR_SEND_FAILED);
@@ -540,7 +559,6 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage(
rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image,
- fragmentation,
params_[stream_index].GetRtpVideoHeader(
encoded_image, codec_specific_info, shared_frame_id_),
expected_retransmission_time_ms);
@@ -564,8 +582,8 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
void RtpVideoSender::OnBitrateAllocationUpdated(
const VideoBitrateAllocation& bitrate) {
- rtc::CritScope lock(&crit_);
- if (IsActive()) {
+ MutexLock lock(&mutex_);
+ if (IsActiveLocked()) {
if (rtp_streams_.size() == 1) {
// If spatial scalability is enabled, it is covered by a single stream.
rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate);
@@ -617,7 +635,7 @@ void RtpVideoSender::ConfigureSsrcs() {
RTC_CHECK(ssrc_to_rtp_module_.empty());
for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
uint32_t ssrc = rtp_config_.ssrcs[i];
- RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
+ RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
// Restore RTP state if previous existed.
auto it = suspended_ssrcs_.find(ssrc);
@@ -634,7 +652,7 @@ void RtpVideoSender::ConfigureSsrcs() {
RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size());
for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
- RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
+ RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
auto it = suspended_ssrcs_.find(ssrc);
if (it != suspended_ssrcs_.end())
rtp_rtcp->SetRtxState(it->second);
@@ -707,7 +725,7 @@ std::map<uint32_t, RtpState> RtpVideoSender::GetRtpStates() const {
std::map<uint32_t, RtpPayloadState> RtpVideoSender::GetRtpPayloadStates()
const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
std::map<uint32_t, RtpPayloadState> payload_states;
for (const auto& param : params_) {
payload_states[param.ssrc()] = param.state();
@@ -718,7 +736,7 @@ std::map<uint32_t, RtpPayloadState> RtpVideoSender::GetRtpPayloadStates()
void RtpVideoSender::OnTransportOverheadChanged(
size_t transport_overhead_bytes_per_packet) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet;
size_t max_rtp_packet_size =
@@ -732,7 +750,7 @@ void RtpVideoSender::OnTransportOverheadChanged(
void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update,
int framerate) {
// Substract overhead from bitrate.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
size_t num_active_streams = 0;
size_t overhead_bytes_per_packet = 0;
for (const auto& stream : rtp_streams_) {
@@ -836,27 +854,39 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params,
*sent_nack_rate_bps = 0;
*sent_fec_rate_bps = 0;
for (const RtpStreamSender& stream : rtp_streams_) {
- if (stream.fec_generator) {
- stream.fec_generator->SetProtectionParameters(*delta_params, *key_params);
- *sent_fec_rate_bps += stream.fec_generator->CurrentFecRate().bps();
+ if (use_deferred_fec_) {
+ stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params);
+
+ auto send_bitrate = stream.rtp_rtcp->GetSendRates();
+ *sent_video_rate_bps += send_bitrate[RtpPacketMediaType::kVideo].bps();
+ *sent_fec_rate_bps +=
+ send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps();
+ *sent_nack_rate_bps +=
+ send_bitrate[RtpPacketMediaType::kRetransmission].bps();
+ } else {
+ if (stream.fec_generator) {
+ stream.fec_generator->SetProtectionParameters(*delta_params,
+ *key_params);
+ *sent_fec_rate_bps += stream.fec_generator->CurrentFecRate().bps();
+ }
+ *sent_video_rate_bps += stream.sender_video->VideoBitrateSent();
+ *sent_nack_rate_bps +=
+ stream.rtp_rtcp->GetSendRates()[RtpPacketMediaType::kRetransmission]
+ .bps<uint32_t>();
}
- *sent_video_rate_bps += stream.sender_video->VideoBitrateSent();
- *sent_nack_rate_bps +=
- stream.rtp_rtcp->GetSendRates()[RtpPacketMediaType::kRetransmission]
- .bps<uint32_t>();
}
return 0;
}
void RtpVideoSender::SetFecAllowed(bool fec_allowed) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
fec_allowed_ = fec_allowed;
}
void RtpVideoSender::OnPacketFeedbackVector(
std::vector<StreamPacketInfo> packet_feedback_vector) {
if (fec_controller_->UseLossVectorMask()) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
for (const StreamPacketInfo& packet : packet_feedback_vector) {
loss_mask_vector_.push_back(!packet.received);
}
diff --git a/call/rtp_video_sender.h b/call/rtp_video_sender.h
index f7d8c763d2..876f6e9cb2 100644
--- a/call/rtp_video_sender.h
+++ b/call/rtp_video_sender.h
@@ -29,14 +29,15 @@
#include "call/rtp_transport_controller_send_interface.h"
#include "call/rtp_video_sender_interface.h"
#include "modules/rtp_rtcp/include/flexfec_sender.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_limiter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -44,14 +45,13 @@ namespace webrtc {
class FrameEncryptorInterface;
class RTPFragmentationHeader;
-class RtpRtcp;
class RtpTransportControllerSendInterface;
namespace webrtc_internal_rtp_video_sender {
// RTP state for a single simulcast stream. Internal to the implementation of
// RtpVideoSender.
struct RtpStreamSender {
- RtpStreamSender(std::unique_ptr<RtpRtcp> rtp_rtcp,
+ RtpStreamSender(std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
std::unique_ptr<RTPSenderVideo> sender_video,
std::unique_ptr<VideoFecGenerator> fec_generator);
~RtpStreamSender();
@@ -60,7 +60,7 @@ struct RtpStreamSender {
RtpStreamSender& operator=(RtpStreamSender&&) = default;
// Note: Needs pointer stability.
- std::unique_ptr<RtpRtcp> rtp_rtcp;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp;
std::unique_ptr<RTPSenderVideo> sender_video;
std::unique_ptr<VideoFecGenerator> fec_generator;
};
@@ -96,62 +96,75 @@ class RtpVideoSender : public RtpVideoSenderInterface,
// |module_process_thread| was created (libjingle's worker thread).
// TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue,
// maybe |worker_queue|.
- void RegisterProcessThread(ProcessThread* module_process_thread) override;
- void DeRegisterProcessThread() override;
+ void RegisterProcessThread(ProcessThread* module_process_thread)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ void DeRegisterProcessThread() RTC_LOCKS_EXCLUDED(mutex_) override;
// RtpVideoSender will only route packets if being active, all packets will be
// dropped otherwise.
- void SetActive(bool active) override;
+ void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override;
// Sets the sending status of the rtp modules and appropriately sets the
// payload router to active if any rtp modules are active.
- void SetActiveModules(const std::vector<bool> active_modules) override;
- bool IsActive() override;
+ void SetActiveModules(const std::vector<bool> active_modules)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override;
- void OnNetworkAvailability(bool network_available) override;
- std::map<uint32_t, RtpState> GetRtpStates() const override;
- std::map<uint32_t, RtpPayloadState> GetRtpPayloadStates() const override;
+ void OnNetworkAvailability(bool network_available)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ std::map<uint32_t, RtpState> GetRtpStates() const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ std::map<uint32_t, RtpPayloadState> GetRtpPayloadStates() const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
- void DeliverRtcp(const uint8_t* packet, size_t length) override;
+ void DeliverRtcp(const uint8_t* packet, size_t length)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
// Implements webrtc::VCMProtectionCallback.
int ProtectionRequest(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params,
uint32_t* sent_video_rate_bps,
uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) override;
+ uint32_t* sent_fec_rate_bps)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
// Implements FecControllerOverride.
- void SetFecAllowed(bool fec_allowed) override;
+ void SetFecAllowed(bool fec_allowed) RTC_LOCKS_EXCLUDED(mutex_) override;
// Implements EncodedImageCallback.
// Returns 0 if the packet was routed / sent, -1 otherwise.
EncodedImageCallback::Result OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) override;
+ const RTPFragmentationHeader* fragmentation)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
- void OnBitrateAllocationUpdated(
- const VideoBitrateAllocation& bitrate) override;
+ void OnBitrateAllocationUpdated(const VideoBitrateAllocation& bitrate)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
- void OnTransportOverheadChanged(
- size_t transport_overhead_bytes_per_packet) override;
- void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) override;
- uint32_t GetPayloadBitrateBps() const override;
- uint32_t GetProtectionBitrateBps() const override;
- void SetEncodingData(size_t width,
- size_t height,
- size_t num_temporal_layers) override;
+ void OnTransportOverheadChanged(size_t transport_overhead_bytes_per_packet)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ uint32_t GetPayloadBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override;
+ uint32_t GetProtectionBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override;
+ void SetEncodingData(size_t width, size_t height, size_t num_temporal_layers)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
uint32_t ssrc,
- rtc::ArrayView<const uint16_t> sequence_numbers) const override;
+ rtc::ArrayView<const uint16_t> sequence_numbers) const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
// From StreamFeedbackObserver.
void OnPacketFeedbackVector(
- std::vector<StreamPacketInfo> packet_feedback_vector) override;
+ std::vector<StreamPacketInfo> packet_feedback_vector)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
private:
- void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void SetActiveModulesLocked(const std::vector<bool> active_modules)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void ConfigureProtection();
void ConfigureSsrcs();
void ConfigureRids();
@@ -163,18 +176,19 @@ class RtpVideoSender : public RtpVideoSenderInterface,
const bool account_for_packetization_overhead_;
const bool use_early_loss_detection_;
const bool has_packet_feedback_;
+ const bool use_deferred_fec_;
- // TODO(holmer): Remove crit_ once RtpVideoSender runs on the
+ // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the
// transport task queue.
- rtc::CriticalSection crit_;
- bool active_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ bool active_ RTC_GUARDED_BY(mutex_);
ProcessThread* module_process_thread_;
rtc::ThreadChecker module_process_thread_checker_;
std::map<uint32_t, RtpState> suspended_ssrcs_;
const std::unique_ptr<FecController> fec_controller_;
- bool fec_allowed_ RTC_GUARDED_BY(crit_);
+ bool fec_allowed_ RTC_GUARDED_BY(mutex_);
// Rtp modules are assumed to be sorted in simulcast index order.
const std::vector<webrtc_internal_rtp_video_sender::RtpStreamSender>
@@ -188,21 +202,21 @@ class RtpVideoSender : public RtpVideoSenderInterface,
// rewrite the frame id), therefore |shared_frame_id| has to live in a place
// where we are aware of all the different streams.
int64_t shared_frame_id_ = 0;
- std::vector<RtpPayloadParams> params_ RTC_GUARDED_BY(crit_);
+ std::vector<RtpPayloadParams> params_ RTC_GUARDED_BY(mutex_);
- size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(crit_);
+ size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(mutex_);
uint32_t protection_bitrate_bps_;
uint32_t encoder_target_rate_bps_;
- std::vector<bool> loss_mask_vector_ RTC_GUARDED_BY(crit_);
+ std::vector<bool> loss_mask_vector_ RTC_GUARDED_BY(mutex_);
- std::vector<FrameCounts> frame_counts_ RTC_GUARDED_BY(crit_);
+ std::vector<FrameCounts> frame_counts_ RTC_GUARDED_BY(mutex_);
FrameCountObserver* const frame_count_observer_;
// Effectively const map from SSRC to RtpRtcp, for all media SSRCs.
// This map is set at construction time and never changed, but it's
// non-trivial to make it properly const.
- std::map<uint32_t, RtpRtcp*> ssrc_to_rtp_module_;
+ std::map<uint32_t, RtpRtcpInterface*> ssrc_to_rtp_module_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender);
};
diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc
index a87196111a..8a88a24e3b 100644
--- a/call/rtp_video_sender_unittest.cc
+++ b/call/rtp_video_sender_unittest.cc
@@ -56,7 +56,7 @@ const int kDependencyDescriptorExtensionId = 8;
class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver {
public:
- MOCK_METHOD1(OnReceivedIntraFrameRequest, void(uint32_t));
+ MOCK_METHOD(void, OnReceivedIntraFrameRequest, (uint32_t), (override));
};
RtpSenderObservers CreateObservers(
@@ -361,8 +361,10 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) {
TEST(RtpVideoSenderTest, FrameCountCallbacks) {
class MockFrameCountObserver : public FrameCountObserver {
public:
- MOCK_METHOD2(FrameCountUpdated,
- void(const FrameCounts& frame_counts, uint32_t ssrc));
+ MOCK_METHOD(void,
+ FrameCountUpdated,
+ (const FrameCounts& frame_counts, uint32_t ssrc),
+ (override));
} callback;
RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {},
@@ -676,8 +678,6 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) {
}
TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
- test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/");
-
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
test.router()->SetActive(true);
@@ -705,9 +705,9 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
- GenericFrameInfo::Builder().T(0).Dtis("S").Build(),
- GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(),
- GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().T(0).Dtis("S"),
+ FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}),
+ FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}),
};
// Send two tiny images, mapping to single RTP packets.
@@ -742,8 +742,6 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
}
TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
- test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/");
-
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
test.router()->SetActive(true);
@@ -771,9 +769,9 @@ TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
- GenericFrameInfo::Builder().T(0).Dtis("S").Build(),
- GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(),
- GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().T(0).Dtis("S"),
+ FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}),
+ FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}),
};
// Send two tiny images, mapping to single RTP packets.
diff --git a/call/simulated_network.cc b/call/simulated_network.cc
index b298fdb4e2..2ed9140fa5 100644
--- a/call/simulated_network.cc
+++ b/call/simulated_network.cc
@@ -87,7 +87,7 @@ SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed)
SimulatedNetwork::~SimulatedNetwork() = default;
void SimulatedNetwork::SetConfig(const Config& config) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
config_state_.config = config; // Shallow copy of the struct.
double prob_loss = config.loss_percent / 100.0;
if (config_state_.config.avg_burst_loss_length == -1) {
@@ -113,12 +113,12 @@ void SimulatedNetwork::SetConfig(const Config& config) {
void SimulatedNetwork::UpdateConfig(
std::function<void(BuiltInNetworkBehaviorConfig*)> config_modifier) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
config_modifier(&config_state_.config);
}
void SimulatedNetwork::PauseTransmissionUntil(int64_t until_us) {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
config_state_.pause_transmission_until_us = until_us;
}
@@ -260,7 +260,7 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state,
}
SimulatedNetwork::ConfigState SimulatedNetwork::GetConfigState() const {
- rtc::CritScope crit(&config_lock_);
+ MutexLock lock(&config_lock_);
return config_state_;
}
diff --git a/call/simulated_network.h b/call/simulated_network.h
index 2ff90ec284..b53ecc0ddb 100644
--- a/call/simulated_network.h
+++ b/call/simulated_network.h
@@ -20,9 +20,9 @@
#include "api/test/simulated_network.h"
#include "api/units/data_size.h"
#include "api/units/timestamp.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -96,7 +96,7 @@ class SimulatedNetwork : public SimulatedNetworkInterface {
RTC_RUN_ON(&process_checker_);
ConfigState GetConfigState() const;
- rtc::CriticalSection config_lock_;
+ mutable Mutex config_lock_;
// |process_checker_| guards the data structures involved in delay and loss
// processes, such as the packet queues.
diff --git a/call/ssrc_binding_observer.h b/call/ssrc_binding_observer.h
deleted file mode 100644
index ada505610f..0000000000
--- a/call/ssrc_binding_observer.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef CALL_SSRC_BINDING_OBSERVER_H_
-#define CALL_SSRC_BINDING_OBSERVER_H_
-
-#include <string>
-
-namespace webrtc {
-
-// With newer versions of SDP, SSRC is often not explicitly signaled and must
-// be learned on the fly. This happens by correlating packet SSRCs with included
-// RTP extension headers like MID and RSID, or by receiving information from
-// RTCP messages.
-// SsrcBindingObservers will be notified when a new binding is learned, which
-// can happen during call setup and/or during the call.
-class SsrcBindingObserver {
- public:
- virtual ~SsrcBindingObserver() = default;
-
- virtual void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) {}
-
- virtual void OnSsrcBoundToMid(const std::string& mid, uint32_t ssrc) {}
-
- virtual void OnSsrcBoundToMidRsid(const std::string& mid,
- const std::string& rsid,
- uint32_t ssrc) {}
-
- virtual void OnSsrcBoundToPayloadType(uint8_t payload_type, uint32_t ssrc) {}
-};
-
-} // namespace webrtc
-
-#endif // CALL_SSRC_BINDING_OBSERVER_H_
diff --git a/call/test/mock_audio_send_stream.h b/call/test/mock_audio_send_stream.h
index 489e826d0e..4164dd550e 100644
--- a/call/test/mock_audio_send_stream.h
+++ b/call/test/mock_audio_send_stream.h
@@ -21,23 +21,26 @@ namespace test {
class MockAudioSendStream : public AudioSendStream {
public:
- MOCK_CONST_METHOD0(GetConfig, const webrtc::AudioSendStream::Config&());
- MOCK_METHOD1(Reconfigure, void(const Config& config));
- MOCK_METHOD0(Start, void());
- MOCK_METHOD0(Stop, void());
+ MOCK_METHOD(const webrtc::AudioSendStream::Config&,
+ GetConfig,
+ (),
+ (const, override));
+ MOCK_METHOD(void, Reconfigure, (const Config& config), (override));
+ MOCK_METHOD(void, Start, (), (override));
+ MOCK_METHOD(void, Stop, (), (override));
// GMock doesn't like move-only types, such as std::unique_ptr.
- virtual void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) {
+ void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) override {
SendAudioDataForMock(audio_frame.get());
}
- MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame));
- MOCK_METHOD4(SendTelephoneEvent,
- bool(int payload_type,
- int payload_frequency,
- int event,
- int duration_ms));
- MOCK_METHOD1(SetMuted, void(bool muted));
- MOCK_CONST_METHOD0(GetStats, Stats());
- MOCK_CONST_METHOD1(GetStats, Stats(bool has_remote_tracks));
+ MOCK_METHOD(void, SendAudioDataForMock, (webrtc::AudioFrame*));
+ MOCK_METHOD(
+ bool,
+ SendTelephoneEvent,
+ (int payload_type, int payload_frequency, int event, int duration_ms),
+ (override));
+ MOCK_METHOD(void, SetMuted, (bool muted), (override));
+ MOCK_METHOD(Stats, GetStats, (), (const, override));
+ MOCK_METHOD(Stats, GetStats, (bool has_remote_tracks), (const, override));
};
} // namespace test
} // namespace webrtc
diff --git a/call/test/mock_bitrate_allocator.h b/call/test/mock_bitrate_allocator.h
index f00ed79c59..b08916fe4f 100644
--- a/call/test/mock_bitrate_allocator.h
+++ b/call/test/mock_bitrate_allocator.h
@@ -18,10 +18,15 @@
namespace webrtc {
class MockBitrateAllocator : public BitrateAllocatorInterface {
public:
- MOCK_METHOD2(AddObserver,
- void(BitrateAllocatorObserver*, MediaStreamAllocationConfig));
- MOCK_METHOD1(RemoveObserver, void(BitrateAllocatorObserver*));
- MOCK_CONST_METHOD1(GetStartBitrate, int(BitrateAllocatorObserver*));
+ MOCK_METHOD(void,
+ AddObserver,
+ (BitrateAllocatorObserver*, MediaStreamAllocationConfig),
+ (override));
+ MOCK_METHOD(void, RemoveObserver, (BitrateAllocatorObserver*), (override));
+ MOCK_METHOD(int,
+ GetStartBitrate,
+ (BitrateAllocatorObserver*),
+ (const, override));
};
} // namespace webrtc
#endif // CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_
diff --git a/call/test/mock_rtp_packet_sink_interface.h b/call/test/mock_rtp_packet_sink_interface.h
index adc804f941..e6d14f05c5 100644
--- a/call/test/mock_rtp_packet_sink_interface.h
+++ b/call/test/mock_rtp_packet_sink_interface.h
@@ -17,7 +17,7 @@ namespace webrtc {
class MockRtpPacketSink : public RtpPacketSinkInterface {
public:
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};
} // namespace webrtc
diff --git a/call/test/mock_rtp_transport_controller_send.h b/call/test/mock_rtp_transport_controller_send.h
index afc8400f73..308c087a40 100644
--- a/call/test/mock_rtp_transport_controller_send.h
+++ b/call/test/mock_rtp_transport_controller_send.h
@@ -32,45 +32,73 @@ namespace webrtc {
class MockRtpTransportControllerSend
: public RtpTransportControllerSendInterface {
public:
- MOCK_METHOD10(
- CreateRtpVideoSender,
- RtpVideoSenderInterface*(std::map<uint32_t, RtpState>,
- const std::map<uint32_t, RtpPayloadState>&,
- const RtpConfig&,
- int rtcp_report_interval_ms,
- Transport*,
- const RtpSenderObservers&,
- RtcEventLog*,
- std::unique_ptr<FecController>,
- const RtpSenderFrameEncryptionConfig&,
- rtc::scoped_refptr<FrameTransformerInterface>));
- MOCK_METHOD1(DestroyRtpVideoSender, void(RtpVideoSenderInterface*));
- MOCK_METHOD0(GetWorkerQueue, rtc::TaskQueue*());
- MOCK_METHOD0(packet_router, PacketRouter*());
- MOCK_METHOD0(network_state_estimate_observer,
- NetworkStateEstimateObserver*());
- MOCK_METHOD0(transport_feedback_observer, TransportFeedbackObserver*());
- MOCK_METHOD0(packet_sender, RtpPacketSender*());
- MOCK_METHOD1(SetAllocatedSendBitrateLimits, void(BitrateAllocationLimits));
- MOCK_METHOD1(SetPacingFactor, void(float));
- MOCK_METHOD1(SetQueueTimeLimit, void(int));
- MOCK_METHOD0(GetStreamFeedbackProvider, StreamFeedbackProvider*());
- MOCK_METHOD1(RegisterTargetTransferRateObserver,
- void(TargetTransferRateObserver*));
- MOCK_METHOD2(OnNetworkRouteChanged,
- void(const std::string&, const rtc::NetworkRoute&));
- MOCK_METHOD1(OnNetworkAvailability, void(bool));
- MOCK_METHOD0(GetBandwidthObserver, RtcpBandwidthObserver*());
- MOCK_CONST_METHOD0(GetPacerQueuingDelayMs, int64_t());
- MOCK_CONST_METHOD0(GetFirstPacketTime, absl::optional<Timestamp>());
- MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool));
- MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&));
- MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&));
- MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&));
- MOCK_METHOD1(OnTransportOverheadChanged, void(size_t));
- MOCK_METHOD1(AccountForAudioPacketsInPacedSender, void(bool));
- MOCK_METHOD0(IncludeOverheadInPacedSender, void());
- MOCK_METHOD1(OnReceivedPacket, void(const ReceivedPacket&));
+ MOCK_METHOD(RtpVideoSenderInterface*,
+ CreateRtpVideoSender,
+ ((std::map<uint32_t, RtpState>),
+ (const std::map<uint32_t, RtpPayloadState>&),
+ const RtpConfig&,
+ int rtcp_report_interval_ms,
+ Transport*,
+ const RtpSenderObservers&,
+ RtcEventLog*,
+ std::unique_ptr<FecController>,
+ const RtpSenderFrameEncryptionConfig&,
+ rtc::scoped_refptr<FrameTransformerInterface>),
+ (override));
+ MOCK_METHOD(void,
+ DestroyRtpVideoSender,
+ (RtpVideoSenderInterface*),
+ (override));
+ MOCK_METHOD(rtc::TaskQueue*, GetWorkerQueue, (), (override));
+ MOCK_METHOD(PacketRouter*, packet_router, (), (override));
+ MOCK_METHOD(NetworkStateEstimateObserver*,
+ network_state_estimate_observer,
+ (),
+ (override));
+ MOCK_METHOD(TransportFeedbackObserver*,
+ transport_feedback_observer,
+ (),
+ (override));
+ MOCK_METHOD(RtpPacketSender*, packet_sender, (), (override));
+ MOCK_METHOD(void,
+ SetAllocatedSendBitrateLimits,
+ (BitrateAllocationLimits),
+ (override));
+ MOCK_METHOD(void, SetPacingFactor, (float), (override));
+ MOCK_METHOD(void, SetQueueTimeLimit, (int), (override));
+ MOCK_METHOD(StreamFeedbackProvider*,
+ GetStreamFeedbackProvider,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ RegisterTargetTransferRateObserver,
+ (TargetTransferRateObserver*),
+ (override));
+ MOCK_METHOD(void,
+ OnNetworkRouteChanged,
+ (const std::string&, const rtc::NetworkRoute&),
+ (override));
+ MOCK_METHOD(void, OnNetworkAvailability, (bool), (override));
+ MOCK_METHOD(RtcpBandwidthObserver*, GetBandwidthObserver, (), (override));
+ MOCK_METHOD(int64_t, GetPacerQueuingDelayMs, (), (const, override));
+ MOCK_METHOD(absl::optional<Timestamp>,
+ GetFirstPacketTime,
+ (),
+ (const, override));
+ MOCK_METHOD(void, EnablePeriodicAlrProbing, (bool), (override));
+ MOCK_METHOD(void, OnSentPacket, (const rtc::SentPacket&), (override));
+ MOCK_METHOD(void,
+ SetSdpBitrateParameters,
+ (const BitrateConstraints&),
+ (override));
+ MOCK_METHOD(void,
+ SetClientBitratePreferences,
+ (const BitrateSettings&),
+ (override));
+ MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override));
+ MOCK_METHOD(void, AccountForAudioPacketsInPacedSender, (bool), (override));
+ MOCK_METHOD(void, IncludeOverheadInPacedSender, (), (override));
+ MOCK_METHOD(void, OnReceivedPacket, (const ReceivedPacket&), (override));
};
} // namespace webrtc
#endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_
diff --git a/call/video_send_stream.h b/call/video_send_stream.h
index 392c955f47..715d5d73e7 100644
--- a/call/video_send_stream.h
+++ b/call/video_send_stream.h
@@ -18,10 +18,12 @@
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/call/transport.h"
#include "api/crypto/crypto_options.h"
#include "api/frame_transformer_interface.h"
#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
@@ -215,6 +217,15 @@ class VideoSendStream {
// When a stream is stopped, it can't receive, process or deliver packets.
virtual void Stop() = 0;
+ // If the resource is overusing, the VideoSendStream will try to reduce
+ // resolution or frame rate until no resource is overusing.
+ // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor
+ // is moved to Call this method could be deleted altogether in favor of
+ // Call-level APIs only.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>>
+ GetAdaptationResources() = 0;
+
virtual void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) = 0;
diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn
index 72eed1f003..4077486d87 100644
--- a/common_audio/BUILD.gn
+++ b/common_audio/BUILD.gn
@@ -56,8 +56,8 @@ rtc_library("common_audio") {
"../system_wrappers",
"../system_wrappers:cpu_features_api",
"third_party/ooura:fft_size_256",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/common_audio/OWNERS.webrtc b/common_audio/OWNERS.webrtc
index 7c9c9af12a..ba1c8b11f4 100644
--- a/common_audio/OWNERS.webrtc
+++ b/common_audio/OWNERS.webrtc
@@ -1,2 +1,3 @@
henrik.lundin@webrtc.org
kwiberg@webrtc.org
+peah@webrtc.org
diff --git a/common_audio/channel_buffer_unittest.cc b/common_audio/channel_buffer_unittest.cc
index 8ec42346d1..a8b64891d6 100644
--- a/common_audio/channel_buffer_unittest.cc
+++ b/common_audio/channel_buffer_unittest.cc
@@ -53,12 +53,12 @@ TEST(IFChannelBufferTest, SettingNumChannelsOfOneChannelBufferSetsTheOther) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(ChannelBufferTest, SetNumChannelsDeathTest) {
+TEST(ChannelBufferDeathTest, SetNumChannelsDeathTest) {
ChannelBuffer<float> chb(kNumFrames, kMono);
RTC_EXPECT_DEATH(chb.set_num_channels(kStereo), "num_channels");
}
-TEST(IFChannelBufferTest, SetNumChannelsDeathTest) {
+TEST(IFChannelBufferDeathTest, SetNumChannelsDeathTest) {
IFChannelBuffer ifchb(kNumFrames, kMono);
RTC_EXPECT_DEATH(ifchb.ibuf()->set_num_channels(kStereo), "num_channels");
}
diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h
index 712049fa6a..9df49dd11a 100644
--- a/common_audio/mocks/mock_smoothing_filter.h
+++ b/common_audio/mocks/mock_smoothing_filter.h
@@ -18,9 +18,9 @@ namespace webrtc {
class MockSmoothingFilter : public SmoothingFilter {
public:
- MOCK_METHOD1(AddSample, void(float));
- MOCK_METHOD0(GetAverage, absl::optional<float>());
- MOCK_METHOD1(SetTimeConstantMs, bool(int));
+ MOCK_METHOD(void, AddSample, (float), (override));
+ MOCK_METHOD(absl::optional<float>, GetAverage, (), (override));
+ MOCK_METHOD(bool, SetTimeConstantMs, (int), (override));
};
} // namespace webrtc
diff --git a/common_audio/resampler/include/resampler.h b/common_audio/resampler/include/resampler.h
index 04c487b331..41940f9a12 100644
--- a/common_audio/resampler/include/resampler.h
+++ b/common_audio/resampler/include/resampler.h
@@ -90,8 +90,8 @@ class Resampler {
size_t num_channels_;
// Extra instance for stereo
- Resampler* slave_left_;
- Resampler* slave_right_;
+ Resampler* helper_left_;
+ Resampler* helper_right_;
};
} // namespace webrtc
diff --git a/common_audio/resampler/push_resampler_unittest.cc b/common_audio/resampler/push_resampler_unittest.cc
index 61b9725b3a..4724833fbb 100644
--- a/common_audio/resampler/push_resampler_unittest.cc
+++ b/common_audio/resampler/push_resampler_unittest.cc
@@ -31,19 +31,19 @@ TEST(PushResamplerTest, VerifiesInputParameters) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(PushResamplerTest, VerifiesBadInputParameters1) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters1) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(-1, 16000, 1),
"src_sample_rate_hz");
}
-TEST(PushResamplerTest, VerifiesBadInputParameters2) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters2) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, -1, 1),
"dst_sample_rate_hz");
}
-TEST(PushResamplerTest, VerifiesBadInputParameters3) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters3) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, 16000, 0),
"num_channels");
diff --git a/common_audio/resampler/resampler.cc b/common_audio/resampler/resampler.cc
index ce38ef56de..ccfed5a014 100644
--- a/common_audio/resampler/resampler.cc
+++ b/common_audio/resampler/resampler.cc
@@ -37,8 +37,8 @@ Resampler::Resampler()
my_out_frequency_khz_(0),
my_mode_(kResamplerMode1To1),
num_channels_(0),
- slave_left_(nullptr),
- slave_right_(nullptr) {}
+ helper_left_(nullptr),
+ helper_right_(nullptr) {}
Resampler::Resampler(int inFreq, int outFreq, size_t num_channels)
: Resampler() {
@@ -61,11 +61,11 @@ Resampler::~Resampler() {
if (out_buffer_) {
free(out_buffer_);
}
- if (slave_left_) {
- delete slave_left_;
+ if (helper_left_) {
+ delete helper_left_;
}
- if (slave_right_) {
- delete slave_right_;
+ if (helper_right_) {
+ delete helper_right_;
}
}
@@ -120,13 +120,13 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) {
free(out_buffer_);
out_buffer_ = nullptr;
}
- if (slave_left_) {
- delete slave_left_;
- slave_left_ = nullptr;
+ if (helper_left_) {
+ delete helper_left_;
+ helper_left_ = nullptr;
}
- if (slave_right_) {
- delete slave_right_;
- slave_right_ = nullptr;
+ if (helper_right_) {
+ delete helper_right_;
+ helper_right_ = nullptr;
}
in_buffer_size_ = 0;
@@ -140,8 +140,8 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) {
if (num_channels_ == 2) {
// Create two mono resamplers.
- slave_left_ = new Resampler(inFreq, outFreq, 1);
- slave_right_ = new Resampler(inFreq, outFreq, 1);
+ helper_left_ = new Resampler(inFreq, outFreq, 1);
+ helper_right_ = new Resampler(inFreq, outFreq, 1);
}
// Now create the states we need.
@@ -401,7 +401,7 @@ int Resampler::Push(const int16_t* samplesIn,
size_t maxLen,
size_t& outLen) {
if (num_channels_ == 2) {
- // Split up the signal and call the slave object for each channel
+ // Split up the signal and call the helper object for each channel
int16_t* left =
static_cast<int16_t*>(malloc(lengthIn * sizeof(int16_t) / 2));
int16_t* right =
@@ -422,10 +422,10 @@ int Resampler::Push(const int16_t* samplesIn,
size_t actualOutLen_left = 0;
size_t actualOutLen_right = 0;
// Do resampling for right channel
- res |= slave_left_->Push(left, lengthIn, out_left, maxLen / 2,
- actualOutLen_left);
- res |= slave_right_->Push(right, lengthIn, out_right, maxLen / 2,
- actualOutLen_right);
+ res |= helper_left_->Push(left, lengthIn, out_left, maxLen / 2,
+ actualOutLen_left);
+ res |= helper_right_->Push(right, lengthIn, out_right, maxLen / 2,
+ actualOutLen_right);
if (res || (actualOutLen_left != actualOutLen_right)) {
free(left);
free(right);
diff --git a/common_audio/resampler/sinc_resampler_unittest.cc b/common_audio/resampler/sinc_resampler_unittest.cc
index 7bcd7f146e..b067b23b88 100644
--- a/common_audio/resampler/sinc_resampler_unittest.cc
+++ b/common_audio/resampler/sinc_resampler_unittest.cc
@@ -40,7 +40,7 @@ static const double kKernelInterpolationFactor = 0.5;
// Helper class to ensure ChunkedResample() functions properly.
class MockSource : public SincResamplerCallback {
public:
- MOCK_METHOD2(Run, void(size_t frames, float* destination));
+ MOCK_METHOD(void, Run, (size_t frames, float* destination), (override));
};
ACTION(ClearBuffer) {
diff --git a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
index 2918374bba..6b6d6f1fd7 100644
--- a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
+++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
@@ -313,6 +313,14 @@ static void rftbsub_128_C(float* a) {
} // namespace
+OouraFft::OouraFft(bool sse2_available) {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ use_sse2_ = sse2_available;
+#else
+ use_sse2_ = false;
+#endif
+}
+
OouraFft::OouraFft() {
#if defined(WEBRTC_ARCH_X86_FAMILY)
use_sse2_ = (WebRtc_GetCPUInfo(kSSE2) != 0);
diff --git a/common_audio/third_party/ooura/fft_size_128/ooura_fft.h b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
index 0cdd6aa66f..8273dfe58e 100644
--- a/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
+++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
@@ -38,6 +38,10 @@ void rftbsub_128_neon(float* a);
class OouraFft {
public:
+ // Ctor allowing the availability of SSE2 support to be specified.
+ explicit OouraFft(bool sse2_available);
+
+ // Deprecated: This Ctor will soon be removed.
OouraFft();
~OouraFft();
void Fft(float* a) const;
diff --git a/common_audio/vad/mock/mock_vad.h b/common_audio/vad/mock/mock_vad.h
index afe80ef5e1..5a554ce1f9 100644
--- a/common_audio/vad/mock/mock_vad.h
+++ b/common_audio/vad/mock/mock_vad.h
@@ -18,14 +18,14 @@ namespace webrtc {
class MockVad : public Vad {
public:
- virtual ~MockVad() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockVad() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD3(VoiceActivity,
- enum Activity(const int16_t* audio,
- size_t num_samples,
- int sample_rate_hz));
- MOCK_METHOD0(Reset, void());
+ MOCK_METHOD(enum Activity,
+ VoiceActivity,
+ (const int16_t* audio, size_t num_samples, int sample_rate_hz),
+ (override));
+ MOCK_METHOD(void, Reset, (), (override));
};
} // namespace webrtc
diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn
index ddf4c2d495..8c25eb0953 100644
--- a/common_video/BUILD.gn
+++ b/common_video/BUILD.gn
@@ -58,11 +58,12 @@ rtc_library("common_video") {
"../rtc_base:checks",
"../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:rtc_export",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -113,6 +114,7 @@ if (rtc_include_tests) {
"../test:test_support",
"../test:video_test_common",
"//testing/gtest",
+ "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc
index ca52ed9e69..c53c3a02f6 100644
--- a/common_video/bitrate_adjuster.cc
+++ b/common_video/bitrate_adjuster.cc
@@ -39,7 +39,7 @@ BitrateAdjuster::BitrateAdjuster(float min_adjusted_bitrate_pct,
}
void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
// If the change in target bitrate is large, update the adjusted bitrate
// immediately since it's likely we have gained or lost a sizeable amount of
// bandwidth and we'll want to respond quickly.
@@ -58,22 +58,22 @@ void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) {
}
uint32_t BitrateAdjuster::GetTargetBitrateBps() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return target_bitrate_bps_;
}
uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return adjusted_bitrate_bps_;
}
absl::optional<uint32_t> BitrateAdjuster::GetEstimatedBitrateBps() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return bitrate_tracker_.Rate(rtc::TimeMillis());
}
void BitrateAdjuster::Update(size_t frame_size) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
uint32_t current_time_ms = rtc::TimeMillis();
bitrate_tracker_.Update(frame_size, current_time_ms);
UpdateBitrate(current_time_ms);
@@ -100,7 +100,7 @@ uint32_t BitrateAdjuster::GetMaxAdjustedBitrateBps() const {
// Only safe to call this after Update calls have stopped
void BitrateAdjuster::Reset() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
target_bitrate_bps_ = 0;
adjusted_bitrate_bps_ = 0;
last_adjusted_target_bitrate_bps_ = 0;
diff --git a/common_video/generic_frame_descriptor/BUILD.gn b/common_video/generic_frame_descriptor/BUILD.gn
index 05a4e2396c..ab97e887f2 100644
--- a/common_video/generic_frame_descriptor/BUILD.gn
+++ b/common_video/generic_frame_descriptor/BUILD.gn
@@ -19,6 +19,8 @@ rtc_library("generic_frame_descriptor") {
"../../api/transport/rtp:dependency_descriptor",
"../../api/video:video_codec_constants",
"../../rtc_base:checks",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/common_video/generic_frame_descriptor/generic_frame_info.cc b/common_video/generic_frame_descriptor/generic_frame_info.cc
index ca61072799..af66bbaf67 100644
--- a/common_video/generic_frame_descriptor/generic_frame_info.cc
+++ b/common_video/generic_frame_descriptor/generic_frame_info.cc
@@ -15,33 +15,6 @@
namespace webrtc {
-absl::InlinedVector<DecodeTargetIndication, 10>
-GenericFrameInfo::DecodeTargetInfo(absl::string_view indication_symbols) {
- absl::InlinedVector<DecodeTargetIndication, 10> decode_targets;
- for (char symbol : indication_symbols) {
- DecodeTargetIndication indication;
- switch (symbol) {
- case '-':
- indication = DecodeTargetIndication::kNotPresent;
- break;
- case 'D':
- indication = DecodeTargetIndication::kDiscardable;
- break;
- case 'R':
- indication = DecodeTargetIndication::kRequired;
- break;
- case 'S':
- indication = DecodeTargetIndication::kSwitch;
- break;
- default:
- RTC_NOTREACHED();
- }
- decode_targets.push_back(indication);
- }
-
- return decode_targets;
-}
-
GenericFrameInfo::GenericFrameInfo() = default;
GenericFrameInfo::GenericFrameInfo(const GenericFrameInfo&) = default;
GenericFrameInfo::~GenericFrameInfo() = default;
@@ -65,14 +38,8 @@ GenericFrameInfo::Builder& GenericFrameInfo::Builder::S(int spatial_id) {
GenericFrameInfo::Builder& GenericFrameInfo::Builder::Dtis(
absl::string_view indication_symbols) {
- info_.decode_target_indications = DecodeTargetInfo(indication_symbols);
- return *this;
-}
-
-GenericFrameInfo::Builder& GenericFrameInfo::Builder::Fdiffs(
- std::initializer_list<int> frame_diffs) {
- info_.frame_diffs.insert(info_.frame_diffs.end(), frame_diffs.begin(),
- frame_diffs.end());
+ info_.decode_target_indications =
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols);
return *this;
}
diff --git a/common_video/generic_frame_descriptor/generic_frame_info.h b/common_video/generic_frame_descriptor/generic_frame_info.h
index b602ee06a6..19f413b5d4 100644
--- a/common_video/generic_frame_descriptor/generic_frame_info.h
+++ b/common_video/generic_frame_descriptor/generic_frame_info.h
@@ -11,7 +11,9 @@
#ifndef COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_
#define COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_
+#include <bitset>
#include <initializer_list>
+#include <vector>
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
@@ -31,17 +33,15 @@ struct CodecBufferUsage {
};
struct GenericFrameInfo : public FrameDependencyTemplate {
- static absl::InlinedVector<DecodeTargetIndication, 10> DecodeTargetInfo(
- absl::string_view indication_symbols);
-
class Builder;
GenericFrameInfo();
GenericFrameInfo(const GenericFrameInfo&);
~GenericFrameInfo();
- int64_t frame_id = 0;
absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers> encoder_buffers;
+ std::vector<bool> part_of_chain;
+ std::bitset<32> active_decode_targets = ~uint32_t{0};
};
class GenericFrameInfo::Builder {
@@ -53,7 +53,6 @@ class GenericFrameInfo::Builder {
Builder& T(int temporal_id);
Builder& S(int spatial_id);
Builder& Dtis(absl::string_view indication_symbols);
- Builder& Fdiffs(std::initializer_list<int> frame_diffs);
private:
GenericFrameInfo info_;
diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h
index aea1872216..4b208307a1 100644
--- a/common_video/include/bitrate_adjuster.h
+++ b/common_video/include/bitrate_adjuster.h
@@ -15,8 +15,8 @@
#include <stdint.h>
#include "absl/types/optional.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
@@ -60,29 +60,31 @@ class RTC_EXPORT BitrateAdjuster {
bool IsWithinTolerance(uint32_t bitrate_bps, uint32_t target_bitrate_bps);
// Returns smallest possible adjusted value.
- uint32_t GetMinAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ uint32_t GetMinAdjustedBitrateBps() const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns largest possible adjusted value.
- uint32_t GetMaxAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ uint32_t GetMaxAdjustedBitrateBps() const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void Reset();
void UpdateBitrate(uint32_t current_time_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
const float min_adjusted_bitrate_pct_;
const float max_adjusted_bitrate_pct_;
// The bitrate we want.
- volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(crit_);
+ volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(mutex_);
// The bitrate we use to get what we want.
- volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(crit_);
+ volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(mutex_);
// The target bitrate that the adjusted bitrate was computed from.
- volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(crit_);
+ volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(mutex_);
// Used to estimate bitrate.
- RateStatistics bitrate_tracker_ RTC_GUARDED_BY(crit_);
+ RateStatistics bitrate_tracker_ RTC_GUARDED_BY(mutex_);
// The last time we tried to adjust the bitrate.
- uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(crit_);
+ uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(mutex_);
// The number of frames since the last time we tried to adjust the bitrate.
- uint32_t frames_since_last_update_ RTC_GUARDED_BY(crit_);
+ uint32_t frames_since_last_update_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index 823c5ad7a1..a13548f95e 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -187,11 +187,15 @@ class I010BufferBase : public I010BufferInterface {
rtc::scoped_refptr<I420BufferInterface> I010BufferBase::ToI420() {
rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(width(), height());
+#if 0
libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
i420_buffer->MutableDataY(), i420_buffer->StrideY(),
i420_buffer->MutableDataU(), i420_buffer->StrideU(),
i420_buffer->MutableDataV(), i420_buffer->StrideV(),
width(), height());
+#else
+ abort();
+#endif
return i420_buffer;
}
diff --git a/docs/faq.md b/docs/faq.md
index ed9143812a..9f31f31ee4 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -231,7 +231,7 @@ Yes, you still have the right to redistribute and you still have a patent
license for Google's patents that cover the code that Google released.
-### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Does he or she still have a patent license?
+### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Do they still have a patent license?
-Yes, he/she still has the right to redistribute and he/she still has a patent
+Yes, they still have the right to redistribute and they still have a patent
license for Google's patents that cover the code that Google released.
diff --git a/docs/native-code/android/index.md b/docs/native-code/android/index.md
index 07491fdc29..82078210d1 100644
--- a/docs/native-code/android/index.md
+++ b/docs/native-code/android/index.md
@@ -23,6 +23,7 @@ build config.
See [Development][webrtc-development] for instructions on how to update
the code, building etc.
+
## Compiling
1. Generate projects using GN.
@@ -44,9 +45,12 @@ to enable managing multiple configurations in parallel.
2. Compile using:
```
-$ ninja -C out/Debug
+$ autoninja -C out/Debug
```
+(To list all available targets, run `autoninja -C out/Debug -t targets all`.)
+
+
## Using the Bundled Android SDK/NDK
In order to use the Android SDK and NDK that is bundled in
@@ -59,6 +63,7 @@ $ . build/android/envsetup.sh
Then you'll have `adb` and all the other Android tools in your `PATH`.
+
## Running the AppRTCMobile App
AppRTCMobile is an Android application using WebRTC Native APIs via JNI (JNI
@@ -77,7 +82,7 @@ https://bugs.webrtc.org/9282*
generating the build files using GN):
```
-$ ninja -C out/Debug AppRTCMobile
+$ autoninja -C out/Debug AppRTCMobile
```
2. Generate the project files:
@@ -97,51 +102,54 @@ Android Studio's SDK. When asked whether to use the Gradle wrapper, press
AppRTCMobile should now start on the device.
If you do any changes to the C++ code, you have to compile the project using
-ninja after the changes (see step 1).
+autoninja after the changes (see step 1).
*Note: Only "arm" is supported as the target_cpu when using Android Studio. This
still allows you to run the application on 64-bit ARM devices. x86-based devices
are not supported right now.*
-## Running WebRTC Native Tests on an Android Device
+## Running Tests on an Android Device
To build APKs with the WebRTC native tests, follow these instructions.
-1. Ensure you have an Android device set in Developer mode connected via
-USB.
+1. Ensure you have an Android device set in Developer mode connected via USB.
+
+2. Compile unit tests and/or instrumentation tests:
+
+```
+$ autoninja -C out/Debug android_instrumentation_test_apk
+$ autoninja -C out/Debug rtc_unittests
+```
-2. Compile as described in the section above.
+3. You can find the generated test binaries in `out/Debug/bin`. To run instrumentation tests:
-3. To see which tests are available: look in `out/Debug/bin`.
+```
+$ out/Debug/bin/run_android_instrumentation_test_apk -v
+```
-4. Run a test on your device:
+To run unit tests:
```
-$ out/Debug/bin/run_modules_unittests
+$ out/Debug/bin/run_rtc_unittests -v
```
-5. If you want to limit to a subset of tests, use the `--gtest_filter flag`, e.g.
+Show verbose output with `-v` and filter tests with `--gtest-filter=SomeTest.*`. For example:
```
-$ out/Debug/bin/run_modules_unittests \
- --gtest_filter=RtpRtcpAPITest.SSRC:RtpRtcpRtcpTest.*
+$ out/Debug/bin/run_android_instrumentation_test_apk -v \
+ --gtest_filter=VideoFrameBufferTest.*
```
-6. **NOTICE:** The first time you run a test, you must accept a dialog on
+For a full list of command line arguments, use `--help`.
+
+5. **NOTICE:** The first time you run a test, you must accept a dialog on
the device!
If want to run Release builds instead; pass `is_debug=false` to GN (and
preferably generate the projects files into a directory like `out/Release`).
Then use the scripts generated in `out/Release/bin` instead.
-
-## Running WebRTC Instrumentation Tests on an Android Device
-
-The instrumentation tests (like AppRTCMobileTest and
-libjingle_peerconnection_android_unittest) gets scripts generated in the same
-location as the native tests described in the previous section.
-
[webrtc-prerequisite-sw]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/prerequisite-sw/index.md
[webrtc-jni-doc]: https://webrtc.googlesource.com/src/+/master/sdk/android/README
[apprtc-doc]: https://webrtc.googlesource.com/src/+/master/examples/androidapp/README
diff --git a/examples/BUILD.gn b/examples/BUILD.gn
index 4d6d14d0d9..f0c5fa8be1 100644
--- a/examples/BUILD.gn
+++ b/examples/BUILD.gn
@@ -27,6 +27,7 @@ group("examples") {
":AppRTCMobile",
":AppRTCMobile_test_apk",
":libwebrtc_unity",
+ "androidvoip",
]
# TODO(sakal): We include some code from the tests. Remove this dependency
@@ -87,7 +88,7 @@ if (is_android) {
testonly = true
apk_name = "AppRTCMobile"
android_manifest = "androidapp/AndroidManifest.xml"
- min_sdk_version = 16
+ min_sdk_version = 21
target_sdk_version = 29
deps = [
@@ -101,7 +102,7 @@ if (is_android) {
rtc_android_library("AppRTCMobile_javalib") {
testonly = true
- android_manifest_for_lint = "androidapp/AndroidManifest.xml"
+ android_manifest = "androidapp/AndroidManifest.xml"
sources = [
"androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java",
@@ -180,10 +181,10 @@ if (is_android) {
"androidapp/res/layout/fragment_call.xml",
"androidapp/res/layout/fragment_hud.xml",
"androidapp/res/menu/connect_menu.xml",
- "androidapp/res/values/arrays.xml",
- "androidapp/res/values/strings.xml",
"androidapp/res/values-v17/styles.xml",
"androidapp/res/values-v21/styles.xml",
+ "androidapp/res/values/arrays.xml",
+ "androidapp/res/values/strings.xml",
"androidapp/res/xml/preferences.xml",
]
custom_package = "org.appspot.apprtc"
@@ -196,7 +197,7 @@ if (is_android) {
rtc_instrumentation_test_apk("AppRTCMobile_test_apk") {
apk_name = "AppRTCMobileTest"
android_manifest = "androidtests/AndroidManifest.xml"
- min_sdk_version = 16
+ min_sdk_version = 21
target_sdk_version = 21
sources = [
@@ -207,7 +208,11 @@ if (is_android) {
deps = [
":AppRTCMobile_javalib",
+ "../sdk/android:base_java",
+ "../sdk/android:camera_java",
"../sdk/android:libjingle_peerconnection_java",
+ "../sdk/android:peerconnection_java",
+ "../sdk/android:video_api_java",
"../sdk/android:video_java",
"//third_party/android_support_test_runner:runner_java",
"//third_party/junit",
@@ -303,7 +308,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
} else {
deps += [ "../sdk:mac_framework_objc+link" ]
}
- libs = [
+ frameworks = [
"CoreMedia.framework",
"QuartzCore.framework",
]
@@ -345,7 +350,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"../sdk:ios_framework_bundle",
]
- libs = [ "AVFoundation.framework" ]
+ frameworks = [ "AVFoundation.framework" ]
}
ios_app_bundle("AppRTCMobile") {
@@ -379,14 +384,18 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
if (rtc_apprtcmobile_broadcast_extension) {
bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") {
testonly = true
- public_deps = [ ":AppRTCMobileBroadcastUpload" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastUpload", # prevent code format
+ ]
sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") {
testonly = true
- public_deps = [ ":AppRTCMobileBroadcastSetupUI" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastSetupUI", # prevent code format
+ ]
sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
@@ -404,7 +413,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"../sdk:ios_framework_bundle",
]
- libs = [ "ReplayKit.framework" ]
+ frameworks = [ "ReplayKit.framework" ]
}
ios_appex_bundle("AppRTCMobileBroadcastUpload") {
@@ -428,7 +437,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist"
- libs = [ "ReplayKit.framework" ]
+ frameworks = [ "ReplayKit.framework" ]
deps = [ ":AppRTCMobile_ios_bundle_data" ]
}
@@ -484,6 +493,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"../modules/audio_processing:api",
"../pc:libjingle_peerconnection",
"../rtc_base",
+ "../rtc_base/synchronization:mutex",
"../sdk:base_objc",
"../sdk:default_codec_factory_objc",
"../sdk:helpers_objc",
@@ -542,7 +552,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
info_plist = "objc/AppRTCMobile/mac/Info.plist"
- libs = [ "AppKit.framework" ]
+ frameworks = [ "AppKit.framework" ]
ldflags = [
"-rpath",
@@ -587,10 +597,10 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
configs += [ ":socketrocket_warning_config" ]
public_configs = [ ":socketrocket_include_config" ]
- libs = [
+ libs = [ "icucore" ]
+ frameworks = [
"CFNetwork.framework",
"Security.framework",
- "icucore",
]
}
@@ -829,6 +839,7 @@ if (is_android) {
"../sdk/android:camera_java",
"../sdk/android:libjingle_peerconnection_java",
"../sdk/android:peerconnection_java",
+ "../sdk/android:video_api_java",
"../sdk/android:video_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
@@ -859,6 +870,7 @@ if (is_android) {
deps = [
":AppRTCMobile_javalib",
+ "../sdk/android:peerconnection_java",
"//base:base_java_test_support",
"//third_party/google-truth:google_truth_java",
]
diff --git a/examples/aarproject/app/build.gradle b/examples/aarproject/app/build.gradle
index dde0707ace..37499d468b 100644
--- a/examples/aarproject/app/build.gradle
+++ b/examples/aarproject/app/build.gradle
@@ -5,7 +5,7 @@ android {
buildToolsVersion "27.0.1"
defaultConfig {
applicationId "org.appspot.apprtc"
- minSdkVersion 16
+ minSdkVersion 21
targetSdkVersion 21
versionCode 1
versionName "1.0"
diff --git a/examples/androidapp/AndroidManifest.xml b/examples/androidapp/AndroidManifest.xml
index 8a9035e782..c4e1e797d0 100644
--- a/examples/androidapp/AndroidManifest.xml
+++ b/examples/androidapp/AndroidManifest.xml
@@ -8,7 +8,7 @@
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="29" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="29" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
diff --git a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
index 7ae3d838dd..c32ab964ad 100644
--- a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
+++ b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -185,8 +185,8 @@ public class AppRTCAudioManager {
// Note that, the sensor will not be active until start() has been called.
proximitySensor = AppRTCProximitySensor.create(context,
// This method will be called each time a state change is detected.
- // Example: user holds his hand over the device (closer than ~5 cm),
- // or removes his hand from the device.
+ // Example: user holds their hand over the device (closer than ~5 cm),
+ // or removes their hand from the device.
this ::onProximitySensorChangedState);
Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
diff --git a/examples/androidnativeapi/AndroidManifest.xml b/examples/androidnativeapi/AndroidManifest.xml
index f10f55a1b6..9257c4132e 100644
--- a/examples/androidnativeapi/AndroidManifest.xml
+++ b/examples/androidnativeapi/AndroidManifest.xml
@@ -2,7 +2,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.webrtc.examples.androidnativeapi">
- <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="27" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="27" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
diff --git a/examples/androidnativeapi/BUILD.gn b/examples/androidnativeapi/BUILD.gn
index 9c114e859c..9253c0bcd9 100644
--- a/examples/androidnativeapi/BUILD.gn
+++ b/examples/androidnativeapi/BUILD.gn
@@ -5,7 +5,7 @@ if (is_android) {
testonly = true
apk_name = "androidnativeapi"
android_manifest = "AndroidManifest.xml"
- min_sdk_version = 19
+ min_sdk_version = 21
target_sdk_version = 27
sources = [
@@ -16,6 +16,7 @@ if (is_android) {
deps = [
":resources",
"//modules/audio_device:audio_device_java",
+ "//rtc_base:base_java",
"//sdk/android:camera_java",
"//sdk/android:surfaceviewrenderer_java",
"//sdk/android:video_api_java",
@@ -47,6 +48,7 @@ if (is_android) {
deps = [
":generated_jni",
"../../api:scoped_refptr",
+ "../../rtc_base/synchronization:mutex",
"//api:libjingle_peerconnection_api",
"//api/rtc_event_log:rtc_event_log_factory",
"//api/task_queue:default_task_queue_factory",
diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc
index 03968335d9..f0b060632d 100644
--- a/examples/androidnativeapi/jni/android_call_client.cc
+++ b/examples/androidnativeapi/jni/android_call_client.cc
@@ -43,7 +43,7 @@ class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver {
void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
private:
- const AndroidCallClient* client_;
+ AndroidCallClient* const client_;
};
namespace {
@@ -88,7 +88,7 @@ void AndroidCallClient::Call(JNIEnv* env,
const webrtc::JavaRef<jobject>& remote_sink) {
RTC_DCHECK_RUN_ON(&thread_checker_);
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
if (call_started_) {
RTC_LOG(LS_WARNING) << "Call already started.";
return;
@@ -112,7 +112,7 @@ void AndroidCallClient::Hangup(JNIEnv* env) {
call_started_ = false;
{
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
if (pc_ != nullptr) {
pc_->Close();
pc_ = nullptr;
@@ -174,7 +174,7 @@ void AndroidCallClient::CreatePeerConnectionFactory() {
}
void AndroidCallClient::CreatePeerConnection() {
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
webrtc::PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
// DTLS SRTP has to be disabled for loopback to work.
@@ -205,7 +205,7 @@ void AndroidCallClient::CreatePeerConnection() {
}
void AndroidCallClient::Connect() {
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
pc_->CreateOffer(new rtc::RefCountedObject<CreateOfferObserver>(pc_),
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}
@@ -240,7 +240,7 @@ void AndroidCallClient::PCObserver::OnIceGatheringChange(
void AndroidCallClient::PCObserver::OnIceCandidate(
const webrtc::IceCandidateInterface* candidate) {
RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
- rtc::CritScope lock(&client_->pc_mutex_);
+ webrtc::MutexLock lock(&client_->pc_mutex_);
RTC_DCHECK(client_->pc_ != nullptr);
client_->pc_->AddIceCandidate(candidate);
}
diff --git a/examples/androidnativeapi/jni/android_call_client.h b/examples/androidnativeapi/jni/android_call_client.h
index 13992f5960..f3f61a4695 100644
--- a/examples/androidnativeapi/jni/android_call_client.h
+++ b/examples/androidnativeapi/jni/android_call_client.h
@@ -18,7 +18,7 @@
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
#include "sdk/android/native_api/video/video_source.h"
@@ -66,7 +66,7 @@ class AndroidCallClient {
rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> video_source_
RTC_GUARDED_BY(thread_checker_);
- rtc::CriticalSection pc_mutex_;
+ webrtc::Mutex pc_mutex_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_
RTC_GUARDED_BY(pc_mutex_);
};
diff --git a/examples/androidtests/AndroidManifest.xml b/examples/androidtests/AndroidManifest.xml
index dae2e980a6..8e995366dc 100644
--- a/examples/androidtests/AndroidManifest.xml
+++ b/examples/androidtests/AndroidManifest.xml
@@ -14,7 +14,7 @@
package="org.appspot.apprtc.test">
<uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="21" />
<application>
<uses-library android:name="android.test.runner" />
diff --git a/examples/androidvoip/AndroidManifest.xml b/examples/androidvoip/AndroidManifest.xml
new file mode 100644
index 0000000000..106f71171d
--- /dev/null
+++ b/examples/androidvoip/AndroidManifest.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc.examples.androidvoip">
+
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="27" />
+
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+
+ <uses-feature android:name="android.hardware.microphone" android:required="true" />
+ <uses-feature android:name="android.hardware.telephony" android:required="false" />
+
+ <application
+ android:allowBackup="true"
+ android:label="@string/app_name"
+ android:supportsRtl="true">
+ <activity android:name=".MainActivity"
+ android:windowSoftInputMode="stateHidden">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+</manifest>
diff --git a/examples/androidvoip/BUILD.gn b/examples/androidvoip/BUILD.gn
new file mode 100644
index 0000000000..74341a78ac
--- /dev/null
+++ b/examples/androidvoip/BUILD.gn
@@ -0,0 +1,88 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//webrtc.gni")
+
+if (is_android) {
+ rtc_android_apk("androidvoip") {
+ testonly = true
+ apk_name = "androidvoip"
+ android_manifest = "AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 27
+
+ sources = [
+ "java/org/webrtc/examples/androidvoip/MainActivity.java",
+ "java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java",
+ "java/org/webrtc/examples/androidvoip/VoipClient.java",
+ ]
+
+ deps = [
+ ":resources",
+ "//modules/audio_device:audio_device_java",
+ "//rtc_base:base_java",
+ "//sdk/android:java_audio_device_module_java",
+ "//sdk/android:video_java",
+ "//third_party/android_deps:androidx_core_core_java",
+ "//third_party/android_deps:androidx_legacy_legacy_support_v4_java",
+ ]
+
+ shared_libraries = [ ":examples_androidvoip_jni" ]
+ }
+
+ generate_jni("generated_jni") {
+ testonly = true
+ sources = [ "java/org/webrtc/examples/androidvoip/VoipClient.java" ]
+ namespace = "webrtc_examples"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ rtc_shared_library("examples_androidvoip_jni") {
+ testonly = true
+ sources = [
+ "jni/android_voip_client.cc",
+ "jni/android_voip_client.h",
+ "jni/onload.cc",
+ ]
+
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ configs += [ "//build/config/android:hide_all_but_jni" ]
+
+ deps = [
+ ":generated_jni",
+ "//api:transport_api",
+ "//api/audio_codecs:audio_codecs_api",
+ "//api/audio_codecs:builtin_audio_decoder_factory",
+ "//api/audio_codecs:builtin_audio_encoder_factory",
+ "//api/task_queue:default_task_queue_factory",
+ "//api/voip:voip_api",
+ "//api/voip:voip_engine_factory",
+ "//modules/utility:utility",
+ "//rtc_base",
+ "//rtc_base/third_party/sigslot:sigslot",
+ "//sdk/android:native_api_audio_device_module",
+ "//sdk/android:native_api_base",
+ "//sdk/android:native_api_jni",
+ "//third_party/abseil-cpp/absl/memory:memory",
+ ]
+ }
+
+ android_resources("resources") {
+ testonly = true
+ custom_package = "org.webrtc.examples.androidvoip"
+ sources = [
+ "res/layout/activity_main.xml",
+ "res/values/colors.xml",
+ "res/values/strings.xml",
+ ]
+
+ # Needed for Bazel converter.
+ resource_dirs = [ "res" ]
+ assert(resource_dirs != []) # Mark as used.
+ }
+}
diff --git a/examples/androidvoip/DEPS b/examples/androidvoip/DEPS
new file mode 100644
index 0000000000..edb714dd44
--- /dev/null
+++ b/examples/androidvoip/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+sdk/android/native_api",
+]
diff --git a/examples/androidvoip/OWNERS b/examples/androidvoip/OWNERS
new file mode 100644
index 0000000000..0fe5182450
--- /dev/null
+++ b/examples/androidvoip/OWNERS
@@ -0,0 +1,2 @@
+natim@webrtc.org
+sakal@webrtc.org
diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java
new file mode 100644
index 0000000000..d787de59a0
--- /dev/null
+++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java
@@ -0,0 +1,339 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import android.Manifest.permission;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.EditText;
+import android.widget.RelativeLayout;
+import android.widget.ScrollView;
+import android.widget.Spinner;
+import android.widget.Switch;
+import android.widget.TextView;
+import android.widget.Toast;
+import android.widget.ToggleButton;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.webrtc.ContextUtils;
+
+public class MainActivity extends Activity implements OnVoipClientTaskCompleted {
+ private static final int NUM_SUPPORTED_CODECS = 6;
+
+ private VoipClient voipClient;
+ private List<String> supportedCodecs;
+ private boolean[] isDecoderSelected;
+ private Set<Integer> selectedDecoders;
+
+ private Toast toast;
+ private ScrollView scrollView;
+ private TextView localIPAddressTextView;
+ private EditText localPortNumberEditText;
+ private EditText remoteIPAddressEditText;
+ private EditText remotePortNumberEditText;
+ private Spinner encoderSpinner;
+ private Button decoderSelectionButton;
+ private TextView decodersTextView;
+ private ToggleButton sessionButton;
+ private RelativeLayout switchLayout;
+ private Switch sendSwitch;
+ private Switch playoutSwitch;
+
+ @Override
+ protected void onCreate(Bundle savedInstance) {
+ ContextUtils.initialize(getApplicationContext());
+
+ super.onCreate(savedInstance);
+ setContentView(R.layout.activity_main);
+
+ System.loadLibrary("examples_androidvoip_jni");
+
+ voipClient = new VoipClient(getApplicationContext(), this);
+ voipClient.getAndSetUpLocalIPAddress();
+ voipClient.getAndSetUpSupportedCodecs();
+
+ isDecoderSelected = new boolean[NUM_SUPPORTED_CODECS];
+ selectedDecoders = new HashSet<>();
+
+ toast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
+
+ scrollView = (ScrollView) findViewById(R.id.scroll_view);
+ localIPAddressTextView = (TextView) findViewById(R.id.local_ip_address_text_view);
+ localPortNumberEditText = (EditText) findViewById(R.id.local_port_number_edit_text);
+ remoteIPAddressEditText = (EditText) findViewById(R.id.remote_ip_address_edit_text);
+ remotePortNumberEditText = (EditText) findViewById(R.id.remote_port_number_edit_text);
+ encoderSpinner = (Spinner) findViewById(R.id.encoder_spinner);
+ decoderSelectionButton = (Button) findViewById(R.id.decoder_selection_button);
+ decodersTextView = (TextView) findViewById(R.id.decoders_text_view);
+ sessionButton = (ToggleButton) findViewById(R.id.session_button);
+ switchLayout = (RelativeLayout) findViewById(R.id.switch_layout);
+ sendSwitch = (Switch) findViewById(R.id.start_send_switch);
+ playoutSwitch = (Switch) findViewById(R.id.start_playout_switch);
+
+ setUpSessionButton();
+ setUpSendAndPlayoutSwitch();
+ }
+
+ private void setUpEncoderSpinner(List<String> supportedCodecs) {
+ ArrayAdapter<String> encoderAdapter =
+ new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, supportedCodecs);
+ encoderAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ encoderSpinner.setAdapter(encoderAdapter);
+ encoderSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+ @Override
+ public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+ voipClient.setEncoder((String) parent.getSelectedItem());
+ }
+ @Override
+ public void onNothingSelected(AdapterView<?> parent) {}
+ });
+ }
+
+ private List<String> getSelectedDecoders() {
+ List<String> decoders = new ArrayList<>();
+ for (int i = 0; i < supportedCodecs.size(); i++) {
+ if (selectedDecoders.contains(i)) {
+ decoders.add(supportedCodecs.get(i));
+ }
+ }
+ return decoders;
+ }
+
+ private void setUpDecoderSelectionButton(List<String> supportedCodecs) {
+ decoderSelectionButton.setOnClickListener((view) -> {
+ AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this);
+ dialogBuilder.setTitle(R.string.dialog_title);
+
+ // Populate multi choice items with supported decoders.
+ String[] supportedCodecsArray = supportedCodecs.toArray(new String[0]);
+ dialogBuilder.setMultiChoiceItems(
+ supportedCodecsArray, isDecoderSelected, (dialog, position, isChecked) -> {
+ if (isChecked) {
+ selectedDecoders.add(position);
+ } else if (!isChecked) {
+ selectedDecoders.remove(position);
+ }
+ });
+
+ // "Ok" button.
+ dialogBuilder.setPositiveButton(R.string.ok_label, (dialog, position) -> {
+ List<String> decoders = getSelectedDecoders();
+ String result = decoders.stream().collect(Collectors.joining(", "));
+ if (result.isEmpty()) {
+ decodersTextView.setText(R.string.decoders_text_view_default);
+ } else {
+ decodersTextView.setText(result);
+ }
+ voipClient.setDecoders(decoders);
+ });
+
+ // "Dismiss" button.
+ dialogBuilder.setNegativeButton(
+ R.string.dismiss_label, (dialog, position) -> { dialog.dismiss(); });
+
+ // "Clear All" button.
+ dialogBuilder.setNeutralButton(R.string.clear_all_label, (dialog, position) -> {
+ Arrays.fill(isDecoderSelected, false);
+ selectedDecoders.clear();
+ decodersTextView.setText(R.string.decoders_text_view_default);
+ });
+
+ AlertDialog dialog = dialogBuilder.create();
+ dialog.show();
+ });
+ }
+
+ private void setUpSessionButton() {
+ sessionButton.setOnCheckedChangeListener((button, isChecked) -> {
+ // Ask for permission on RECORD_AUDIO if not granted.
+ if (ContextCompat.checkSelfPermission(this, permission.RECORD_AUDIO)
+ != PackageManager.PERMISSION_GRANTED) {
+ String[] sList = {permission.RECORD_AUDIO};
+ ActivityCompat.requestPermissions(this, sList, 1);
+ }
+
+ if (isChecked) {
+ // Order matters here, addresses have to be set before starting session
+ // before setting codec.
+ voipClient.setLocalAddress(localIPAddressTextView.getText().toString(),
+ Integer.parseInt(localPortNumberEditText.getText().toString()));
+ voipClient.setRemoteAddress(remoteIPAddressEditText.getText().toString(),
+ Integer.parseInt(remotePortNumberEditText.getText().toString()));
+ voipClient.startSession();
+ voipClient.setEncoder((String) encoderSpinner.getSelectedItem());
+ voipClient.setDecoders(getSelectedDecoders());
+ } else {
+ voipClient.stopSession();
+ }
+ });
+ }
+
+ private void setUpSendAndPlayoutSwitch() {
+ sendSwitch.setOnCheckedChangeListener((button, isChecked) -> {
+ if (isChecked) {
+ voipClient.startSend();
+ } else {
+ voipClient.stopSend();
+ }
+ });
+
+ playoutSwitch.setOnCheckedChangeListener((button, isChecked) -> {
+ if (isChecked) {
+ voipClient.startPlayout();
+ } else {
+ voipClient.stopPlayout();
+ }
+ });
+ }
+
+ private void setUpIPAddressEditTexts(String localIPAddress) {
+ if (localIPAddress.isEmpty()) {
+ showToast("Please check your network configuration");
+ } else {
+ localIPAddressTextView.setText(localIPAddress);
+ // By default remote IP address is the same as local IP address.
+ remoteIPAddressEditText.setText(localIPAddress);
+ }
+ }
+
+ private void showToast(String message) {
+ toast.cancel();
+ toast = Toast.makeText(this, message, Toast.LENGTH_SHORT);
+ toast.setGravity(Gravity.TOP, 0, 200);
+ toast.show();
+ }
+
+ @Override
+ protected void onDestroy() {
+ voipClient.close();
+ voipClient = null;
+
+ super.onDestroy();
+ }
+
+ @Override
+ public void onGetLocalIPAddressCompleted(String localIPAddress) {
+ runOnUiThread(() -> { setUpIPAddressEditTexts(localIPAddress); });
+ }
+
+ @Override
+ public void onGetSupportedCodecsCompleted(List<String> supportedCodecs) {
+ runOnUiThread(() -> {
+ this.supportedCodecs = supportedCodecs;
+ setUpEncoderSpinner(supportedCodecs);
+ setUpDecoderSelectionButton(supportedCodecs);
+ });
+ }
+
+ @Override
+ public void onVoipClientInitializationCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (!isSuccessful) {
+ showToast("Error initializing audio device");
+ }
+ });
+ }
+
+ @Override
+ public void onStartSessionCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Session started");
+ switchLayout.setVisibility(View.VISIBLE);
+ scrollView.post(() -> { scrollView.fullScroll(ScrollView.FOCUS_DOWN); });
+ } else {
+ showToast("Failed to start session");
+ }
+ });
+ }
+
+ @Override
+ public void onStopSessionCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Session stopped");
+ // Set listeners to null so the checked state can be changed programmatically.
+ sendSwitch.setOnCheckedChangeListener(null);
+ playoutSwitch.setOnCheckedChangeListener(null);
+ sendSwitch.setChecked(false);
+ playoutSwitch.setChecked(false);
+ // Redo the switch listener setup.
+ setUpSendAndPlayoutSwitch();
+ switchLayout.setVisibility(View.GONE);
+ } else {
+ showToast("Failed to stop session");
+ }
+ });
+ }
+
+ @Override
+ public void onStartSendCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Started sending");
+ } else {
+ showToast("Error initializing microphone");
+ }
+ });
+ }
+
+ @Override
+ public void onStopSendCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Stopped sending");
+ } else {
+ showToast("Microphone termination failed");
+ }
+ });
+ }
+
+ @Override
+ public void onStartPlayoutCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Started playout");
+ } else {
+ showToast("Error initializing speaker");
+ }
+ });
+ }
+
+ @Override
+ public void onStopPlayoutCompleted(boolean isSuccessful) {
+ runOnUiThread(() -> {
+ if (isSuccessful) {
+ showToast("Stopped playout");
+ } else {
+ showToast("Speaker termination failed");
+ }
+ });
+ }
+
+ @Override
+ public void onUninitializedVoipClient() {
+ runOnUiThread(() -> { showToast("Voip client is uninitialized"); });
+ }
+}
diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java
new file mode 100644
index 0000000000..bb85e048bb
--- /dev/null
+++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import java.util.List;
+
+public interface OnVoipClientTaskCompleted {
+ void onGetLocalIPAddressCompleted(String localIPAddress);
+ void onGetSupportedCodecsCompleted(List<String> supportedCodecs);
+ void onVoipClientInitializationCompleted(boolean isSuccessful);
+ void onStartSessionCompleted(boolean isSuccessful);
+ void onStopSessionCompleted(boolean isSuccessful);
+ void onStartSendCompleted(boolean isSuccessful);
+ void onStopSendCompleted(boolean isSuccessful);
+ void onStartPlayoutCompleted(boolean isSuccessful);
+ void onStopPlayoutCompleted(boolean isSuccessful);
+ void onUninitializedVoipClient();
+}
diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java
new file mode 100644
index 0000000000..2dcbd99b1d
--- /dev/null
+++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.HandlerThread;
+import java.util.ArrayList;
+import java.util.List;
+
+public class VoipClient {
+ private static final String TAG = "VoipClient";
+
+ private final HandlerThread thread;
+ private final Handler handler;
+
+ private long nativeClient;
+ private OnVoipClientTaskCompleted listener;
+
+ public VoipClient(Context applicationContext, OnVoipClientTaskCompleted listener) {
+ this.listener = listener;
+ thread = new HandlerThread(TAG + "Thread");
+ thread.start();
+ handler = new Handler(thread.getLooper());
+
+ handler.post(() -> {
+ nativeClient = nativeCreateClient(applicationContext);
+ listener.onVoipClientInitializationCompleted(/* isSuccessful */ nativeClient != 0);
+ });
+ }
+
+ private boolean isInitialized() {
+ return nativeClient != 0;
+ }
+
+ public void getAndSetUpSupportedCodecs() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onGetSupportedCodecsCompleted(nativeGetSupportedCodecs(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void getAndSetUpLocalIPAddress() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onGetLocalIPAddressCompleted(nativeGetLocalIPAddress(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void setEncoder(String encoder) {
+ handler.post(() -> {
+ if (isInitialized()) {
+ nativeSetEncoder(nativeClient, encoder);
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void setDecoders(List<String> decoders) {
+ handler.post(() -> {
+ if (isInitialized()) {
+ nativeSetDecoders(nativeClient, decoders);
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void setLocalAddress(String ipAddress, int portNumber) {
+ handler.post(() -> {
+ if (isInitialized()) {
+ nativeSetLocalAddress(nativeClient, ipAddress, portNumber);
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void setRemoteAddress(String ipAddress, int portNumber) {
+ handler.post(() -> {
+ if (isInitialized()) {
+ nativeSetRemoteAddress(nativeClient, ipAddress, portNumber);
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void startSession() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStartSessionCompleted(nativeStartSession(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void stopSession() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStopSessionCompleted(nativeStopSession(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void startSend() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStartSendCompleted(nativeStartSend(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void stopSend() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStopSendCompleted(nativeStopSend(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void startPlayout() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStartPlayoutCompleted(nativeStartPlayout(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void stopPlayout() {
+ handler.post(() -> {
+ if (isInitialized()) {
+ listener.onStopPlayoutCompleted(nativeStopPlayout(nativeClient));
+ } else {
+ listener.onUninitializedVoipClient();
+ }
+ });
+ }
+
+ public void close() {
+ handler.post(() -> {
+ nativeDelete(nativeClient);
+ nativeClient = 0;
+ });
+ thread.quitSafely();
+ }
+
+ private static native long nativeCreateClient(Context applicationContext);
+ private static native List<String> nativeGetSupportedCodecs(long nativeAndroidVoipClient);
+ private static native String nativeGetLocalIPAddress(long nativeAndroidVoipClient);
+ private static native void nativeSetEncoder(long nativeAndroidVoipClient, String encoder);
+ private static native void nativeSetDecoders(long nativeAndroidVoipClient, List<String> decoders);
+ private static native void nativeSetLocalAddress(
+ long nativeAndroidVoipClient, String ipAddress, int portNumber);
+ private static native void nativeSetRemoteAddress(
+ long nativeAndroidVoipClient, String ipAddress, int portNumber);
+ private static native boolean nativeStartSession(long nativeAndroidVoipClient);
+ private static native boolean nativeStopSession(long nativeAndroidVoipClient);
+ private static native boolean nativeStartSend(long nativeAndroidVoipClient);
+ private static native boolean nativeStopSend(long nativeAndroidVoipClient);
+ private static native boolean nativeStartPlayout(long nativeAndroidVoipClient);
+ private static native boolean nativeStopPlayout(long nativeAndroidVoipClient);
+ private static native void nativeDelete(long nativeAndroidVoipClient);
+}
diff --git a/examples/androidvoip/jni/android_voip_client.cc b/examples/androidvoip/jni/android_voip_client.cc
new file mode 100644
index 0000000000..13cadf2f3d
--- /dev/null
+++ b/examples/androidvoip/jni/android_voip_client.cc
@@ -0,0 +1,405 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/androidvoip/jni/android_voip_client.h"
+
+#include <errno.h>
+#include <sys/socket.h>
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <unordered_map>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/voip/voip_codec.h"
+#include "api/voip/voip_engine_factory.h"
+#include "api/voip/voip_network.h"
+#include "examples/androidvoip/generated_jni/VoipClient_jni.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network.h"
+#include "rtc_base/socket_server.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace {
+
+// Connects a UDP socket to a public address and returns the local
+// address associated with it. Since it binds to the "any" address
+// internally, it returns the default local address on a multi-homed
+// endpoint. Implementation copied from
+// BasicNetworkManager::QueryDefaultLocalAddress.
+rtc::IPAddress QueryDefaultLocalAddress(int family) {
+ const char kPublicIPv4Host[] = "8.8.8.8";
+ const char kPublicIPv6Host[] = "2001:4860:4860::8888";
+ const int kPublicPort = 53;
+ std::unique_ptr<rtc::Thread> thread = rtc::Thread::CreateWithSocketServer();
+
+ RTC_DCHECK(thread->socketserver() != nullptr);
+ RTC_DCHECK(family == AF_INET || family == AF_INET6);
+
+ std::unique_ptr<rtc::AsyncSocket> socket(
+ thread->socketserver()->CreateAsyncSocket(family, SOCK_DGRAM));
+ if (!socket) {
+ RTC_LOG_ERR(LERROR) << "Socket creation failed";
+ return rtc::IPAddress();
+ }
+
+ auto host = family == AF_INET ? kPublicIPv4Host : kPublicIPv6Host;
+ if (socket->Connect(rtc::SocketAddress(host, kPublicPort)) < 0) {
+ if (socket->GetError() != ENETUNREACH &&
+ socket->GetError() != EHOSTUNREACH) {
+ RTC_LOG(LS_INFO) << "Connect failed with " << socket->GetError();
+ }
+ return rtc::IPAddress();
+ }
+ return socket->GetLocalAddress().ipaddr();
+}
+
+// Assigned payload type for supported built-in codecs. PCMU, PCMA,
+// and G722 have set payload types. Whereas opus, ISAC, and ILBC
+// have dynamic payload types.
+enum class PayloadType : int {
+ kPcmu = 0,
+ kPcma = 8,
+ kG722 = 9,
+ kOpus = 96,
+ kIsac = 97,
+ kIlbc = 98,
+};
+
+// Returns the payload type corresponding to codec_name. Only
+// supports the built-in codecs.
+int GetPayloadType(const std::string& codec_name) {
+ RTC_DCHECK(codec_name == "PCMU" || codec_name == "PCMA" ||
+ codec_name == "G722" || codec_name == "opus" ||
+ codec_name == "ISAC" || codec_name == "ILBC");
+
+ if (codec_name == "PCMU") {
+ return static_cast<int>(PayloadType::kPcmu);
+ } else if (codec_name == "PCMA") {
+ return static_cast<int>(PayloadType::kPcma);
+ } else if (codec_name == "G722") {
+ return static_cast<int>(PayloadType::kG722);
+ } else if (codec_name == "opus") {
+ return static_cast<int>(PayloadType::kOpus);
+ } else if (codec_name == "ISAC") {
+ return static_cast<int>(PayloadType::kIsac);
+ } else if (codec_name == "ILBC") {
+ return static_cast<int>(PayloadType::kIlbc);
+ }
+
+ RTC_NOTREACHED();
+ return -1;
+}
+
+} // namespace
+
+namespace webrtc_examples {
+
+AndroidVoipClient::AndroidVoipClient(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& application_context) {
+ voip_thread_ = rtc::Thread::CreateWithSocketServer();
+ voip_thread_->Start();
+
+ webrtc::VoipEngineConfig config;
+ config.encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
+ config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+ config.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ config.audio_device_module =
+ webrtc::CreateJavaAudioDeviceModule(env, application_context.obj());
+ config.audio_processing = webrtc::AudioProcessingBuilder().Create();
+
+ supported_codecs_ = config.encoder_factory->GetSupportedEncoders();
+
+ // Due to consistent thread requirement on
+ // modules/audio_device/android/audio_device_template.h,
+ // code is invoked in the context of voip_thread_.
+ voip_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ voip_engine_ = webrtc::CreateVoipEngine(std::move(config));
+ if (!voip_engine_) {
+ RTC_LOG(LS_ERROR) << "VoipEngine creation failed";
+ }
+ });
+}
+
+AndroidVoipClient::~AndroidVoipClient() {
+ voip_thread_->Stop();
+}
+
+AndroidVoipClient* AndroidVoipClient::Create(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& application_context) {
+ // Using `new` to access a non-public constructor.
+ auto voip_client =
+ absl::WrapUnique(new AndroidVoipClient(env, application_context));
+ if (!voip_client->voip_engine_) {
+ return nullptr;
+ }
+ return voip_client.release();
+}
+
+webrtc::ScopedJavaLocalRef<jobject> AndroidVoipClient::GetSupportedCodecs(
+ JNIEnv* env) {
+ std::vector<std::string> names;
+ for (const webrtc::AudioCodecSpec& spec : supported_codecs_) {
+ names.push_back(spec.format.name);
+ }
+ webrtc::ScopedJavaLocalRef<jstring> (*convert_function)(
+ JNIEnv*, const std::string&) = &webrtc::NativeToJavaString;
+ return NativeToJavaList(env, names, convert_function);
+}
+
+webrtc::ScopedJavaLocalRef<jstring> AndroidVoipClient::GetLocalIPAddress(
+ JNIEnv* env) {
+ rtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET);
+ if (!ipv4_address.IsNil()) {
+ return webrtc::NativeToJavaString(env, ipv4_address.ToString());
+ }
+ rtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6);
+ if (!ipv6_address.IsNil()) {
+ return webrtc::NativeToJavaString(env, ipv6_address.ToString());
+ }
+ return webrtc::NativeToJavaString(env, "");
+}
+
+void AndroidVoipClient::SetEncoder(
+ JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_encoder_string) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return;
+ }
+ const std::string& chosen_encoder =
+ webrtc::JavaToNativeString(env, j_encoder_string);
+ for (const webrtc::AudioCodecSpec& encoder : supported_codecs_) {
+ if (encoder.format.name == chosen_encoder) {
+ voip_engine_->Codec().SetSendCodec(
+ *channel_, GetPayloadType(encoder.format.name), encoder.format);
+ break;
+ }
+ }
+}
+
+void AndroidVoipClient::SetDecoders(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& j_decoder_strings) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return;
+ }
+ std::vector<std::string> chosen_decoders =
+ webrtc::JavaListToNativeVector<std::string, jstring>(
+ env, j_decoder_strings, &webrtc::JavaToNativeString);
+ std::map<int, webrtc::SdpAudioFormat> decoder_specs;
+
+ for (const webrtc::AudioCodecSpec& decoder : supported_codecs_) {
+ if (std::find(chosen_decoders.begin(), chosen_decoders.end(),
+ decoder.format.name) != chosen_decoders.end()) {
+ decoder_specs.insert(
+ {GetPayloadType(decoder.format.name), decoder.format});
+ }
+ }
+
+ voip_engine_->Codec().SetReceiveCodecs(*channel_, decoder_specs);
+}
+
+void AndroidVoipClient::SetLocalAddress(
+ JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_ip_address_string,
+ jint j_port_number_int) {
+ const std::string& ip_address =
+ webrtc::JavaToNativeString(env, j_ip_address_string);
+ rtp_local_address_ = rtc::SocketAddress(ip_address, j_port_number_int);
+ rtcp_local_address_ = rtc::SocketAddress(ip_address, j_port_number_int + 1);
+}
+
+void AndroidVoipClient::SetRemoteAddress(
+ JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_ip_address_string,
+ jint j_port_number_int) {
+ const std::string& ip_address =
+ webrtc::JavaToNativeString(env, j_ip_address_string);
+ rtp_remote_address_ = rtc::SocketAddress(ip_address, j_port_number_int);
+ rtcp_remote_address_ = rtc::SocketAddress(ip_address, j_port_number_int + 1);
+}
+
+jboolean AndroidVoipClient::StartSession(JNIEnv* env) {
+ // Due to consistent thread requirement on
+ // modules/utility/source/process_thread_impl.cc,
+ // code is invoked in the context of voip_thread_.
+ channel_ = voip_thread_->Invoke<absl::optional<webrtc::ChannelId>>(
+ RTC_FROM_HERE,
+ [this] { return voip_engine_->Base().CreateChannel(this, 0); });
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel creation failed";
+ return false;
+ }
+
+ rtp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(),
+ rtp_local_address_));
+ if (!rtp_socket_) {
+ RTC_LOG_ERR(LERROR) << "Socket creation failed";
+ return false;
+ }
+ rtp_socket_->SignalReadPacket.connect(
+ this, &AndroidVoipClient::OnSignalReadRTPPacket);
+
+ rtcp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(),
+ rtcp_local_address_));
+ if (!rtcp_socket_) {
+ RTC_LOG_ERR(LERROR) << "Socket creation failed";
+ return false;
+ }
+ rtcp_socket_->SignalReadPacket.connect(
+ this, &AndroidVoipClient::OnSignalReadRTCPPacket);
+
+ return true;
+}
+
+jboolean AndroidVoipClient::StopSession(JNIEnv* env) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return false;
+ }
+ if (!StopSend(env) || !StopPlayout(env)) {
+ return false;
+ }
+
+ rtp_socket_->Close();
+ rtcp_socket_->Close();
+ // Due to consistent thread requirement on
+ // modules/utility/source/process_thread_impl.cc,
+ // code is invoked in the context of voip_thread_.
+ voip_thread_->Invoke<void>(RTC_FROM_HERE, [this] {
+ voip_engine_->Base().ReleaseChannel(*channel_);
+ });
+ channel_ = absl::nullopt;
+ return true;
+}
+
+jboolean AndroidVoipClient::StartSend(JNIEnv* env) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return false;
+ }
+ // Due to consistent thread requirement on
+ // modules/audio_device/android/opensles_recorder.cc,
+ // code is invoked in the context of voip_thread_.
+ return voip_thread_->Invoke<bool>(RTC_FROM_HERE, [this] {
+ return voip_engine_->Base().StartSend(*channel_);
+ });
+}
+
+jboolean AndroidVoipClient::StopSend(JNIEnv* env) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return false;
+ }
+ // Due to consistent thread requirement on
+ // modules/audio_device/android/opensles_recorder.cc,
+ // code is invoked in the context of voip_thread_.
+ return voip_thread_->Invoke<bool>(RTC_FROM_HERE, [this] {
+ return voip_engine_->Base().StopSend(*channel_);
+ });
+}
+
+jboolean AndroidVoipClient::StartPlayout(JNIEnv* env) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return false;
+ }
+ // Due to consistent thread requirement on
+ // modules/audio_device/android/opensles_player.cc,
+ // code is invoked in the context of voip_thread_.
+ return voip_thread_->Invoke<bool>(RTC_FROM_HERE, [this] {
+ return voip_engine_->Base().StartPlayout(*channel_);
+ });
+}
+
+jboolean AndroidVoipClient::StopPlayout(JNIEnv* env) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return false;
+ }
+ // Due to consistent thread requirement on
+ // modules/audio_device/android/opensles_player.cc,
+ // code is invoked in the context of voip_thread_.
+ return voip_thread_->Invoke<bool>(RTC_FROM_HERE, [this] {
+ return voip_engine_->Base().StopPlayout(*channel_);
+ });
+}
+
+void AndroidVoipClient::Delete(JNIEnv* env) {
+ delete this;
+}
+
+bool AndroidVoipClient::SendRtp(const uint8_t* packet,
+ size_t length,
+ const webrtc::PacketOptions& options) {
+ if (!rtp_socket_->SendTo(packet, length, rtp_remote_address_,
+ rtc::PacketOptions())) {
+ RTC_LOG(LS_ERROR) << "Failed to send RTP packet";
+ return false;
+ }
+ return true;
+}
+
+bool AndroidVoipClient::SendRtcp(const uint8_t* packet, size_t length) {
+ if (!rtcp_socket_->SendTo(packet, length, rtcp_remote_address_,
+ rtc::PacketOptions())) {
+ RTC_LOG(LS_ERROR) << "Failed to send RTCP packet";
+ return false;
+ }
+ return true;
+}
+
+void AndroidVoipClient::OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return;
+ }
+ voip_engine_->Network().ReceivedRTPPacket(
+ *channel_, rtc::ArrayView<const uint8_t>(
+ reinterpret_cast<const uint8_t*>(rtp_packet), size));
+}
+
+void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtcp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp) {
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ return;
+ }
+ voip_engine_->Network().ReceivedRTCPPacket(
+ *channel_, rtc::ArrayView<const uint8_t>(
+ reinterpret_cast<const uint8_t*>(rtcp_packet), size));
+}
+
+static jlong JNI_VoipClient_CreateClient(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& application_context) {
+ return webrtc::NativeToJavaPointer(
+ AndroidVoipClient::Create(env, application_context));
+}
+
+} // namespace webrtc_examples
diff --git a/examples/androidvoip/jni/android_voip_client.h b/examples/androidvoip/jni/android_voip_client.h
new file mode 100644
index 0000000000..aed652e281
--- /dev/null
+++ b/examples/androidvoip/jni/android_voip_client.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
+#define EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
+
+#include <jni.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/audio_codecs/audio_format.h"
+#include "api/call/transport.h"
+#include "api/voip/voip_base.h"
+#include "api/voip/voip_engine.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/async_udp_socket.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc_examples {
+
+// AndroidVoipClient facilitates the use of the VoIP API defined in
+// api/voip/voip_engine.h. One instance of AndroidVoipClient should
+// suffice for most VoIP applications. AndroidVoipClient implements
+// webrtc::Transport to send RTP/RTCP packets to the remote endpoint.
+// It also creates methods (slots) for sockets to connect to in
+// order to receive RTP/RTCP packets. AndroidVoipClient does all
+// VoipBase related operations with rtc::Thread (voip_thread_), this
+// is to comply with consistent thread usage requirement with
+// ProcessThread used within VoipEngine. AndroidVoipClient is meant
+// to be used by Java through JNI.
+class AndroidVoipClient : public webrtc::Transport,
+ public sigslot::has_slots<> {
+ public:
+ // Returns a pointer to an AndroidVoipClient object. Clients should
+ // use this factory method to create AndroidVoipClient objects. The
+ // method will return a nullptr in case of initialization errors.
+ // It is the client's responsibility to delete the pointer when
+ // they are done with it (this class provides a Delete() method).
+ static AndroidVoipClient* Create(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& application_context);
+
+ ~AndroidVoipClient() override;
+
+ // Returns a Java List of Strings containing names of the built-in
+ // supported codecs.
+ webrtc::ScopedJavaLocalRef<jobject> GetSupportedCodecs(JNIEnv* env);
+
+ // Returns a Java String of the default local IPv4 address. If IPv4
+ // address is not found, returns the default local IPv6 address. If
+ // IPv6 address is not found, returns an empty string.
+ webrtc::ScopedJavaLocalRef<jstring> GetLocalIPAddress(JNIEnv* env);
+
+ // Sets the encoder used by the VoIP API.
+ void SetEncoder(JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_encoder_string);
+
+ // Sets the decoders used by the VoIP API.
+ void SetDecoders(JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& j_decoder_strings);
+
+ // Sets two local/remote addresses, one for RTP packets, and another for
+ // RTCP packets. The RTP address will have IP address j_ip_address_string
+ // and port number j_port_number_int, the RTCP address will have IP address
+ // j_ip_address_string and port number j_port_number_int+1.
+ void SetLocalAddress(JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_ip_address_string,
+ jint j_port_number_int);
+ void SetRemoteAddress(JNIEnv* env,
+ const webrtc::JavaRef<jstring>& j_ip_address_string,
+ jint j_port_number_int);
+
+ // Starts a VoIP session. The VoIP operations below can only be
+ // used after a session has already started. Returns true if session
+ // started successfully and false otherwise.
+ jboolean StartSession(JNIEnv* env);
+
+ // Stops the current session. Returns true if session stopped
+ // successfully and false otherwise.
+ jboolean StopSession(JNIEnv* env);
+
+ // Starts sending RTP/RTCP packets to the remote endpoint. Returns
+ // the return value of StartSend in api/voip/voip_base.h.
+ jboolean StartSend(JNIEnv* env);
+
+ // Stops sending RTP/RTCP packets to the remote endpoint. Returns
+ // the return value of StopSend in api/voip/voip_base.h.
+ jboolean StopSend(JNIEnv* env);
+
+ // Starts playing out the voice data received from the remote endpoint.
+ // Returns the return value of StartPlayout in api/voip/voip_base.h.
+ jboolean StartPlayout(JNIEnv* env);
+
+ // Stops playing out the voice data received from the remote endpoint.
+ // Returns the return value of StopPlayout in api/voip/voip_base.h.
+ jboolean StopPlayout(JNIEnv* env);
+
+ // Deletes this object. Used by client when they are done.
+ void Delete(JNIEnv* env);
+
+ // Implementation for Transport.
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const webrtc::PacketOptions& options) override;
+ bool SendRtcp(const uint8_t* packet, size_t length) override;
+
+ // Slots for sockets to connect to.
+ void OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp);
+ void OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtcp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp);
+
+ private:
+ AndroidVoipClient(JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& application_context);
+
+ // Used to invoke VoipBase operations and send/receive
+ // RTP/RTCP packets.
+ std::unique_ptr<rtc::Thread> voip_thread_;
+ // A list of AudioCodecSpec supported by the built-in
+ // encoder/decoder factories.
+ std::vector<webrtc::AudioCodecSpec> supported_codecs_;
+ // The entry point to all VoIP APIs.
+ std::unique_ptr<webrtc::VoipEngine> voip_engine_;
+ // Used by the VoIP API to facilitate a VoIP session.
+ absl::optional<webrtc::ChannelId> channel_;
+ // Members below are used for network related operations.
+ std::unique_ptr<rtc::AsyncUDPSocket> rtp_socket_;
+ std::unique_ptr<rtc::AsyncUDPSocket> rtcp_socket_;
+ rtc::SocketAddress rtp_local_address_;
+ rtc::SocketAddress rtcp_local_address_;
+ rtc::SocketAddress rtp_remote_address_;
+ rtc::SocketAddress rtcp_remote_address_;
+};
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
diff --git a/examples/androidvoip/jni/onload.cc b/examples/androidvoip/jni/onload.cc
new file mode 100644
index 0000000000..b952de348b
--- /dev/null
+++ b/examples/androidvoip/jni/onload.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/base/init.h"
+
+namespace webrtc_examples {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ webrtc::InitAndroid(jvm);
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ return JNI_VERSION_1_6;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_examples
diff --git a/examples/androidvoip/res/layout/activity_main.xml b/examples/androidvoip/res/layout/activity_main.xml
new file mode 100644
index 0000000000..c7fa5a9b31
--- /dev/null
+++ b/examples/androidvoip/res/layout/activity_main.xml
@@ -0,0 +1,303 @@
+<?xml version="1.0" encoding="utf-8"?>
+<ScrollView
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:app="http://schemas.android.com/apk/res-auto"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:id="@+id/scroll_view"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:focusable="true"
+ android:focusableInTouchMode="true"
+ tools:context="org.webrtc.examples.androidvoip.MainActivity">
+
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:padding="8dp">
+
+ <TextView
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="15dp"
+ android:text="@string/local_endpoint_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <!--Local IP Adress-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical" >
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="12dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/ip_address_text_view"
+ android:textSize="16dp" />
+
+ <TextView
+ android:id="@+id/local_ip_address_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="12dp"
+ android:layout_marginRight="15dp"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <!--Local Port Number-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/port_number_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/local_port_number_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:text="10000"
+ android:inputType="number"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <TextView
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="30dp"
+ android:text="@string/remote_endpoint_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <!--Remote IP Adress-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/ip_address_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/remote_ip_address_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="wrap_content"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:inputType="number"
+ android:digits="0123456789."
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <!--Remote Port Number-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/port_number_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/remote_port_number_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:text="10000"
+ android:inputType="number"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="30dp"
+ android:text="@string/encoder_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <Spinner
+ android:id="@+id/encoder_spinner"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="10dp"/>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_marginTop="20dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="25dp"
+ android:text="@string/decoder_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <Button
+ android:id="@+id/decoder_selection_button"
+ android:text="@string/decoder_selection_button"
+ style="?android:attr/buttonBarButtonStyle"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+
+ <TextView
+ android:id="@+id/decoders_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="15dp"
+ android:layout_marginBottom="30dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/decoders_text_view_default"
+ android:textSize="16dp" />
+
+
+ <RelativeLayout
+ android:id="@+id/switch_layout"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="15dp"
+ android:visibility="gone" >
+
+ <View
+ android:id="@+id/divider"
+ android:layout_width="match_parent"
+ android:layout_height="1dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:layout_marginBottom="45dp"
+ android:background="@color/light_gray" />
+
+ <LinearLayout
+ android:id="@+id/start_send_switch_layout"
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical"
+ android:layout_below="@id/divider" >
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginLeft="15dp"
+ android:gravity="left"
+ android:layout_weight="1"
+ android:text="@string/start_send_text_view"
+ android:textSize="16dp" />
+
+ <Switch
+ android:id="@+id/start_send_switch"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:gravity="right"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical"
+ android:layout_below="@id/start_send_switch_layout">
+
+ <TextView
+ android:id="@+id/start_playout_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginLeft="15dp"
+ android:gravity="left"
+ android:layout_weight="1"
+ android:text="@string/start_playout_text_view"
+ android:textSize="16dp" />
+
+ <Switch
+ android:id="@+id/start_playout_switch"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:gravity="right"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+ </RelativeLayout>
+
+ <LinearLayout
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:gravity="center"
+ android:orientation="vertical" >
+
+ <ToggleButton
+ android:id="@+id/session_button"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_margin="8dp"
+ android:textOff="@string/session_button_text_off"
+ android:textOn="@string/session_button_text_on"
+ style="?android:attr/buttonStyle" />
+
+ </LinearLayout>
+
+ </LinearLayout>
+
+</ScrollView>
diff --git a/examples/androidvoip/res/values/colors.xml b/examples/androidvoip/res/values/colors.xml
new file mode 100644
index 0000000000..4dadaa9941
--- /dev/null
+++ b/examples/androidvoip/res/values/colors.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <color name="almost_black">#484848</color>
+ <color name="light_gray">#D3D3D3</color>
+</resources> \ No newline at end of file
diff --git a/examples/androidvoip/res/values/strings.xml b/examples/androidvoip/res/values/strings.xml
new file mode 100644
index 0000000000..d519bfbbb6
--- /dev/null
+++ b/examples/androidvoip/res/values/strings.xml
@@ -0,0 +1,19 @@
+<resources>
+ <string name="app_name">androidvoip</string>
+ <string name="local_endpoint_text_view">Local Endpoint</string>
+ <string name="remote_endpoint_text_view">Remote Endpoint</string>
+ <string name="ip_address_text_view">IP Address:</string>
+ <string name="port_number_text_view">Port Number:</string>
+ <string name="encoder_text_view">Select Encoder</string>
+ <string name="decoder_text_view">Select Decoder</string>
+ <string name="decoder_selection_button">Configure Selection</string>
+ <string name="decoders_text_view_default">No decoders selected</string>
+ <string name="dialog_title">Choose Decoders</string>
+ <string name="ok_label">Ok</string>
+ <string name="dismiss_label">Dismiss</string>
+ <string name="clear_all_label">Clear All</string>
+ <string name="start_send_text_view">Start Sending</string>
+ <string name="start_playout_text_view">Start Playout</string>
+ <string name="session_button_text_off">Start Session</string>
+ <string name="session_button_text_on">Stop Session</string>
+</resources>
diff --git a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
index dea7742a1b..a3ede07f3b 100644
--- a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
+++ b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
@@ -134,6 +134,7 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
delegate:self];
videoCallViewController.modalTransitionStyle =
UIModalTransitionStyleCrossDissolve;
+ videoCallViewController.modalPresentationStyle = UIModalPresentationFullScreen;
[self presentViewController:videoCallViewController
animated:YES
completion:nil];
diff --git a/examples/objcnativeapi/objc/objc_call_client.h b/examples/objcnativeapi/objc/objc_call_client.h
index 90ac20ac01..b952402bc0 100644
--- a/examples/objcnativeapi/objc/objc_call_client.h
+++ b/examples/objcnativeapi/objc/objc_call_client.h
@@ -18,7 +18,7 @@
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
@class RTC_OBJC_TYPE(RTCVideoCapturer);
@@ -50,7 +50,7 @@ class ObjCCallClient {
void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
private:
- const ObjCCallClient* client_;
+ ObjCCallClient* const client_;
};
void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
@@ -73,7 +73,7 @@ class ObjCCallClient {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source_
RTC_GUARDED_BY(thread_checker_);
- rtc::CriticalSection pc_mutex_;
+ webrtc::Mutex pc_mutex_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_ RTC_GUARDED_BY(pc_mutex_);
};
diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm
index 52ee2b5f95..5ce7eb7804 100644
--- a/examples/objcnativeapi/objc/objc_call_client.mm
+++ b/examples/objcnativeapi/objc/objc_call_client.mm
@@ -68,7 +68,7 @@ void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer) {
RTC_DCHECK_RUN_ON(&thread_checker_);
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
if (call_started_) {
RTC_LOG(LS_WARNING) << "Call already started.";
return;
@@ -90,7 +90,7 @@ void ObjCCallClient::Hangup() {
call_started_ = false;
{
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
if (pc_ != nullptr) {
pc_->Close();
pc_ = nullptr;
@@ -138,7 +138,7 @@ void ObjCCallClient::CreatePeerConnectionFactory() {
}
void ObjCCallClient::CreatePeerConnection() {
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
webrtc::PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
// DTLS SRTP has to be disabled for loopback to work.
@@ -165,7 +165,7 @@ void ObjCCallClient::CreatePeerConnection() {
}
void ObjCCallClient::Connect() {
- rtc::CritScope lock(&pc_mutex_);
+ webrtc::MutexLock lock(&pc_mutex_);
pc_->CreateOffer(new rtc::RefCountedObject<CreateOfferObserver>(pc_),
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}
@@ -198,7 +198,7 @@ void ObjCCallClient::PCObserver::OnIceGatheringChange(
void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
- rtc::CritScope lock(&client_->pc_mutex_);
+ webrtc::MutexLock lock(&client_->pc_mutex_);
RTC_DCHECK(client_->pc_ != nullptr);
client_->pc_->AddIceCandidate(candidate);
}
diff --git a/logging/BUILD.gn b/logging/BUILD.gn
index 28176d2583..c1edd69680 100644
--- a/logging/BUILD.gn
+++ b/logging/BUILD.gn
@@ -53,8 +53,8 @@ rtc_library("rtc_event_pacing") {
deps = [
"../api:scoped_refptr",
"../api/rtc_event_log",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_audio") {
@@ -75,8 +75,8 @@ rtc_library("rtc_event_audio") {
"../api/rtc_event_log",
"../modules/audio_coding:audio_network_adaptor_config",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_bwe") {
@@ -101,6 +101,8 @@ rtc_library("rtc_event_bwe") {
"../api/rtc_event_log",
"../api/units:data_rate",
"../modules/remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -119,6 +121,8 @@ rtc_library("rtc_event_generic_packet_events") {
deps = [
"../api/rtc_event_log",
"../rtc_base:timeutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -143,8 +147,8 @@ rtc_library("rtc_event_rtp_rtcp") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_video") {
@@ -160,8 +164,8 @@ rtc_library("rtc_event_video") {
"../api:scoped_refptr",
"../api/rtc_event_log",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
# TODO(eladalon): Break down into (1) encoder and (2) decoder; we don't need
@@ -187,6 +191,8 @@ rtc_library("rtc_event_log_impl_encoder") {
"../rtc_base:checks",
"../rtc_base:ignore_wundef",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -245,8 +251,8 @@ if (rtc_enable_protobuf) {
"../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax",
"../rtc_base/synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -318,6 +324,8 @@ if (rtc_enable_protobuf) {
"../rtc_base:protobuf_utils",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_numerics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -369,6 +377,8 @@ if (rtc_enable_protobuf) {
"../test:fileutils",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -417,8 +427,8 @@ rtc_library("ice_log") {
"../api:libjingle_peerconnection_api", # For api/dtls_transport_interface.h
"../api/rtc_event_log",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
if (rtc_include_tests) {
diff --git a/logging/rtc_event_log/encoder/blob_encoding.h b/logging/rtc_event_log/encoder/blob_encoding.h
index 4a38dc5d0b..b5b589aaf6 100644
--- a/logging/rtc_event_log/encoder/blob_encoding.h
+++ b/logging/rtc_event_log/encoder/blob_encoding.h
@@ -43,7 +43,7 @@ namespace webrtc {
//
// Note that the returned std::string might have been reserved for significantly
// more memory than it ends up using. If the caller to EncodeBlobs() intends
-// to store the result long-term, he should consider shrink_to_fit()-ing it.
+// to store the result long-term, they should consider shrink_to_fit()-ing it.
std::string EncodeBlobs(const std::vector<std::string>& blobs);
std::vector<absl::string_view> DecodeBlobs(absl::string_view encoded_blobs,
size_t num_of_blobs);
diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
index 3059621690..cf85775f52 100644
--- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
+++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
@@ -675,13 +675,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStarted) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStopped) {
- const int64_t timestamp_us = rtc::TimeMicros();
- std::string encoded = encoder_->EncodeLogEnd(timestamp_us);
+ const int64_t start_timestamp_us = rtc::TimeMicros();
+ const int64_t start_utc_time_us = rtc::TimeUTCMicros();
+ std::string encoded =
+ encoder_->EncodeLogStart(start_timestamp_us, start_utc_time_us);
+
+ const int64_t stop_timestamp_us = rtc::TimeMicros();
+ encoded += encoder_->EncodeLogEnd(stop_timestamp_us);
ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
const auto& stop_log_events = parsed_log_.stop_log_events();
ASSERT_EQ(stop_log_events.size(), 1u);
- verifier_.VerifyLoggedStopEvent(timestamp_us, stop_log_events[0]);
+ verifier_.VerifyLoggedStopEvent(stop_timestamp_us, stop_log_events[0]);
}
// TODO(eladalon/terelius): Test with multiple events in the batch.
diff --git a/logging/rtc_event_log/mock/mock_rtc_event_log.h b/logging/rtc_event_log/mock/mock_rtc_event_log.h
index 66a2065ed4..646831de27 100644
--- a/logging/rtc_event_log/mock/mock_rtc_event_log.h
+++ b/logging/rtc_event_log/mock/mock_rtc_event_log.h
@@ -21,20 +21,20 @@ namespace webrtc {
class MockRtcEventLog : public RtcEventLog {
public:
MockRtcEventLog();
- ~MockRtcEventLog();
+ ~MockRtcEventLog() override;
- virtual bool StartLogging(std::unique_ptr<RtcEventLogOutput> output,
- int64_t output_period_ms) {
- return StartLoggingProxy(output.get(), output_period_ms);
- }
- MOCK_METHOD2(StartLoggingProxy, bool(RtcEventLogOutput*, int64_t));
+ MOCK_METHOD(bool,
+ StartLogging,
+ (std::unique_ptr<RtcEventLogOutput> output,
+ int64_t output_period_ms),
+ (override));
- MOCK_METHOD0(StopLogging, void());
+ MOCK_METHOD(void, StopLogging, (), (override));
- virtual void Log(std::unique_ptr<RtcEvent> event) {
+ void Log(std::unique_ptr<RtcEvent> event) override {
return LogProxy(event.get());
}
- MOCK_METHOD1(LogProxy, void(RtcEvent*));
+ MOCK_METHOD(void, LogProxy, (RtcEvent*));
};
} // namespace webrtc
diff --git a/logging/rtc_event_log/rtc_event_log_parser.cc b/logging/rtc_event_log/rtc_event_log_parser.cc
index 4016f84339..c88207607c 100644
--- a/logging/rtc_event_log/rtc_event_log_parser.cc
+++ b/logging/rtc_event_log/rtc_event_log_parser.cc
@@ -1076,6 +1076,7 @@ void ParsedRtcEventLog::Clear() {
first_timestamp_ = std::numeric_limits<int64_t>::max();
last_timestamp_ = std::numeric_limits<int64_t>::min();
+ first_log_segment_ = LogSegment(0, std::numeric_limits<int64_t>::max());
incoming_rtp_extensions_maps_.clear();
outgoing_rtp_extensions_maps_.clear();
@@ -1214,6 +1215,38 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream(
StoreFirstAndLastTimestamp(generic_packets_sent_);
StoreFirstAndLastTimestamp(generic_packets_received_);
StoreFirstAndLastTimestamp(generic_acks_received_);
+ StoreFirstAndLastTimestamp(remote_estimate_events_);
+
+ // Stop events could be missing due to file size limits. If so, use the
+ // last event, or the next start timestamp if available.
+ // TODO(terelius): This could be improved. Instead of using the next start
+ // event, we could use the timestamp of the the last previous regular event.
+ auto start_iter = start_log_events().begin();
+ auto stop_iter = stop_log_events().begin();
+ int64_t start_us = first_timestamp();
+ int64_t next_start_us = std::numeric_limits<int64_t>::max();
+ int64_t stop_us = std::numeric_limits<int64_t>::max();
+ if (start_iter != start_log_events().end()) {
+ start_us = std::min(start_us, start_iter->log_time_us());
+ ++start_iter;
+ if (start_iter != start_log_events().end())
+ next_start_us = start_iter->log_time_us();
+ }
+ if (stop_iter != stop_log_events().end()) {
+ stop_us = stop_iter->log_time_us();
+ }
+ stop_us = std::min(stop_us, next_start_us);
+ if (stop_us == std::numeric_limits<int64_t>::max() &&
+ last_timestamp() != std::numeric_limits<int64_t>::min()) {
+ stop_us = last_timestamp();
+ }
+ RTC_PARSE_CHECK_OR_RETURN_LE(start_us, stop_us);
+ first_log_segment_ = LogSegment(start_us, stop_us);
+
+ if (first_timestamp_ == std::numeric_limits<int64_t>::max() &&
+ last_timestamp_ == std::numeric_limits<int64_t>::min()) {
+ first_timestamp_ = last_timestamp_ = 0;
+ }
return status;
}
diff --git a/logging/rtc_event_log/rtc_event_log_parser.h b/logging/rtc_event_log/rtc_event_log_parser.h
index 7a162af897..8d3351e815 100644
--- a/logging/rtc_event_log/rtc_event_log_parser.h
+++ b/logging/rtc_event_log/rtc_event_log_parser.h
@@ -11,6 +11,7 @@
#define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_PARSER_H_
#include <iterator>
+#include <limits>
#include <map>
#include <set>
#include <sstream> // no-presubmit-check TODO(webrtc:8982)
@@ -193,6 +194,8 @@ class PacketView {
size_t size() const { return num_elements_; }
+ bool empty() const { return num_elements_ == 0; }
+
T& operator[](size_t i) {
auto elem_ptr = data_ + i * element_size_;
return *reinterpret_cast<T*>(elem_ptr);
@@ -330,6 +333,20 @@ class ParsedRtcEventLog {
PacketView<const LoggedRtpPacket> packet_view;
};
+ class LogSegment {
+ public:
+ LogSegment(int64_t start_time_us, int64_t stop_time_us)
+ : start_time_us_(start_time_us), stop_time_us_(stop_time_us) {}
+ int64_t start_time_ms() const { return start_time_us_ / 1000; }
+ int64_t start_time_us() const { return start_time_us_; }
+ int64_t stop_time_ms() const { return stop_time_us_ / 1000; }
+ int64_t stop_time_us() const { return stop_time_us_; }
+
+ private:
+ int64_t start_time_us_;
+ int64_t stop_time_us_;
+ };
+
static webrtc::RtpHeaderExtensionMap GetDefaultHeaderExtensionMap();
explicit ParsedRtcEventLog(
@@ -597,6 +614,8 @@ class ParsedRtcEventLog {
int64_t first_timestamp() const { return first_timestamp_; }
int64_t last_timestamp() const { return last_timestamp_; }
+ const LogSegment& first_log_segment() const { return first_log_segment_; }
+
std::vector<LoggedPacketInfo> GetPacketInfos(PacketDirection direction) const;
std::vector<LoggedPacketInfo> GetIncomingPacketInfos() const {
return GetPacketInfos(kIncomingPacket);
@@ -850,6 +869,9 @@ class ParsedRtcEventLog {
int64_t first_timestamp_;
int64_t last_timestamp_;
+ LogSegment first_log_segment_ =
+ LogSegment(0, std::numeric_limits<int64_t>::max());
+
// The extension maps are mutable to allow us to insert the default
// configuration when parsing an RTP header for an unconfigured stream.
// TODO(terelius): This is only used for the legacy format. Remove once we've
diff --git a/logging/rtc_event_log/rtc_event_log_unittest.cc b/logging/rtc_event_log/rtc_event_log_unittest.cc
index 579c652880..e785d6160a 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest.cc
+++ b/logging/rtc_event_log/rtc_event_log_unittest.cc
@@ -739,6 +739,11 @@ void RtcEventLogSession::ReadAndVerifyLog() {
EXPECT_EQ(first_timestamp_ms_, parsed_log.first_timestamp() / 1000);
EXPECT_EQ(last_timestamp_ms_, parsed_log.last_timestamp() / 1000);
+ EXPECT_EQ(parsed_log.first_log_segment().start_time_ms(),
+ std::min(start_time_us_ / 1000, first_timestamp_ms_));
+ EXPECT_EQ(parsed_log.first_log_segment().stop_time_ms(),
+ stop_time_us_ / 1000);
+
// Clean up temporary file - can be pretty slow.
remove(temp_filename_.c_str());
}
diff --git a/media/BUILD.gn b/media/BUILD.gn
index 28a8755615..72bec31d2f 100644
--- a/media/BUILD.gn
+++ b/media/BUILD.gn
@@ -36,8 +36,8 @@ rtc_library("rtc_h264_profile_id") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("rtc_media_config") {
@@ -57,8 +57,8 @@ rtc_library("rtc_vp9_profile") {
"../api/video_codecs:video_codecs_api",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_sdp_fmtp_utils") {
@@ -71,8 +71,8 @@ rtc_library("rtc_sdp_fmtp_utils") {
deps = [
"../api/video_codecs:video_codecs_api",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_media_base") {
@@ -96,7 +96,6 @@ rtc_library("rtc_media_base") {
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
"../api/transport:stun_types",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/video:video_bitrate_allocation",
"../api/video:video_bitrate_allocator_factory",
@@ -116,11 +115,14 @@ rtc_library("rtc_media_base") {
"../rtc_base:rtc_task_queue",
"../rtc_base:sanitizer",
"../rtc_base:stringutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:file_wrapper",
"../rtc_base/system:rtc_export",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -197,8 +199,8 @@ rtc_library("rtc_simulcast_encoder_adapter") {
"../rtc_base/system:rtc_export",
"../system_wrappers",
"../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_encoder_simulcast_proxy") {
@@ -253,8 +255,8 @@ rtc_library("rtc_internal_video_codecs") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
"../test:fake_video_codecs",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
sources = [
"engine/fake_video_codec_factory.cc",
"engine/fake_video_codec_factory.h",
@@ -290,8 +292,6 @@ rtc_library("rtc_audio_video") {
"../api/audio_codecs:audio_codecs_api",
"../api/task_queue",
"../api/transport:bitrate_settings",
- "../api/transport:datagram_transport_interface",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/units:data_rate",
"../api/video:video_bitrate_allocation",
@@ -318,17 +318,21 @@ rtc_library("rtc_audio_video") {
"../rtc_base",
"../rtc_base:audio_format_to_string",
"../rtc_base:checks",
+ "../rtc_base:ignore_wundef",
"../rtc_base:rtc_task_queue",
"../rtc_base:stringutils",
"../rtc_base/experiments:field_trial_parser",
"../rtc_base/experiments:min_video_bitrate_experiment",
"../rtc_base/experiments:normalize_simulcast_size_experiment",
"../rtc_base/experiments:rate_control_settings",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:rtc_export",
"../rtc_base/third_party/base64",
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -358,7 +362,10 @@ rtc_library("rtc_audio_video") {
deps += [ "../modules/video_capture:video_capture_internal_impl" ]
}
if (rtc_enable_protobuf) {
- deps += [ "../modules/audio_processing/aec_dump:aec_dump_impl" ]
+ deps += [
+ "../modules/audio_coding:ana_config_proto",
+ "../modules/audio_processing/aec_dump:aec_dump_impl",
+ ]
} else {
deps += [ "../modules/audio_processing/aec_dump:null_aec_dump_factory" ]
}
@@ -403,8 +410,11 @@ rtc_library("rtc_data") {
"../p2p:rtc_p2p",
"../rtc_base",
"../rtc_base:rtc_base_approved",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/third_party/sigslot",
"../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -422,11 +432,6 @@ rtc_library("rtc_data") {
}
if (rtc_enable_sctp && rtc_build_usrsctp) {
- include_dirs = [
- # TODO(jiayl): move this into the public_configs of
- # //third_party/usrsctp/BUILD.gn.
- "//third_party/usrsctp/usrsctplib",
- ]
deps += [ "//third_party/usrsctp" ]
}
}
@@ -476,9 +481,12 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../rtc_base:stringutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/third_party/sigslot",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
]
@@ -553,7 +561,6 @@ if (rtc_include_tests) {
"../api/task_queue:default_task_queue_factory",
"../api/test/video:function_video_factory",
"../api/transport:field_trial_based_config",
- "../api/transport/media:media_transport_interface",
"../api/units:time_delta",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_bitrate_allocation",
@@ -586,6 +593,7 @@ if (rtc_include_tests) {
"../rtc_base:rtc_task_queue",
"../rtc_base:stringutils",
"../rtc_base/experiments:min_video_bitrate_experiment",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/third_party/sigslot",
"../test:audio_codec_mocks",
"../test:fake_video_codecs",
@@ -651,5 +659,9 @@ if (rtc_include_tests) {
if (is_ios) {
deps += [ ":rtc_media_unittests_bundle_data" ]
}
+
+ if (rtc_enable_sctp && rtc_build_usrsctp) {
+ deps += [ "//third_party/usrsctp" ]
+ }
}
}
diff --git a/media/base/adapted_video_track_source.cc b/media/base/adapted_video_track_source.cc
index c4918725d2..2fce973f68 100644
--- a/media/base/adapted_video_track_source.cc
+++ b/media/base/adapted_video_track_source.cc
@@ -27,7 +27,7 @@ AdaptedVideoTrackSource::AdaptedVideoTrackSource(int required_alignment)
AdaptedVideoTrackSource::~AdaptedVideoTrackSource() = default;
bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
- rtc::CritScope lock(&stats_crit_);
+ webrtc::MutexLock lock(&stats_mutex_);
if (!stats_) {
return false;
@@ -93,7 +93,7 @@ bool AdaptedVideoTrackSource::AdaptFrame(int width,
int* crop_x,
int* crop_y) {
{
- rtc::CritScope lock(&stats_crit_);
+ webrtc::MutexLock lock(&stats_mutex_);
stats_ = Stats{width, height};
}
diff --git a/media/base/adapted_video_track_source.h b/media/base/adapted_video_track_source.h
index 7dbab540ed..59ae036ff6 100644
--- a/media/base/adapted_video_track_source.h
+++ b/media/base/adapted_video_track_source.h
@@ -21,7 +21,7 @@
#include "api/video/video_source_interface.h"
#include "media/base/video_adapter.h"
#include "media/base/video_broadcaster.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
@@ -89,8 +89,8 @@ class RTC_EXPORT AdaptedVideoTrackSource
cricket::VideoAdapter video_adapter_;
- rtc::CriticalSection stats_crit_;
- absl::optional<Stats> stats_ RTC_GUARDED_BY(stats_crit_);
+ webrtc::Mutex stats_mutex_;
+ absl::optional<Stats> stats_ RTC_GUARDED_BY(stats_mutex_);
VideoBroadcaster broadcaster_;
};
diff --git a/media/base/codec.h b/media/base/codec.h
index fd8a97c5e4..c3be2334ce 100644
--- a/media/base/codec.h
+++ b/media/base/codec.h
@@ -67,6 +67,8 @@ struct RTC_EXPORT Codec {
int id;
std::string name;
int clockrate;
+ // Non key-value parameters such as the telephone-event "0‐15" are
+ // represented using an empty string as key, i.e. {"": "0-15"}.
CodecParameterMap params;
FeedbackParams feedback_params;
diff --git a/media/base/fake_network_interface.h b/media/base/fake_network_interface.h
index 7d50ca84bc..eb08f69b5f 100644
--- a/media/base/fake_network_interface.h
+++ b/media/base/fake_network_interface.h
@@ -19,9 +19,9 @@
#include "media/base/rtp_utils.h"
#include "rtc_base/byte_order.h"
#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/dscp.h"
#include "rtc_base/message_handler.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread.h"
namespace cricket {
@@ -43,14 +43,15 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
// Conference mode is a mode where instead of simply forwarding the packets,
// the transport will send multiple copies of the packet with the specified
// SSRCs. This allows us to simulate receiving media from multiple sources.
- void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs) {
- rtc::CritScope cs(&crit_);
+ void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs)
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
conf_ = conf;
conf_sent_ssrcs_ = ssrcs;
}
- int NumRtpBytes() {
- rtc::CritScope cs(&crit_);
+ int NumRtpBytes() RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
int bytes = 0;
for (size_t i = 0; i < rtp_packets_.size(); ++i) {
bytes += static_cast<int>(rtp_packets_[i].size());
@@ -58,48 +59,50 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
return bytes;
}
- int NumRtpBytes(uint32_t ssrc) {
- rtc::CritScope cs(&crit_);
+ int NumRtpBytes(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
int bytes = 0;
GetNumRtpBytesAndPackets(ssrc, &bytes, NULL);
return bytes;
}
- int NumRtpPackets() {
- rtc::CritScope cs(&crit_);
+ int NumRtpPackets() RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
return static_cast<int>(rtp_packets_.size());
}
- int NumRtpPackets(uint32_t ssrc) {
- rtc::CritScope cs(&crit_);
+ int NumRtpPackets(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
int packets = 0;
GetNumRtpBytesAndPackets(ssrc, NULL, &packets);
return packets;
}
- int NumSentSsrcs() {
- rtc::CritScope cs(&crit_);
+ int NumSentSsrcs() RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
return static_cast<int>(sent_ssrcs_.size());
}
// Note: callers are responsible for deleting the returned buffer.
- const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
- rtc::CritScope cs(&crit_);
- if (index >= NumRtpPackets()) {
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index)
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
+ if (index >= static_cast<int>(rtp_packets_.size())) {
return NULL;
}
return new rtc::CopyOnWriteBuffer(rtp_packets_[index]);
}
- int NumRtcpPackets() {
- rtc::CritScope cs(&crit_);
+ int NumRtcpPackets() RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
return static_cast<int>(rtcp_packets_.size());
}
// Note: callers are responsible for deleting the returned buffer.
- const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
- rtc::CritScope cs(&crit_);
- if (index >= NumRtcpPackets()) {
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index)
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
+ if (index >= static_cast<int>(rtcp_packets_.size())) {
return NULL;
}
return new rtc::CopyOnWriteBuffer(rtcp_packets_[index]);
@@ -112,8 +115,9 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
protected:
virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options) {
- rtc::CritScope cs(&crit_);
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
uint32_t cur_ssrc = 0;
if (!GetRtpSsrc(packet->data(), packet->size(), &cur_ssrc)) {
@@ -137,8 +141,9 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
}
virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options) {
- rtc::CritScope cs(&crit_);
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ webrtc::MutexLock lock(&mutex_);
rtcp_packets_.push_back(*packet);
options_ = options;
if (!conf_) {
@@ -212,7 +217,7 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
std::map<uint32_t, uint32_t> sent_ssrcs_;
// Map to track packet-number that needs to be dropped per ssrc.
std::map<uint32_t, std::set<uint32_t> > drop_map_;
- rtc::CriticalSection crit_;
+ webrtc::Mutex mutex_;
std::vector<rtc::CopyOnWriteBuffer> rtp_packets_;
std::vector<rtc::CopyOnWriteBuffer> rtcp_packets_;
int sendbuf_size_;
diff --git a/media/base/fake_video_renderer.cc b/media/base/fake_video_renderer.cc
index 801f81dec4..b3ceb352f0 100644
--- a/media/base/fake_video_renderer.cc
+++ b/media/base/fake_video_renderer.cc
@@ -15,7 +15,7 @@ namespace cricket {
FakeVideoRenderer::FakeVideoRenderer() = default;
void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
// TODO(zhurunz) Check with VP8 team to see if we can remove this
// tolerance on Y values. Some unit tests produce Y values close
// to 16 rather than close to zero, for supposedly black frames.
diff --git a/media/base/fake_video_renderer.h b/media/base/fake_video_renderer.h
index ba67bf0332..9f3c87c379 100644
--- a/media/base/fake_video_renderer.h
+++ b/media/base/fake_video_renderer.h
@@ -18,8 +18,8 @@
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
#include "api/video/video_sink_interface.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
namespace cricket {
@@ -33,46 +33,46 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
int errors() const { return errors_; }
int width() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return width_;
}
int height() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return height_;
}
webrtc::VideoRotation rotation() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return rotation_;
}
int64_t timestamp_us() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return timestamp_us_;
}
int num_rendered_frames() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return num_rendered_frames_;
}
bool black_frame() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return black_frame_;
}
int64_t ntp_time_ms() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return ntp_timestamp_ms_;
}
absl::optional<webrtc::ColorSpace> color_space() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return color_space_;
}
webrtc::RtpPacketInfos packet_infos() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return packet_infos_;
}
@@ -140,7 +140,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
int num_rendered_frames_ = 0;
int64_t ntp_timestamp_ms_ = 0;
bool black_frame_ = false;
- rtc::CriticalSection crit_;
+ mutable webrtc::Mutex mutex_;
rtc::Event frame_rendered_event_;
absl::optional<webrtc::ColorSpace> color_space_;
webrtc::RtpPacketInfos packet_infos_;
diff --git a/media/base/media_channel.cc b/media/base/media_channel.cc
index 2e9bfc3d31..0cef36e2b9 100644
--- a/media/base/media_channel.cc
+++ b/media/base/media_channel.cc
@@ -23,12 +23,9 @@ MediaChannel::MediaChannel() : enable_dscp_(false) {}
MediaChannel::~MediaChannel() {}
-void MediaChannel::SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) {
- rtc::CritScope cs(&network_interface_crit_);
+void MediaChannel::SetInterface(NetworkInterface* iface) {
+ webrtc::MutexLock lock(&network_interface_mutex_);
network_interface_ = iface;
- media_transport_config_ = media_transport_config;
UpdateDscp();
}
diff --git a/media/base/media_channel.h b/media/base/media_channel.h
index d71ec9158a..e8400a58a9 100644
--- a/media/base/media_channel.h
+++ b/media/base/media_channel.h
@@ -26,7 +26,6 @@
#include "api/media_stream_interface.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/transport/rtp/rtp_source.h"
#include "api/video/video_content_type.h"
#include "api/video/video_sink_interface.h"
@@ -46,13 +45,13 @@
#include "rtc_base/buffer.h"
#include "rtc_base/callback.h"
#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/dscp.h"
#include "rtc_base/logging.h"
#include "rtc_base/network_route.h"
#include "rtc_base/socket.h"
#include "rtc_base/string_encode.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
namespace rtc {
@@ -156,24 +155,6 @@ struct VideoOptions {
}
};
-// TODO(isheriff): Remove this once client usage is fixed to use RtpExtension.
-struct RtpHeaderExtension {
- RtpHeaderExtension() : id(0) {}
- RtpHeaderExtension(const std::string& uri, int id) : uri(uri), id(id) {}
-
- std::string ToString() const {
- rtc::StringBuilder ost;
- ost << "{";
- ost << "uri: " << uri;
- ost << ", id: " << id;
- ost << "}";
- return ost.Release();
- }
-
- std::string uri;
- int id;
-};
-
class MediaChannel : public sigslot::has_slots<> {
public:
class NetworkInterface {
@@ -195,15 +176,9 @@ class MediaChannel : public sigslot::has_slots<> {
virtual cricket::MediaType media_type() const = 0;
- // Sets the abstract interface class for sending RTP/RTCP data and
- // interface for media transport (experimental). If media transport is
- // provided, it should be used instead of RTP/RTCP.
- // TODO(sukhanov): Currently media transport can co-exist with RTP/RTCP, but
- // in the future we will refactor code to send all frames with media
- // transport.
- virtual void SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config);
+ // Sets the abstract interface class for sending RTP/RTCP data.
+ virtual void SetInterface(NetworkInterface* iface)
+ RTC_LOCKS_EXCLUDED(network_interface_mutex_);
// Called when a RTP packet is received.
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) = 0;
@@ -264,16 +239,9 @@ class MediaChannel : public sigslot::has_slots<> {
int SetOption(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
- int option) {
- rtc::CritScope cs(&network_interface_crit_);
- if (!network_interface_)
- return -1;
-
- return network_interface_->SetOption(type, opt, option);
- }
-
- const webrtc::MediaTransportConfig& media_transport_config() const {
- return media_transport_config_;
+ int option) RTC_LOCKS_EXCLUDED(network_interface_mutex_) {
+ webrtc::MutexLock lock(&network_interface_mutex_);
+ return SetOptionLocked(type, opt, option);
}
// Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
@@ -298,17 +266,28 @@ class MediaChannel : public sigslot::has_slots<> {
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer);
protected:
+ int SetOptionLocked(NetworkInterface::SocketType type,
+ rtc::Socket::Option opt,
+ int option)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_) {
+ if (!network_interface_)
+ return -1;
+ return network_interface_->SetOption(type, opt, option);
+ }
+
bool DscpEnabled() const { return enable_dscp_; }
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
- rtc::DiffServCodePoint PreferredDscp() const {
- rtc::CritScope cs(&network_interface_crit_);
+ rtc::DiffServCodePoint PreferredDscp() const
+ RTC_LOCKS_EXCLUDED(network_interface_mutex_) {
+ webrtc::MutexLock lock(&network_interface_mutex_);
return preferred_dscp_;
}
- int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp) {
- rtc::CritScope cs(&network_interface_crit_);
+ int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp)
+ RTC_LOCKS_EXCLUDED(network_interface_mutex_) {
+ webrtc::MutexLock lock(&network_interface_mutex_);
if (preferred_dscp == preferred_dscp_) {
return 0;
}
@@ -319,20 +298,23 @@ class MediaChannel : public sigslot::has_slots<> {
private:
// Apply the preferred DSCP setting to the underlying network interface RTP
// and RTCP channels. If DSCP is disabled, then apply the default DSCP value.
- int UpdateDscp() RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_crit_) {
+ int UpdateDscp() RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_) {
rtc::DiffServCodePoint value =
enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT;
- int ret = SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
+ int ret =
+ SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
if (ret == 0) {
- ret = SetOption(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value);
+ ret = SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP,
+ value);
}
return ret;
}
bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
bool rtcp,
- const rtc::PacketOptions& options) {
- rtc::CritScope cs(&network_interface_crit_);
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(network_interface_mutex_) {
+ webrtc::MutexLock lock(&network_interface_mutex_);
if (!network_interface_)
return false;
@@ -344,12 +326,11 @@ class MediaChannel : public sigslot::has_slots<> {
// |network_interface_| can be accessed from the worker_thread and
// from any MediaEngine threads. This critical section is to protect accessing
// of network_interface_ object.
- rtc::CriticalSection network_interface_crit_;
- NetworkInterface* network_interface_ RTC_GUARDED_BY(network_interface_crit_) =
- nullptr;
+ mutable webrtc::Mutex network_interface_mutex_;
+ NetworkInterface* network_interface_
+ RTC_GUARDED_BY(network_interface_mutex_) = nullptr;
rtc::DiffServCodePoint preferred_dscp_
- RTC_GUARDED_BY(network_interface_crit_) = rtc::DSCP_DEFAULT;
- webrtc::MediaTransportConfig media_transport_config_;
+ RTC_GUARDED_BY(network_interface_mutex_) = rtc::DSCP_DEFAULT;
bool extmap_allow_mixed_ = false;
};
diff --git a/media/base/media_constants.cc b/media/base/media_constants.cc
index 5144a6ea65..a918078dd8 100644
--- a/media/base/media_constants.cc
+++ b/media/base/media_constants.cc
@@ -98,15 +98,13 @@ const char kCodecParamMaxMessageSize[] = "x-google-max-message-size";
const int kGoogleRtpDataCodecPlType = 109;
const char kGoogleRtpDataCodecName[] = "google-data";
-const int kGoogleSctpDataCodecPlType = 108;
-const char kGoogleSctpDataCodecName[] = "google-sctp-data";
-
const char kComfortNoiseCodecName[] = "CN";
const char kVp8CodecName[] = "VP8";
const char kVp9CodecName[] = "VP9";
const char kAv1CodecName[] = "AV1X";
const char kH264CodecName[] = "H264";
+const char kHEVCCodecName[] = "H265X";
// RFC 6184 RTP Payload Format for H.264 video
const char kH264FmtpProfileLevelId[] = "profile-level-id";
@@ -114,6 +112,7 @@ const char kH264FmtpLevelAsymmetryAllowed[] = "level-asymmetry-allowed";
const char kH264FmtpPacketizationMode[] = "packetization-mode";
const char kH264FmtpSpropParameterSets[] = "sprop-parameter-sets";
const char kH264ProfileLevelConstrainedBaseline[] = "42e01f";
+const char kH264ProfileLevelConstrainedHigh[] = "640c1f";
const int kDefaultVideoMaxFramerate = 60;
diff --git a/media/base/media_constants.h b/media/base/media_constants.h
index b9b8a336f7..5579b6e00c 100644
--- a/media/base/media_constants.h
+++ b/media/base/media_constants.h
@@ -124,18 +124,13 @@ extern const char kCodecParamMaxMessageSize[];
extern const int kGoogleRtpDataCodecPlType;
extern const char kGoogleRtpDataCodecName[];
-// TODO(pthatcher): Find an id that won't conflict with anything. On
-// the other hand, it really shouldn't matter since the id won't be
-// used on the wire.
-extern const int kGoogleSctpDataCodecPlType;
-extern const char kGoogleSctpDataCodecName[];
-
extern const char kComfortNoiseCodecName[];
RTC_EXPORT extern const char kVp8CodecName[];
RTC_EXPORT extern const char kVp9CodecName[];
RTC_EXPORT extern const char kAv1CodecName[];
RTC_EXPORT extern const char kH264CodecName[];
+RTC_EXPORT extern const char kHEVCCodecName[];
// RFC 6184 RTP Payload Format for H.264 video
RTC_EXPORT extern const char kH264FmtpProfileLevelId[];
@@ -143,6 +138,7 @@ RTC_EXPORT extern const char kH264FmtpLevelAsymmetryAllowed[];
RTC_EXPORT extern const char kH264FmtpPacketizationMode[];
extern const char kH264FmtpSpropParameterSets[];
extern const char kH264ProfileLevelConstrainedBaseline[];
+extern const char kH264ProfileLevelConstrainedHigh[];
extern const int kDefaultVideoMaxFramerate;
diff --git a/media/base/media_engine.h b/media/base/media_engine.h
index be0ae59a04..4d9cc56a16 100644
--- a/media/base/media_engine.h
+++ b/media/base/media_engine.h
@@ -152,18 +152,6 @@ enum DataChannelType {
DCT_NONE = 0,
DCT_RTP = 1,
DCT_SCTP = 2,
-
- // Data channel transport over media transport.
- DCT_MEDIA_TRANSPORT = 3,
-
- // Data channel transport over datagram transport (with no fallback). This is
- // the same behavior as data channel transport over media transport, and is
- // usable without DTLS.
- DCT_DATA_CHANNEL_TRANSPORT = 4,
-
- // Data channel transport over datagram transport (with SCTP negotiation
- // semantics and a fallback to SCTP). Only usable with DTLS.
- DCT_DATA_CHANNEL_TRANSPORT_SCTP = 5,
};
class DataEngineInterface {
diff --git a/media/base/media_engine_unittest.cc b/media/base/media_engine_unittest.cc
index f4c6f5f045..83f80c4669 100644
--- a/media/base/media_engine_unittest.cc
+++ b/media/base/media_engine_unittest.cc
@@ -26,8 +26,10 @@ namespace {
class MockRtpHeaderExtensionQueryInterface
: public RtpHeaderExtensionQueryInterface {
public:
- MOCK_CONST_METHOD0(GetRtpHeaderExtensions,
- std::vector<RtpHeaderExtensionCapability>());
+ MOCK_METHOD(std::vector<RtpHeaderExtensionCapability>,
+ GetRtpHeaderExtensions,
+ (),
+ (const, override));
};
} // namespace
diff --git a/media/base/rtp_data_engine_unittest.cc b/media/base/rtp_data_engine_unittest.cc
index dab4058c33..f01c7c60c7 100644
--- a/media/base/rtp_data_engine_unittest.cc
+++ b/media/base/rtp_data_engine_unittest.cc
@@ -75,7 +75,7 @@ class RtpDataMediaChannelTest : public ::testing::Test {
cricket::MediaConfig config;
cricket::RtpDataMediaChannel* channel =
static_cast<cricket::RtpDataMediaChannel*>(dme->CreateChannel(config));
- channel->SetInterface(iface_.get(), webrtc::MediaTransportConfig());
+ channel->SetInterface(iface_.get());
channel->SignalDataReceived.connect(receiver_.get(),
&FakeDataReceiver::OnDataReceived);
return channel;
diff --git a/media/base/rtp_utils.cc b/media/base/rtp_utils.cc
index 0b45e69410..4a2b3267cc 100644
--- a/media/base/rtp_utils.cc
+++ b/media/base/rtp_utils.cc
@@ -34,6 +34,7 @@ static const size_t kRtcpPayloadTypeOffset = 1;
static const size_t kRtpExtensionHeaderLen = 4;
static const size_t kAbsSendTimeExtensionLen = 3;
static const size_t kOneByteExtensionHeaderLen = 1;
+static const size_t kTwoByteExtensionHeaderLen = 2;
namespace {
@@ -424,10 +425,13 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
rtp += kRtpExtensionHeaderLen; // Moving past extension header.
+ constexpr uint16_t kOneByteExtensionProfileId = 0xBEDE;
+ constexpr uint16_t kTwoByteExtensionProfileId = 0x1000;
+
bool found = false;
- // WebRTC is using one byte header extension.
- // TODO(mallinath) - Handle two byte header extension.
- if (profile_id == 0xBEDE) { // OneByte extension header
+ if (profile_id == kOneByteExtensionProfileId ||
+ profile_id == kTwoByteExtensionProfileId) {
+ // OneByte extension header
// 0
// 0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
@@ -445,24 +449,53 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | data |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // TwoByte extension header
+ // 0
+ // 0 1 2 3 4 5 6 7
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | 0x10 | 0x00 | length=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=1 | data | ID |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | L=2 | data | 0 (pad) |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=2 | data |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ size_t extension_header_length = profile_id == kOneByteExtensionProfileId
+ ? kOneByteExtensionHeaderLen
+ : kTwoByteExtensionHeaderLen;
+
const uint8_t* extension_start = rtp;
const uint8_t* extension_end = extension_start + extension_length;
- while (rtp < extension_end) {
- const int id = (*rtp & 0xF0) >> 4;
- const size_t length = (*rtp & 0x0F) + 1;
- if (rtp + kOneByteExtensionHeaderLen + length > extension_end) {
+ // rtp + 1 since the minimum size per header extension is two bytes for both
+ // one- and two-byte header extensions.
+ while (rtp + 1 < extension_end) {
+ // See RFC8285 Section 4.2-4.3 for more information about one- and
+ // two-byte header extensions.
+ const int id =
+ profile_id == kOneByteExtensionProfileId ? (*rtp & 0xF0) >> 4 : *rtp;
+ const size_t length = profile_id == kOneByteExtensionProfileId
+ ? (*rtp & 0x0F) + 1
+ : *(rtp + 1);
+ if (rtp + extension_header_length + length > extension_end) {
return false;
}
- // The 4-bit length is the number minus one of data bytes of this header
- // extension element following the one-byte header.
if (id == extension_id) {
- UpdateAbsSendTimeExtensionValue(rtp + kOneByteExtensionHeaderLen,
- length, time_us);
+ UpdateAbsSendTimeExtensionValue(rtp + extension_header_length, length,
+ time_us);
found = true;
break;
}
- rtp += kOneByteExtensionHeaderLen + length;
+ rtp += extension_header_length + length;
// Counting padding bytes.
while ((rtp < extension_end) && (*rtp == 0)) {
++rtp;
diff --git a/media/base/rtp_utils_unittest.cc b/media/base/rtp_utils_unittest.cc
index 051508cd01..a5e8a810f4 100644
--- a/media/base/rtp_utils_unittest.cc
+++ b/media/base/rtp_utils_unittest.cc
@@ -71,15 +71,25 @@ static uint8_t kRtpMsgWith2ByteExtnHeader[] = {
// clang-format on
};
-// RTP packet with single byte extension header of length 4 bytes.
-// Extension id = 3 and length = 3
-static uint8_t kRtpMsgWithAbsSendTimeExtension[] = {
+// RTP packet with two one-byte header extensions. The last 4 bytes consist of
+// abs-send-time with extension id = 3 and length = 3.
+static uint8_t kRtpMsgWithOneByteAbsSendTimeExtension[] = {
0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xBE, 0xDE, 0x00, 0x02, 0x22, 0x00, 0x02, 0x1c, 0x32, 0xaa, 0xbb, 0xcc,
};
-// Index of AbsSendTimeExtn data in message |kRtpMsgWithAbsSendTimeExtension|.
-static const int kAstIndexInRtpMsg = 21;
+// RTP packet with two two-byte header extensions. The last 5 bytes consist of
+// abs-send-time with extension id = 3 and length = 3.
+static uint8_t kRtpMsgWithTwoByteAbsSendTimeExtension[] = {
+ 0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x02, 0x02, 0x01, 0x02, 0x03, 0x03, 0xaa, 0xbb, 0xcc,
+};
+
+// Index of AbsSendTimeExtn data in message
+// |kRtpMsgWithOneByteAbsSendTimeExtension|.
+static const int kAstIndexInOneByteRtpMsg = 21;
+// and in message |kRtpMsgWithTwoByteAbsSendTimeExtension|.
+static const int kAstIndexInTwoByteRtpMsg = 21;
static const rtc::ArrayView<const char> kPcmuFrameArrayView =
rtc::MakeArrayView(reinterpret_cast<const char*>(kPcmuFrame),
@@ -213,19 +223,17 @@ TEST(RtpUtilsTest, Valid2ByteExtnHdrRtpMessage) {
}
// Valid RTP packet which has 1 byte header AbsSendTime extension in it.
-TEST(RtpUtilsTest, ValidRtpPacketWithAbsSendTimeExtension) {
- EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithAbsSendTimeExtension,
- sizeof(kRtpMsgWithAbsSendTimeExtension),
+TEST(RtpUtilsTest, ValidRtpPacketWithOneByteAbsSendTimeExtension) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithOneByteAbsSendTimeExtension,
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension),
nullptr));
}
-// Verify handling of a 2 byte extension header RTP messsage. Currently these
-// messages are not supported.
-TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionIn2ByteHeaderExtn) {
- std::vector<uint8_t> data(
- kRtpMsgWith2ByteExtnHeader,
- kRtpMsgWith2ByteExtnHeader + sizeof(kRtpMsgWith2ByteExtnHeader));
- EXPECT_FALSE(UpdateRtpAbsSendTimeExtension(&data[0], data.size(), 3, 0));
+// Valid RTP packet which has 2 byte header AbsSendTime extension in it.
+TEST(RtpUtilsTest, ValidRtpPacketWithTwoByteAbsSendTimeExtension) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithTwoByteAbsSendTimeExtension,
+ sizeof(kRtpMsgWithTwoByteAbsSendTimeExtension),
+ nullptr));
}
// Verify finding an extension ID in the TURN send indication message.
@@ -276,19 +284,21 @@ TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInTurnSendIndication) {
// without HMAC value in the packet.
TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) {
rtc::PacketTimeUpdateParams packet_time_params;
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 0));
// Making sure HMAC wasn't updated..
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kFakeTag, 4));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kFakeTag, 4));
// Verify AbsouluteSendTime extension field wasn't modified.
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kTestAstValue,
sizeof(kTestAstValue)));
}
@@ -299,34 +309,53 @@ TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParams) {
kTestKey + sizeof(kTestKey));
packet_time_params.srtp_auth_tag_len = 4;
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 0));
uint8_t kExpectedTag[] = {0xc1, 0x7a, 0x8c, 0xa0};
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kExpectedTag, sizeof(kExpectedTag)));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
// Verify AbsouluteSendTime extension field is not modified.
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kTestAstValue,
sizeof(kTestAstValue)));
}
// Verify finding an extension ID in a raw rtp message.
-TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInRtpPacket) {
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+TEST(RtpUtilsTest, UpdateOneByteAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
+
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
+ 3, 51183266));
+
+ // Verify that the timestamp was updated.
+ const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kExpectedTimestamp,
+ sizeof(kExpectedTimestamp)));
+}
+
+// Verify finding an extension ID in a raw rtp message.
+TEST(RtpUtilsTest, UpdateTwoByteAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithTwoByteAbsSendTimeExtension,
+ kRtpMsgWithTwoByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithTwoByteAbsSendTimeExtension));
EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
3, 51183266));
// Verify that the timestamp was updated.
const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInTwoByteRtpMsg], kExpectedTimestamp,
sizeof(kExpectedTimestamp)));
}
@@ -339,20 +368,22 @@ TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParamsAndAbsSendTime) {
packet_time_params.rtp_sendtime_extension_id = 3;
// 3 is also present in the test message.
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 51183266));
const uint8_t kExpectedTag[] = {0x81, 0xd1, 0x2c, 0x0e};
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kExpectedTag, sizeof(kExpectedTag)));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
// Verify that the timestamp was updated.
const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kExpectedTimestamp,
sizeof(kExpectedTimestamp)));
}
diff --git a/media/base/video_adapter.cc b/media/base/video_adapter.cc
index 27b82646ac..73e77cc37e 100644
--- a/media/base/video_adapter.cc
+++ b/media/base/video_adapter.cc
@@ -145,8 +145,6 @@ VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
VideoAdapter::~VideoAdapter() {}
bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
- rtc::CritScope cs(&critical_section_);
-
int max_fps = max_framerate_request_;
if (max_fps_)
max_fps = std::min(max_fps, *max_fps_);
@@ -192,7 +190,7 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
int* cropped_height,
int* out_width,
int* out_height) {
- rtc::CritScope cs(&critical_section_);
+ webrtc::MutexLock lock(&mutex_);
++frames_in_;
// The max output pixel count is the minimum of the requests from
@@ -331,7 +329,7 @@ void VideoAdapter::OnOutputFormatRequest(
const absl::optional<std::pair<int, int>>& target_portrait_aspect_ratio,
const absl::optional<int>& max_portrait_pixel_count,
const absl::optional<int>& max_fps) {
- rtc::CritScope cs(&critical_section_);
+ webrtc::MutexLock lock(&mutex_);
target_landscape_aspect_ratio_ = target_landscape_aspect_ratio;
max_landscape_pixel_count_ = max_landscape_pixel_count;
target_portrait_aspect_ratio_ = target_portrait_aspect_ratio;
@@ -341,7 +339,7 @@ void VideoAdapter::OnOutputFormatRequest(
}
void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) {
- rtc::CritScope cs(&critical_section_);
+ webrtc::MutexLock lock(&mutex_);
resolution_request_max_pixel_count_ = sink_wants.max_pixel_count;
resolution_request_target_pixel_count_ =
sink_wants.target_pixel_count.value_or(
diff --git a/media/base/video_adapter.h b/media/base/video_adapter.h
index 936cf8917e..2c42632762 100644
--- a/media/base/video_adapter.h
+++ b/media/base/video_adapter.h
@@ -19,7 +19,7 @@
#include "api/video/video_source_interface.h"
#include "media/base/video_common.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace cricket {
@@ -46,7 +46,7 @@ class VideoAdapter {
int* cropped_width,
int* cropped_height,
int* out_width,
- int* out_height);
+ int* out_height) RTC_LOCKS_EXCLUDED(mutex_);
// DEPRECATED. Please use OnOutputFormatRequest below.
// TODO(asapersson): Remove this once it is no longer used.
@@ -57,7 +57,8 @@ class VideoAdapter {
// maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
// 720x1280 is requested.
// Note: Should be called from the source only.
- void OnOutputFormatRequest(const absl::optional<VideoFormat>& format);
+ void OnOutputFormatRequest(const absl::optional<VideoFormat>& format)
+ RTC_LOCKS_EXCLUDED(mutex_);
// Requests output frame size and frame interval from |AdaptFrameResolution|.
// |target_aspect_ratio|: The input frame size will be cropped to match the
@@ -70,7 +71,7 @@ class VideoAdapter {
void OnOutputFormatRequest(
const absl::optional<std::pair<int, int>>& target_aspect_ratio,
const absl::optional<int>& max_pixel_count,
- const absl::optional<int>& max_fps);
+ const absl::optional<int>& max_fps) RTC_LOCKS_EXCLUDED(mutex_);
// Same as above, but allows setting two different target aspect ratios
// depending on incoming frame orientation. This gives more fine-grained
@@ -81,7 +82,7 @@ class VideoAdapter {
const absl::optional<int>& max_landscape_pixel_count,
const absl::optional<std::pair<int, int>>& target_portrait_aspect_ratio,
const absl::optional<int>& max_portrait_pixel_count,
- const absl::optional<int>& max_fps);
+ const absl::optional<int>& max_fps) RTC_LOCKS_EXCLUDED(mutex_);
// Requests the output frame size from |AdaptFrameResolution| to have as close
// as possible to |sink_wants.target_pixel_count| pixels (if set)
@@ -93,18 +94,21 @@ class VideoAdapter {
// The sink resolution alignment requirement is given by
// |sink_wants.resolution_alignment|.
// Note: Should be called from the sink only.
- void OnSinkWants(const rtc::VideoSinkWants& sink_wants);
+ void OnSinkWants(const rtc::VideoSinkWants& sink_wants)
+ RTC_LOCKS_EXCLUDED(mutex_);
private:
// Determine if frame should be dropped based on input fps and requested fps.
- bool KeepFrame(int64_t in_timestamp_ns);
-
- int frames_in_; // Number of input frames.
- int frames_out_; // Number of output frames.
- int frames_scaled_; // Number of frames scaled.
- int adaption_changes_; // Number of changes in scale factor.
- int previous_width_; // Previous adapter output width.
- int previous_height_; // Previous adapter output height.
+ bool KeepFrame(int64_t in_timestamp_ns) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ int frames_in_ RTC_GUARDED_BY(mutex_); // Number of input frames.
+ int frames_out_ RTC_GUARDED_BY(mutex_); // Number of output frames.
+ int frames_scaled_ RTC_GUARDED_BY(mutex_); // Number of frames scaled.
+ int adaption_changes_
+ RTC_GUARDED_BY(mutex_); // Number of changes in scale factor.
+ int previous_width_ RTC_GUARDED_BY(mutex_); // Previous adapter output width.
+ int previous_height_
+ RTC_GUARDED_BY(mutex_); // Previous adapter output height.
const bool variable_start_scale_factor_;
// The fixed source resolution alignment requirement.
@@ -112,30 +116,27 @@ class VideoAdapter {
// The currently applied resolution alignment, as given by the requirements:
// - the fixed |source_resolution_alignment_|; and
// - the latest |sink_wants.resolution_alignment|.
- int resolution_alignment_ RTC_GUARDED_BY(critical_section_);
+ int resolution_alignment_ RTC_GUARDED_BY(mutex_);
// The target timestamp for the next frame based on requested format.
- absl::optional<int64_t> next_frame_timestamp_ns_
- RTC_GUARDED_BY(critical_section_);
+ absl::optional<int64_t> next_frame_timestamp_ns_ RTC_GUARDED_BY(mutex_);
// Max number of pixels/fps requested via calls to OnOutputFormatRequest,
// OnResolutionFramerateRequest respectively.
// The adapted output format is the minimum of these.
absl::optional<std::pair<int, int>> target_landscape_aspect_ratio_
- RTC_GUARDED_BY(critical_section_);
- absl::optional<int> max_landscape_pixel_count_
- RTC_GUARDED_BY(critical_section_);
+ RTC_GUARDED_BY(mutex_);
+ absl::optional<int> max_landscape_pixel_count_ RTC_GUARDED_BY(mutex_);
absl::optional<std::pair<int, int>> target_portrait_aspect_ratio_
- RTC_GUARDED_BY(critical_section_);
- absl::optional<int> max_portrait_pixel_count_
- RTC_GUARDED_BY(critical_section_);
- absl::optional<int> max_fps_ RTC_GUARDED_BY(critical_section_);
- int resolution_request_target_pixel_count_ RTC_GUARDED_BY(critical_section_);
- int resolution_request_max_pixel_count_ RTC_GUARDED_BY(critical_section_);
- int max_framerate_request_ RTC_GUARDED_BY(critical_section_);
+ RTC_GUARDED_BY(mutex_);
+ absl::optional<int> max_portrait_pixel_count_ RTC_GUARDED_BY(mutex_);
+ absl::optional<int> max_fps_ RTC_GUARDED_BY(mutex_);
+ int resolution_request_target_pixel_count_ RTC_GUARDED_BY(mutex_);
+ int resolution_request_max_pixel_count_ RTC_GUARDED_BY(mutex_);
+ int max_framerate_request_ RTC_GUARDED_BY(mutex_);
// The critical section to protect the above variables.
- rtc::CriticalSection critical_section_;
+ webrtc::Mutex mutex_;
RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter);
};
diff --git a/media/base/video_broadcaster.cc b/media/base/video_broadcaster.cc
index 700478d4e1..e6a91368fc 100644
--- a/media/base/video_broadcaster.cc
+++ b/media/base/video_broadcaster.cc
@@ -28,7 +28,7 @@ void VideoBroadcaster::AddOrUpdateSink(
VideoSinkInterface<webrtc::VideoFrame>* sink,
const VideoSinkWants& wants) {
RTC_DCHECK(sink != nullptr);
- rtc::CritScope cs(&sinks_and_wants_lock_);
+ webrtc::MutexLock lock(&sinks_and_wants_lock_);
if (!FindSinkPair(sink)) {
// |Sink| is a new sink, which didn't receive previous frame.
previous_frame_sent_to_all_sinks_ = false;
@@ -40,23 +40,23 @@ void VideoBroadcaster::AddOrUpdateSink(
void VideoBroadcaster::RemoveSink(
VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(sink != nullptr);
- rtc::CritScope cs(&sinks_and_wants_lock_);
+ webrtc::MutexLock lock(&sinks_and_wants_lock_);
VideoSourceBase::RemoveSink(sink);
UpdateWants();
}
bool VideoBroadcaster::frame_wanted() const {
- rtc::CritScope cs(&sinks_and_wants_lock_);
+ webrtc::MutexLock lock(&sinks_and_wants_lock_);
return !sink_pairs().empty();
}
VideoSinkWants VideoBroadcaster::wants() const {
- rtc::CritScope cs(&sinks_and_wants_lock_);
+ webrtc::MutexLock lock(&sinks_and_wants_lock_);
return current_wants_;
}
void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
- rtc::CritScope cs(&sinks_and_wants_lock_);
+ webrtc::MutexLock lock(&sinks_and_wants_lock_);
bool current_frame_was_discarded = false;
for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.rotation_applied &&
diff --git a/media/base/video_broadcaster.h b/media/base/video_broadcaster.h
index 898ef2ac9a..0703862c4f 100644
--- a/media/base/video_broadcaster.h
+++ b/media/base/video_broadcaster.h
@@ -15,7 +15,7 @@
#include "api/video/video_frame_buffer.h"
#include "api/video/video_source_interface.h"
#include "media/base/video_source_base.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -56,7 +56,7 @@ class VideoBroadcaster : public VideoSourceBase,
int width,
int height) RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
- rtc::CriticalSection sinks_and_wants_lock_;
+ mutable webrtc::Mutex sinks_and_wants_lock_;
VideoSinkWants current_wants_ RTC_GUARDED_BY(sinks_and_wants_lock_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> black_frame_buffer_;
diff --git a/media/base/vp9_profile.cc b/media/base/vp9_profile.cc
index cfecc5e545..abf2502fc8 100644
--- a/media/base/vp9_profile.cc
+++ b/media/base/vp9_profile.cc
@@ -24,6 +24,8 @@ std::string VP9ProfileToString(VP9Profile profile) {
switch (profile) {
case VP9Profile::kProfile0:
return "0";
+ case VP9Profile::kProfile1:
+ return "1";
case VP9Profile::kProfile2:
return "2";
}
@@ -38,6 +40,8 @@ absl::optional<VP9Profile> StringToVP9Profile(const std::string& str) {
switch (i.value()) {
case 0:
return VP9Profile::kProfile0;
+ case 1:
+ return VP9Profile::kProfile1;
case 2:
return VP9Profile::kProfile2;
default:
diff --git a/media/base/vp9_profile.h b/media/base/vp9_profile.h
index e2bbf19005..e47204fede 100644
--- a/media/base/vp9_profile.h
+++ b/media/base/vp9_profile.h
@@ -24,6 +24,7 @@ extern RTC_EXPORT const char kVP9FmtpProfileId[];
enum class VP9Profile {
kProfile0,
+ kProfile1,
kProfile2,
};
diff --git a/media/engine/fake_webrtc_call.cc b/media/engine/fake_webrtc_call.cc
index 78d4ba41e0..cb62d9fc0a 100644
--- a/media/engine/fake_webrtc_call.cc
+++ b/media/engine/fake_webrtc_call.cc
@@ -279,6 +279,14 @@ void FakeVideoSendStream::Stop() {
sending_ = false;
}
+void FakeVideoSendStream::AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) {}
+
+std::vector<rtc::scoped_refptr<webrtc::Resource>>
+FakeVideoSendStream::GetAdaptationResources() {
+ return {};
+}
+
void FakeVideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const webrtc::DegradationPreference& degradation_preference) {
@@ -570,6 +578,9 @@ void FakeCall::DestroyFlexfecReceiveStream(
}
}
+void FakeCall::AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) {}
+
webrtc::PacketReceiver* FakeCall::Receiver() {
return this;
}
diff --git a/media/engine/fake_webrtc_call.h b/media/engine/fake_webrtc_call.h
index 4404dec5df..97eb49c897 100644
--- a/media/engine/fake_webrtc_call.h
+++ b/media/engine/fake_webrtc_call.h
@@ -173,6 +173,10 @@ class FakeVideoSendStream final
const std::vector<bool> active_layers) override;
void Start() override;
void Stop() override;
+ void AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) override;
+ std::vector<rtc::scoped_refptr<webrtc::Resource>> GetAdaptationResources()
+ override;
void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const webrtc::DegradationPreference& degradation_preference) override;
@@ -341,6 +345,9 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
void DestroyFlexfecReceiveStream(
webrtc::FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) override;
+
webrtc::PacketReceiver* Receiver() override;
DeliveryStatus DeliverPacket(webrtc::MediaType media_type,
diff --git a/media/engine/fake_webrtc_video_engine.cc b/media/engine/fake_webrtc_video_engine.cc
index 91f7e53956..d7675228cf 100644
--- a/media/engine/fake_webrtc_video_engine.cc
+++ b/media/engine/fake_webrtc_video_engine.cc
@@ -148,7 +148,7 @@ void FakeWebRtcVideoEncoder::SetFecControllerOverride(
int32_t FakeWebRtcVideoEncoder::InitEncode(
const webrtc::VideoCodec* codecSettings,
const VideoEncoder::Settings& settings) {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
codec_settings_ = *codecSettings;
init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
@@ -157,7 +157,7 @@ int32_t FakeWebRtcVideoEncoder::InitEncode(
int32_t FakeWebRtcVideoEncoder::Encode(
const webrtc::VideoFrame& inputImage,
const std::vector<webrtc::VideoFrameType>* frame_types) {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
++num_frames_encoded_;
init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
@@ -188,12 +188,12 @@ bool FakeWebRtcVideoEncoder::WaitForInitEncode() {
}
webrtc::VideoCodec FakeWebRtcVideoEncoder::GetCodecSettings() {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return codec_settings_;
}
int FakeWebRtcVideoEncoder::GetNumEncodedFrames() {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return num_frames_encoded_;
}
@@ -219,7 +219,7 @@ FakeWebRtcVideoEncoderFactory::GetSupportedFormats() const {
std::unique_ptr<webrtc::VideoEncoder>
FakeWebRtcVideoEncoderFactory::CreateVideoEncoder(
const webrtc::SdpVideoFormat& format) {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
std::unique_ptr<webrtc::VideoEncoder> encoder;
if (IsFormatSupported(formats_, format)) {
if (absl::EqualsIgnoreCase(format.name, kVp8CodecName) &&
@@ -262,7 +262,7 @@ bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders(
void FakeWebRtcVideoEncoderFactory::EncoderDestroyed(
FakeWebRtcVideoEncoder* encoder) {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
encoders_.erase(std::remove(encoders_.begin(), encoders_.end(), encoder),
encoders_.end());
}
@@ -286,13 +286,13 @@ void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodecType(
}
int FakeWebRtcVideoEncoderFactory::GetNumCreatedEncoders() {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return num_created_encoders_;
}
const std::vector<FakeWebRtcVideoEncoder*>
FakeWebRtcVideoEncoderFactory::encoders() {
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return encoders_;
}
diff --git a/media/engine/fake_webrtc_video_engine.h b/media/engine/fake_webrtc_video_engine.h
index 28dc4fe99b..9adb5a41ef 100644
--- a/media/engine/fake_webrtc_video_engine.h
+++ b/media/engine/fake_webrtc_video_engine.h
@@ -29,8 +29,8 @@
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/video_coding/include/video_codec_interface.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace cricket {
@@ -101,10 +101,10 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
int GetNumEncodedFrames();
private:
- rtc::CriticalSection crit_;
+ webrtc::Mutex mutex_;
rtc::Event init_encode_event_;
- int num_frames_encoded_ RTC_GUARDED_BY(crit_);
- webrtc::VideoCodec codec_settings_ RTC_GUARDED_BY(crit_);
+ int num_frames_encoded_ RTC_GUARDED_BY(mutex_);
+ webrtc::VideoCodec codec_settings_ RTC_GUARDED_BY(mutex_);
FakeWebRtcVideoEncoderFactory* factory_;
};
@@ -128,11 +128,11 @@ class FakeWebRtcVideoEncoderFactory : public webrtc::VideoEncoderFactory {
const std::vector<FakeWebRtcVideoEncoder*> encoders();
private:
- rtc::CriticalSection crit_;
+ webrtc::Mutex mutex_;
rtc::Event created_video_encoder_event_;
std::vector<webrtc::SdpVideoFormat> formats_;
- std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(crit_);
- int num_created_encoders_ RTC_GUARDED_BY(crit_);
+ std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(mutex_);
+ int num_created_encoders_ RTC_GUARDED_BY(mutex_);
bool encoders_have_internal_sources_;
bool vp8_factory_mode_;
};
diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc
index e68bb369b5..d512b731af 100644
--- a/media/engine/internal_decoder_factory.cc
+++ b/media/engine/internal_decoder_factory.cc
@@ -44,7 +44,7 @@ std::vector<SdpVideoFormat> InternalDecoderFactory::GetSupportedFormats()
const {
std::vector<SdpVideoFormat> formats;
formats.push_back(SdpVideoFormat(cricket::kVp8CodecName));
- for (const SdpVideoFormat& format : SupportedVP9Codecs())
+ for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs())
formats.push_back(format);
for (const SdpVideoFormat& h264_format : SupportedH264Codecs())
formats.push_back(h264_format);
diff --git a/media/engine/internal_decoder_factory_unittest.cc b/media/engine/internal_decoder_factory_unittest.cc
index 705933d439..61be5e72df 100644
--- a/media/engine/internal_decoder_factory_unittest.cc
+++ b/media/engine/internal_decoder_factory_unittest.cc
@@ -13,6 +13,7 @@
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder.h"
#include "media/base/media_constants.h"
+#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -30,6 +31,26 @@ TEST(InternalDecoderFactory, TestVP8) {
EXPECT_TRUE(decoder);
}
+#ifdef RTC_ENABLE_VP9
+TEST(InternalDecoderFactory, TestVP9Profile0) {
+ InternalDecoderFactory factory;
+ std::unique_ptr<VideoDecoder> decoder =
+ factory.CreateVideoDecoder(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}));
+ EXPECT_TRUE(decoder);
+}
+
+TEST(InternalDecoderFactory, TestVP9Profile1) {
+ InternalDecoderFactory factory;
+ std::unique_ptr<VideoDecoder> decoder =
+ factory.CreateVideoDecoder(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
+ EXPECT_TRUE(decoder);
+}
+#endif // RTC_ENABLE_VP9
+
TEST(InternalDecoderFactory, Av1) {
InternalDecoderFactory factory;
if (kIsLibaomAv1DecoderSupported) {
diff --git a/media/engine/payload_type_mapper.cc b/media/engine/payload_type_mapper.cc
index fcacd44883..e9f863ca63 100644
--- a/media/engine/payload_type_mapper.cc
+++ b/media/engine/payload_type_mapper.cc
@@ -67,7 +67,6 @@ PayloadTypeMapper::PayloadTypeMapper()
{{kIsacCodecName, 32000, 1}, 104},
{{kCnCodecName, 16000, 1}, 105},
{{kCnCodecName, 32000, 1}, 106},
- {{kGoogleSctpDataCodecName, 0, 0}, kGoogleSctpDataCodecPlType},
{{kOpusCodecName,
48000,
2,
diff --git a/media/engine/payload_type_mapper_unittest.cc b/media/engine/payload_type_mapper_unittest.cc
index c8b2234c25..fa6864b48a 100644
--- a/media/engine/payload_type_mapper_unittest.cc
+++ b/media/engine/payload_type_mapper_unittest.cc
@@ -52,7 +52,6 @@ TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) {
return mapper_.FindMappingFor({name, 0, 0});
};
EXPECT_EQ(kGoogleRtpDataCodecPlType, data_mapping(kGoogleRtpDataCodecName));
- EXPECT_EQ(kGoogleSctpDataCodecPlType, data_mapping(kGoogleSctpDataCodecName));
EXPECT_EQ(102, mapper_.FindMappingFor({kIlbcCodecName, 8000, 1}));
EXPECT_EQ(103, mapper_.FindMappingFor({kIsacCodecName, 16000, 1}));
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index b467c49166..9a23ca437a 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -199,23 +199,22 @@ class MockVideoEncoder : public VideoEncoder {
video_format_("unknown"),
callback_(nullptr) {}
- MOCK_METHOD1(SetFecControllerOverride,
- void(FecControllerOverride* fec_controller_override));
+ MOCK_METHOD(void,
+ SetFecControllerOverride,
+ (FecControllerOverride * fec_controller_override),
+ (override));
- // TODO(nisse): Valid overrides commented out, because the gmock
- // methods don't use any override declarations, and we want to avoid
- // warnings from -Winconsistent-missing-override. See
- // http://crbug.com/428099.
int32_t InitEncode(const VideoCodec* codecSettings,
const VideoEncoder::Settings& settings) override {
codec_ = *codecSettings;
return init_encode_return_value_;
}
- MOCK_METHOD2(
- Encode,
- int32_t(const VideoFrame& inputImage,
- const std::vector<VideoFrameType>* frame_types) /* override */);
+ MOCK_METHOD(int32_t,
+ Encode,
+ (const VideoFrame& inputImage,
+ const std::vector<VideoFrameType>* frame_types),
+ (override));
int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) override {
@@ -223,7 +222,7 @@ class MockVideoEncoder : public VideoEncoder {
return 0;
}
- MOCK_METHOD0(Release, int32_t() /* override */);
+ MOCK_METHOD(int32_t, Release, (), (override));
void SetRates(const RateControlParameters& parameters) {
last_set_rates_ = parameters;
@@ -334,8 +333,7 @@ std::vector<SdpVideoFormat> MockVideoEncoderFactory::GetSupportedFormats()
std::unique_ptr<VideoEncoder> MockVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
- std::unique_ptr<MockVideoEncoder> encoder(
- new ::testing::NiceMock<MockVideoEncoder>(this));
+ auto encoder = std::make_unique<::testing::NiceMock<MockVideoEncoder>>(this);
encoder->set_init_encode_return_value(init_encode_return_value_);
const char* encoder_name = encoder_names_.empty()
? "codec_implementation_name"
diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc
index 71a0939cb9..26fa335cf6 100644
--- a/media/engine/webrtc_video_engine.cc
+++ b/media/engine/webrtc_video_engine.cc
@@ -20,7 +20,6 @@
#include "absl/algorithm/container.h"
#include "absl/strings/match.h"
#include "api/media_stream_interface.h"
-#include "api/transport/datagram_transport_interface.h"
#include "api/units/data_rate.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_codec_type.h"
@@ -246,7 +245,7 @@ static bool ValidateStreamParams(const StreamParams& sp) {
}
// Returns true if the given codec is disallowed from doing simulcast.
-bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
+bool IsCodecDisabledForSimulcast(const std::string& codec_name) {
return !webrtc::field_trial::IsDisabled("WebRTC-H264Simulcast")
? absl::EqualsIgnoreCase(codec_name, kVp9CodecName)
: absl::EqualsIgnoreCase(codec_name, kH264CodecName) ||
@@ -609,7 +608,6 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const {
webrtc::RtpExtension::kPlayoutDelayUri,
webrtc::RtpExtension::kVideoContentTypeUri,
webrtc::RtpExtension::kVideoTimingUri,
- webrtc::RtpExtension::kFrameMarkingUri,
webrtc::RtpExtension::kColorSpaceUri, webrtc::RtpExtension::kMidUri,
webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) {
result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv);
@@ -1299,13 +1297,6 @@ bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) {
config.rtp.extmap_allow_mixed = ExtmapAllowMixed();
config.rtcp_report_interval_ms = video_config_.rtcp_report_interval_ms;
- // If sending through Datagram Transport, limit packet size to maximum
- // packet size supported by datagram_transport.
- if (media_transport_config().rtp_max_packet_size) {
- config.rtp.max_packet_size =
- media_transport_config().rtp_max_packet_size.value();
- }
-
WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
call_, sp, std::move(config), default_send_options_,
video_config_.enable_cpu_adaptation, bitrate_config_.max_bitrate_bps,
@@ -1564,7 +1555,6 @@ bool WebRtcVideoChannel::GetStats(VideoMediaInfo* info) {
FillSendAndReceiveCodecStats(info);
// TODO(holmer): We should either have rtt available as a metric on
// VideoSend/ReceiveStreams, or we should remove rtt from VideoSenderInfo.
- // TODO(nisse): Arrange to get correct RTT also when using MediaTransport.
webrtc::Call::Stats stats = call_->GetStats();
if (stats.rtt_ms != -1) {
for (size_t i = 0; i < info->senders.size(); ++i) {
@@ -1587,6 +1577,8 @@ void WebRtcVideoChannel::FillSenderStats(VideoMediaInfo* video_media_info,
send_streams_.begin();
it != send_streams_.end(); ++it) {
auto infos = it->second->GetPerLayerVideoSenderInfos(log_stats);
+ if (infos.empty())
+ continue;
video_media_info->aggregated_senders.push_back(
it->second->GetAggregatedVideoSenderInfo(infos));
for (auto&& info : infos) {
@@ -1757,11 +1749,9 @@ void WebRtcVideoChannel::OnNetworkRouteChanged(
network_route.packet_overhead);
}
-void WebRtcVideoChannel::SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) {
+void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
RTC_DCHECK_RUN_ON(&thread_checker_);
- MediaChannel::SetInterface(iface, media_transport_config);
+ MediaChannel::SetInterface(iface);
// Set the RTP recv/send buffer to a bigger size.
// The group should be a positive integer with an explicit size, in
@@ -2330,11 +2320,11 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
}
// By default, the stream count for the codec configuration should match the
- // number of negotiated ssrcs. But if the codec is blacklisted for simulcast
+ // number of negotiated ssrcs. But if the codec is disabled for simulcast
// or a screencast (and not in simulcast screenshare experiment), only
// configure a single stream.
encoder_config.number_of_streams = parameters_.config.rtp.ssrcs.size();
- if (IsCodecBlacklistedForSimulcast(codec.name)) {
+ if (IsCodecDisabledForSimulcast(codec.name)) {
encoder_config.number_of_streams = 1;
}
@@ -2594,7 +2584,7 @@ VideoSenderInfo
WebRtcVideoChannel::WebRtcVideoSendStream::GetAggregatedVideoSenderInfo(
const std::vector<VideoSenderInfo>& infos) const {
RTC_DCHECK_RUN_ON(&thread_checker_);
- RTC_DCHECK(!infos.empty());
+ RTC_CHECK(!infos.empty());
if (infos.size() == 1) {
return infos[0];
}
@@ -2962,7 +2952,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::
void WebRtcVideoChannel::WebRtcVideoReceiveStream::OnFrame(
const webrtc::VideoFrame& frame) {
- rtc::CritScope crit(&sink_lock_);
+ webrtc::MutexLock lock(&sink_lock_);
int64_t time_now_ms = rtc::TimeMillis();
if (first_frame_timestamp_ < 0)
@@ -3007,7 +2997,7 @@ int WebRtcVideoChannel::WebRtcVideoReceiveStream::GetBaseMinimumPlayoutDelayMs()
void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
- rtc::CritScope crit(&sink_lock_);
+ webrtc::MutexLock lock(&sink_lock_);
sink_ = sink;
}
@@ -3047,7 +3037,7 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
info.frame_height = stats.height;
{
- rtc::CritScope frame_cs(&sink_lock_);
+ webrtc::MutexLock frame_cs(&sink_lock_);
info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_;
}
diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h
index 00d249541a..21033ffae7 100644
--- a/media/engine/webrtc_video_engine.h
+++ b/media/engine/webrtc_video_engine.h
@@ -32,8 +32,8 @@
#include "media/engine/constants.h"
#include "media/engine/unhandled_packets_buffer.h"
#include "rtc_base/async_invoker.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/network_route.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -169,9 +169,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
void OnReadyToSend(bool ready) override;
void OnNetworkRouteChanged(const std::string& transport_name,
const rtc::NetworkRoute& network_route) override;
- void SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void SetInterface(NetworkInterface* iface) override;
// E2E Encrypted Video Frame API
// Set a frame decryptor to a particular ssrc that will intercept all
@@ -518,7 +516,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
webrtc::VideoDecoderFactory* const decoder_factory_;
- rtc::CriticalSection sink_lock_;
+ webrtc::Mutex sink_lock_;
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink_
RTC_GUARDED_BY(sink_lock_);
// Expands remote RTP timestamps to int64_t to be able to estimate how long
diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc
index ce36073449..eae83938d4 100644
--- a/media/engine/webrtc_video_engine_unittest.cc
+++ b/media/engine/webrtc_video_engine_unittest.cc
@@ -28,7 +28,6 @@
#include "api/test/mock_video_encoder_factory.h"
#include "api/test/video/function_video_decoder_factory.h"
#include "api/transport/field_trial_based_config.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/units/time_delta.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/i420_buffer.h"
@@ -209,11 +208,15 @@ int GetMaxDefaultBitrateBps(size_t width, size_t height) {
class MockVideoSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
- MOCK_METHOD2(AddOrUpdateSink,
- void(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
- const rtc::VideoSinkWants& wants));
- MOCK_METHOD1(RemoveSink,
- void(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink));
+ MOCK_METHOD(void,
+ AddOrUpdateSink,
+ (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink,
+ const rtc::VideoSinkWants& wants),
+ (override));
+ MOCK_METHOD(void,
+ RemoveSink,
+ (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink),
+ (override));
};
} // namespace
@@ -343,10 +346,6 @@ TEST_F(WebRtcVideoEngineTest, SupportsVideoTimingHeaderExtension) {
ExpectRtpCapabilitySupport(RtpExtension::kVideoTimingUri, true);
}
-TEST_F(WebRtcVideoEngineTest, SupportsFrameMarkingHeaderExtension) {
- ExpectRtpCapabilitySupport(RtpExtension::kFrameMarkingUri, true);
-}
-
TEST_F(WebRtcVideoEngineTest, SupportsColorSpaceHeaderExtension) {
ExpectRtpCapabilitySupport(RtpExtension::kColorSpaceUri, true);
}
@@ -1335,7 +1334,7 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test {
webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())))) {
network_interface_.SetDestination(channel_.get());
- channel_->SetInterface(&network_interface_, webrtc::MediaTransportConfig());
+ channel_->SetInterface(&network_interface_);
cricket::VideoRecvParameters parameters;
parameters.codecs = engine_.recv_codecs();
channel_->SetRecvParameters(parameters);
@@ -1481,7 +1480,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
channel_->OnReadyToSend(true);
EXPECT_TRUE(channel_.get() != NULL);
network_interface_.SetDestination(channel_.get());
- channel_->SetInterface(&network_interface_, webrtc::MediaTransportConfig());
+ channel_->SetInterface(&network_interface_);
cricket::VideoRecvParameters parameters;
parameters.codecs = engine_.send_codecs();
channel_->SetRecvParameters(parameters);
@@ -5064,8 +5063,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
@@ -5076,8 +5074,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
// Create a send stream to configure
@@ -5106,8 +5103,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
}
@@ -5595,6 +5591,27 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) {
EXPECT_EQ(sender.rid, absl::nullopt);
}
+TEST_F(WebRtcVideoChannelTest, MediaSubstreamMissingProducesEmpyStats) {
+ FakeVideoSendStream* stream = AddSendStream();
+
+ const uint32_t kRtxSsrc = 123u;
+ const uint32_t kMissingMediaSsrc = 124u;
+
+ // Set up a scenarios where we have a substream that is not kMedia (in this
+ // case: kRtx) but its associated kMedia stream does not exist yet. This
+ // results in zero GetPerLayerVideoSenderInfos despite non-empty substreams.
+ // Covers https://crbug.com/1090712.
+ auto stats = GetInitialisedStats();
+ auto& substream = stats.substreams[kRtxSsrc];
+ substream.type = webrtc::VideoSendStream::StreamStats::StreamType::kRtx;
+ substream.referenced_media_ssrc = kMissingMediaSsrc;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo video_media_info;
+ ASSERT_TRUE(channel_->GetStats(&video_media_info));
+ EXPECT_TRUE(video_media_info.senders.empty());
+}
+
TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) {
FakeVideoSendStream* stream = AddSendStream();
webrtc::VideoSendStream::Stats stats;
diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc
index 85c72804c1..38dc3462ac 100644
--- a/media/engine/webrtc_voice_engine.cc
+++ b/media/engine/webrtc_voice_engine.cc
@@ -36,7 +36,9 @@
#include "rtc_base/constructor_magic.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"
+#include "rtc_base/experiments/struct_parameters_parser.h"
#include "rtc_base/helpers.h"
+#include "rtc_base/ignore_wundef.h"
#include "rtc_base/logging.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/strings/audio_format_to_string.h"
@@ -46,6 +48,16 @@
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
+#if WEBRTC_ENABLE_PROTOBUF
+RTC_PUSH_IGNORING_WUNDEF()
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
+#else
+#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
+#endif
+RTC_POP_IGNORING_WUNDEF()
+#endif
+
namespace cricket {
namespace {
@@ -99,6 +111,12 @@ std::string ToString(const AudioCodec& codec) {
return ss.Release();
}
+// If this field trial is enabled, we will negotiate and use RFC 2198
+// redundancy for opus audio.
+bool IsAudioRedForOpusFieldTrialEnabled() {
+ return webrtc::field_trial::IsEnabled("WebRTC-Audio-Red-For-Opus");
+}
+
bool IsCodec(const AudioCodec& codec, const char* ref_name) {
return absl::EqualsIgnoreCase(codec.name, ref_name);
}
@@ -185,6 +203,38 @@ absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
}
}
+struct AdaptivePtimeConfig {
+ bool enabled = false;
+ webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16);
+ webrtc::DataRate min_encoder_bitrate = webrtc::DataRate::KilobitsPerSec(12);
+ bool use_slow_adaptation = true;
+
+ absl::optional<std::string> audio_network_adaptor_config;
+
+ std::unique_ptr<webrtc::StructParametersParser> Parser() {
+ return webrtc::StructParametersParser::Create( //
+ "enabled", &enabled, //
+ "min_payload_bitrate", &min_payload_bitrate, //
+ "min_encoder_bitrate", &min_encoder_bitrate, //
+ "use_slow_adaptation", &use_slow_adaptation);
+ }
+
+ AdaptivePtimeConfig() {
+ Parser()->Parse(
+ webrtc::field_trial::FindFullName("WebRTC-Audio-AdaptivePtime"));
+#if WEBRTC_ENABLE_PROTOBUF
+ webrtc::audio_network_adaptor::config::ControllerManager config;
+ auto* frame_length_controller =
+ config.add_controllers()->mutable_frame_length_controller_v2();
+ frame_length_controller->set_min_payload_bitrate_bps(
+ min_payload_bitrate.bps());
+ frame_length_controller->set_use_slow_adaptation(use_slow_adaptation);
+ config.add_controllers()->mutable_bitrate_controller();
+ audio_network_adaptor_config = config.SerializeAsString();
+#endif
+ }
+};
+
} // namespace
WebRtcVoiceEngine::WebRtcVoiceEngine(
@@ -682,6 +732,11 @@ std::vector<AudioCodec> WebRtcVoiceEngine::CollectCodecs(
}
}
+ // Add red codec.
+ if (IsAudioRedForOpusFieldTrialEnabled()) {
+ map_format({kRedCodecName, 48000, 2}, &out);
+ }
+
// Add telephone-event codecs last.
for (const auto& dtmf : generate_dtmf) {
if (dtmf.second) {
@@ -726,7 +781,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
config_.rtp.extensions = extensions;
config_.has_dscp =
rtp_parameters_.encodings[0].network_priority != webrtc::Priority::kLow;
- config_.audio_network_adaptor_config = audio_network_adaptor_config;
config_.encoder_factory = encoder_factory;
config_.codec_pair_id = codec_pair_id;
config_.track_id = track_id;
@@ -737,6 +791,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
rtp_parameters_.rtcp.cname = c_name;
rtp_parameters_.header_extensions = extensions;
+ audio_network_adaptor_config_from_options_ = audio_network_adaptor_config;
+ UpdateAudioNetworkAdaptorConfig();
+
if (send_codec_spec) {
UpdateSendCodecSpec(*send_codec_spec);
}
@@ -787,10 +844,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
void SetAudioNetworkAdaptorConfig(
const absl::optional<std::string>& audio_network_adaptor_config) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- if (config_.audio_network_adaptor_config == audio_network_adaptor_config) {
+ if (audio_network_adaptor_config_from_options_ ==
+ audio_network_adaptor_config) {
return;
}
- config_.audio_network_adaptor_config = audio_network_adaptor_config;
+ audio_network_adaptor_config_from_options_ = audio_network_adaptor_config;
+ UpdateAudioNetworkAdaptorConfig();
UpdateAllowedBitrateRange();
ReconfigureAudioSendStream();
}
@@ -937,6 +996,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
rtp_parameters_.encodings[0].max_bitrate_bps;
double old_priority = rtp_parameters_.encodings[0].bitrate_priority;
webrtc::Priority old_dscp = rtp_parameters_.encodings[0].network_priority;
+ bool old_adaptive_ptime = rtp_parameters_.encodings[0].adaptive_ptime;
rtp_parameters_ = parameters;
config_.bitrate_priority = rtp_parameters_.encodings[0].bitrate_priority;
config_.has_dscp = (rtp_parameters_.encodings[0].network_priority !=
@@ -945,15 +1005,19 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
bool reconfigure_send_stream =
(rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) ||
(rtp_parameters_.encodings[0].bitrate_priority != old_priority) ||
- (rtp_parameters_.encodings[0].network_priority != old_dscp);
+ (rtp_parameters_.encodings[0].network_priority != old_dscp) ||
+ (rtp_parameters_.encodings[0].adaptive_ptime != old_adaptive_ptime);
if (rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) {
// Update the bitrate range.
if (send_rate) {
config_.send_codec_spec->target_bitrate_bps = send_rate;
}
- UpdateAllowedBitrateRange();
}
if (reconfigure_send_stream) {
+ // Changing adaptive_ptime may update the audio network adaptor config
+ // used.
+ UpdateAudioNetworkAdaptorConfig();
+ UpdateAllowedBitrateRange();
ReconfigureAudioSendStream();
}
@@ -989,6 +1053,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
// The order of precedence, from lowest to highest is:
// - a reasonable default of 32kbps min/max
// - fixed target bitrate from codec spec
+ // - lower min bitrate if adaptive ptime is enabled
// - bitrate configured in the rtp_parameter encodings settings
const int kDefaultBitrateBps = 32000;
config_.min_bitrate_bps = kDefaultBitrateBps;
@@ -1000,6 +1065,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
config_.max_bitrate_bps = *config_.send_codec_spec->target_bitrate_bps;
}
+ if (rtp_parameters_.encodings[0].adaptive_ptime) {
+ config_.min_bitrate_bps = std::min(
+ config_.min_bitrate_bps,
+ static_cast<int>(adaptive_ptime_config_.min_encoder_bitrate.bps()));
+ }
+
if (rtp_parameters_.encodings[0].min_bitrate_bps) {
config_.min_bitrate_bps = *rtp_parameters_.encodings[0].min_bitrate_bps;
}
@@ -1033,12 +1104,24 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
UpdateAllowedBitrateRange();
}
+ void UpdateAudioNetworkAdaptorConfig() {
+ if (adaptive_ptime_config_.enabled ||
+ rtp_parameters_.encodings[0].adaptive_ptime) {
+ config_.audio_network_adaptor_config =
+ adaptive_ptime_config_.audio_network_adaptor_config;
+ return;
+ }
+ config_.audio_network_adaptor_config =
+ audio_network_adaptor_config_from_options_;
+ }
+
void ReconfigureAudioSendStream() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
RTC_DCHECK(stream_);
stream_->Reconfigure(config_);
}
+ const AdaptivePtimeConfig adaptive_ptime_config_;
rtc::ThreadChecker worker_thread_checker_;
rtc::RaceChecker audio_capture_race_checker_;
webrtc::Call* call_ = nullptr;
@@ -1056,6 +1139,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
int max_send_bitrate_bps_;
webrtc::RtpParameters rtp_parameters_;
absl::optional<webrtc::AudioCodecSpec> audio_codec_spec_;
+ // TODO(webrtc:11717): Remove this once audio_network_adaptor in AudioOptions
+ // has been removed.
+ absl::optional<std::string> audio_network_adaptor_config_from_options_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioSendStream);
};
@@ -1541,7 +1627,9 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs(
<< old_codec.id << ")";
}
auto format = AudioCodecToSdpAudioFormat(codec);
- if (!IsCodec(codec, "cn") && !IsCodec(codec, "telephone-event") &&
+ if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) &&
+ (!IsAudioRedForOpusFieldTrialEnabled() ||
+ !IsCodec(codec, kRedCodecName)) &&
!engine()->decoder_factory_->IsSupportedDecoder(format)) {
RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format);
return false;
@@ -1692,6 +1780,19 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
}
}
+ if (IsAudioRedForOpusFieldTrialEnabled()) {
+ // Loop through the codecs to find the RED codec that matches opus
+ // with respect to clockrate and number of channels.
+ for (const AudioCodec& red_codec : codecs) {
+ if (IsCodec(red_codec, kRedCodecName) &&
+ red_codec.clockrate == send_codec_spec->format.clockrate_hz &&
+ red_codec.channels == send_codec_spec->format.num_channels) {
+ send_codec_spec->red_payload_type = red_codec.id;
+ break;
+ }
+ }
+ }
+
if (send_codec_spec_ != send_codec_spec) {
send_codec_spec_ = std::move(send_codec_spec);
// Apply new settings to all streams.
diff --git a/media/engine/webrtc_voice_engine_unittest.cc b/media/engine/webrtc_voice_engine_unittest.cc
index e7ebf8940f..d70019e9f3 100644
--- a/media/engine/webrtc_voice_engine_unittest.cc
+++ b/media/engine/webrtc_voice_engine_unittest.cc
@@ -59,6 +59,7 @@ const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1);
const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1);
const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1);
const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1);
+const cricket::AudioCodec kRed48000Codec(112, "RED", 48000, 32000, 2);
const cricket::AudioCodec kTelephoneEventCodec1(106,
"telephone-event",
8000,
@@ -1031,6 +1032,30 @@ TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) {
EXPECT_TRUE(channel_->SetRecvParameters(parameters));
}
+// Test that we set Opus/Red under the field trial.
+TEST_P(WebRtcVoiceEngineTestFake, RecvRed) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-Red-For-Opus/Enabled/");
+
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+ EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+ (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+ {{111, {"opus", 48000, 2}}, {112, {"red", 48000, 2}}})));
+}
+
+// Test that we do not allow setting Opus/Red by default.
+TEST_P(WebRtcVoiceEngineTestFake, RecvRedDefault) {
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) {
EXPECT_TRUE(SetupSendStream());
@@ -1194,6 +1219,46 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) {
EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
}
+TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersAdaptivePtime) {
+ EXPECT_TRUE(SetupSendStream());
+ // Get current parameters and change "adaptive_ptime" to true.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+ ASSERT_EQ(1u, parameters.encodings.size());
+ ASSERT_FALSE(parameters.encodings[0].adaptive_ptime);
+ parameters.encodings[0].adaptive_ptime = true;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX));
+ EXPECT_EQ(12000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
+
+ parameters.encodings[0].adaptive_ptime = false;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_FALSE(GetAudioNetworkAdaptorConfig(kSsrcX));
+ EXPECT_EQ(32000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
+}
+
+TEST_P(WebRtcVoiceEngineTestFake,
+ DisablingAdaptivePtimeDoesNotRemoveAudioNetworkAdaptorFromOptions) {
+ EXPECT_TRUE(SetupSendStream());
+ send_parameters_.options.audio_network_adaptor = true;
+ send_parameters_.options.audio_network_adaptor_config = {"1234"};
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
+ GetAudioNetworkAdaptorConfig(kSsrcX));
+
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+ parameters.encodings[0].adaptive_ptime = false;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
+ GetAudioNetworkAdaptorConfig(kSsrcX));
+}
+
+TEST_P(WebRtcVoiceEngineTestFake, AdaptivePtimeFieldTrial) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-AdaptivePtime/enabled:true/");
+ EXPECT_TRUE(SetupSendStream());
+ EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX));
+}
+
// Test that SetRtpSendParameters configures the correct encoding channel for
// each SSRC.
TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) {
@@ -1442,6 +1507,37 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) {
EXPECT_FALSE(channel_->CanInsertDtmf());
}
+// Test that we set Opus/Red under the field trial.
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-Red-For-Opus/Enabled/");
+
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ parameters.codecs[0].id = 96;
+ SetSendParameters(parameters);
+ const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(96, send_codec_spec.payload_type);
+ EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str());
+ EXPECT_EQ(112, send_codec_spec.red_payload_type);
+}
+
+// Test that we set do not interpret Opus/Red by default.
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) {
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ parameters.codecs[0].id = 96;
+ SetSendParameters(parameters);
+ const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(96, send_codec_spec.payload_type);
+ EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str());
+ EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type);
+}
+
// Test that WebRtcVoiceEngine reconfigures, rather than recreates its
// AudioSendStream.
TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) {
@@ -2046,6 +2142,10 @@ class WebRtcVoiceEngineWithSendSideBweTest : public WebRtcVoiceEngineTestFake {
: WebRtcVoiceEngineTestFake("WebRTC-Audio-SendSideBwe/Enabled/") {}
};
+INSTANTIATE_TEST_SUITE_P(UnusedParameter,
+ WebRtcVoiceEngineWithSendSideBweTest,
+ ::testing::Values(true));
+
TEST_P(WebRtcVoiceEngineWithSendSideBweTest,
SupportsTransportSequenceNumberHeaderExtension) {
const std::vector<webrtc::RtpExtension> header_extensions =
@@ -3098,7 +3198,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
@@ -3106,7 +3206,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
// Create a send stream to configure
@@ -3134,11 +3234,11 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
- channel->SetInterface(nullptr, webrtc::MediaTransportConfig());
+ channel->SetInterface(nullptr);
}
TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) {
diff --git a/media/sctp/sctp_transport.cc b/media/sctp/sctp_transport.cc
index 40061a6048..8996288b83 100644
--- a/media/sctp/sctp_transport.cc
+++ b/media/sctp/sctp_transport.cc
@@ -16,12 +16,19 @@ enum PreservedErrno {
SCTP_EINPROGRESS = EINPROGRESS,
SCTP_EWOULDBLOCK = EWOULDBLOCK
};
+
+// Successful return value from usrsctp callbacks. Is not actually used by
+// usrsctp, but all example programs for usrsctp use 1 as their return value.
+constexpr int kSctpSuccessReturn = 1;
+
} // namespace
#include <stdarg.h>
#include <stdio.h>
+#include <usrsctp.h>
#include <memory>
+#include <unordered_map>
#include "absl/algorithm/container.h"
#include "absl/base/attributes.h"
@@ -34,14 +41,14 @@ enum PreservedErrno {
#include "p2p/base/dtls_transport_internal.h" // For PF_NORMAL
#include "rtc_base/arraysize.h"
#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/helpers.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/string_utils.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/trace_event.h"
-#include "usrsctplib/usrsctp.h"
namespace {
@@ -51,7 +58,7 @@ static constexpr size_t kSctpMtu = 1200;
// Set the initial value of the static SCTP Data Engines reference count.
ABSL_CONST_INIT int g_usrsctp_usage_count = 0;
-ABSL_CONST_INIT rtc::GlobalLock g_usrsctp_lock_;
+ABSL_CONST_INIT webrtc::GlobalMutex g_usrsctp_lock_(absl::kConstInit);
// DataMessageType is used for the SCTP "Payload Protocol Identifier", as
// defined in http://tools.ietf.org/html/rfc4960#section-14.4
@@ -60,7 +67,7 @@ ABSL_CONST_INIT rtc::GlobalLock g_usrsctp_lock_;
// http://www.iana.org/assignments/sctp-parameters/sctp-parameters.xml
// The value is not used by SCTP itself. It indicates the protocol running
// on top of SCTP.
-enum PayloadProtocolIdentifier {
+enum {
PPID_NONE = 0, // No protocol is specified.
// Matches the PPIDs in mozilla source and
// https://datatracker.ietf.org/doc/draft-ietf-rtcweb-data-protocol Sec. 9
@@ -72,6 +79,59 @@ enum PayloadProtocolIdentifier {
PPID_TEXT_LAST = 51
};
+// Maps SCTP transport ID to SctpTransport object, necessary in send threshold
+// callback and outgoing packet callback.
+// TODO(crbug.com/1076703): Remove once the underlying problem is fixed or
+// workaround is provided in usrsctp.
+class SctpTransportMap {
+ public:
+ SctpTransportMap() = default;
+
+ // Assigns a new unused ID to the following transport.
+ uintptr_t Register(cricket::SctpTransport* transport) {
+ webrtc::MutexLock lock(&lock_);
+ // usrsctp_connect fails with a value of 0...
+ if (next_id_ == 0) {
+ ++next_id_;
+ }
+ // In case we've wrapped around and need to find an empty spot from a
+ // removed transport. Assumes we'll never be full.
+ while (map_.find(next_id_) != map_.end()) {
+ ++next_id_;
+ if (next_id_ == 0) {
+ ++next_id_;
+ }
+ };
+ map_[next_id_] = transport;
+ return next_id_++;
+ }
+
+ // Returns true if found.
+ bool Deregister(uintptr_t id) {
+ webrtc::MutexLock lock(&lock_);
+ return map_.erase(id) > 0;
+ }
+
+ cricket::SctpTransport* Retrieve(uintptr_t id) const {
+ webrtc::MutexLock lock(&lock_);
+ auto it = map_.find(id);
+ if (it == map_.end()) {
+ return nullptr;
+ }
+ return it->second;
+ }
+
+ private:
+ mutable webrtc::Mutex lock_;
+
+ uintptr_t next_id_ RTC_GUARDED_BY(lock_) = 0;
+ std::unordered_map<uintptr_t, cricket::SctpTransport*> map_
+ RTC_GUARDED_BY(lock_);
+};
+
+// Should only be modified by UsrSctpWrapper.
+ABSL_CONST_INIT SctpTransportMap* g_transport_map_ = nullptr;
+
// Helper for logging SCTP messages.
#if defined(__GNUC__)
__attribute__((__format__(__printf__, 1, 2)))
@@ -88,7 +148,7 @@ void DebugSctpPrintf(const char* format, ...) {
}
// Get the PPID to use for the terminating fragment of this type.
-PayloadProtocolIdentifier GetPpid(cricket::DataMessageType type) {
+uint32_t GetPpid(cricket::DataMessageType type) {
switch (type) {
default:
case cricket::DMT_NONE:
@@ -102,8 +162,7 @@ PayloadProtocolIdentifier GetPpid(cricket::DataMessageType type) {
}
}
-bool GetDataMediaType(PayloadProtocolIdentifier ppid,
- cricket::DataMessageType* dest) {
+bool GetDataMediaType(uint32_t ppid, cricket::DataMessageType* dest) {
RTC_DCHECK(dest != NULL);
switch (ppid) {
case PPID_BINARY_PARTIAL:
@@ -214,6 +273,11 @@ class SctpTransport::UsrSctpWrapper {
// TODO(ldixon): Consider turning this on/off.
usrsctp_sysctl_set_sctp_ecn_enable(0);
+ // WebRTC doesn't use these features, so disable them to reduce the
+ // potential attack surface.
+ usrsctp_sysctl_set_sctp_asconf_enable(0);
+ usrsctp_sysctl_set_sctp_auth_enable(0);
+
// This is harmless, but we should find out when the library default
// changes.
int send_size = usrsctp_sysctl_get_sctp_sendspace();
@@ -242,6 +306,8 @@ class SctpTransport::UsrSctpWrapper {
// Set the number of default outgoing streams. This is the number we'll
// send in the SCTP INIT message.
usrsctp_sysctl_set_sctp_nr_outgoing_streams_default(kMaxSctpStreams);
+
+ g_transport_map_ = new SctpTransportMap();
}
static void UninitializeUsrSctp() {
@@ -251,16 +317,20 @@ class SctpTransport::UsrSctpWrapper {
// closed. Wait and try again until it succeeds for up to 3 seconds.
for (size_t i = 0; i < 300; ++i) {
if (usrsctp_finish() == 0) {
+ delete g_transport_map_;
+ g_transport_map_ = nullptr;
return;
}
rtc::Thread::SleepMs(10);
}
+ delete g_transport_map_;
+ g_transport_map_ = nullptr;
RTC_LOG(LS_ERROR) << "Failed to shutdown usrsctp.";
}
static void IncrementUsrSctpUsageCount() {
- rtc::GlobalLockScope lock(&g_usrsctp_lock_);
+ webrtc::GlobalMutexLock lock(&g_usrsctp_lock_);
if (!g_usrsctp_usage_count) {
InitializeUsrSctp();
}
@@ -268,7 +338,7 @@ class SctpTransport::UsrSctpWrapper {
}
static void DecrementUsrSctpUsageCount() {
- rtc::GlobalLockScope lock(&g_usrsctp_lock_);
+ webrtc::GlobalMutexLock lock(&g_usrsctp_lock_);
--g_usrsctp_usage_count;
if (!g_usrsctp_usage_count) {
UninitializeUsrSctp();
@@ -282,7 +352,19 @@ class SctpTransport::UsrSctpWrapper {
size_t length,
uint8_t tos,
uint8_t set_df) {
- SctpTransport* transport = static_cast<SctpTransport*>(addr);
+ if (!g_transport_map_) {
+ RTC_LOG(LS_ERROR)
+ << "OnSctpOutboundPacket called after usrsctp uninitialized?";
+ return EINVAL;
+ }
+ SctpTransport* transport =
+ g_transport_map_->Retrieve(reinterpret_cast<uintptr_t>(addr));
+ if (!transport) {
+ RTC_LOG(LS_ERROR)
+ << "OnSctpOutboundPacket: Failed to get transport for socket ID "
+ << addr;
+ return EINVAL;
+ }
RTC_LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():"
"addr: "
<< addr << "; length: " << length
@@ -312,77 +394,10 @@ class SctpTransport::UsrSctpWrapper {
int flags,
void* ulp_info) {
SctpTransport* transport = static_cast<SctpTransport*>(ulp_info);
- // Post data to the transport's receiver thread (copying it).
- // TODO(ldixon): Unclear if copy is needed as this method is responsible for
- // memory cleanup. But this does simplify code.
- const PayloadProtocolIdentifier ppid =
- static_cast<PayloadProtocolIdentifier>(
- rtc::NetworkToHost32(rcv.rcv_ppid));
- DataMessageType type = DMT_NONE;
- if (!GetDataMediaType(ppid, &type) && !(flags & MSG_NOTIFICATION)) {
- // It's neither a notification nor a recognized data packet. Drop it.
- RTC_LOG(LS_ERROR) << "Received an unknown PPID " << ppid
- << " on an SCTP packet. Dropping.";
- free(data);
- } else {
- ReceiveDataParams params;
-
- params.sid = rcv.rcv_sid;
- params.seq_num = rcv.rcv_ssn;
- params.timestamp = rcv.rcv_tsn;
- params.type = type;
-
- // Expect only continuation messages belonging to the same sid, the sctp
- // stack should ensure this.
- if ((transport->partial_incoming_message_.size() != 0) &&
- (rcv.rcv_sid != transport->partial_params_.sid)) {
- // A message with a new sid, but haven't seen the EOR for the
- // previous message. Deliver the previous partial message to avoid
- // merging messages from different sid's.
- transport->invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, transport->network_thread_,
- rtc::Bind(&SctpTransport::OnInboundPacketFromSctpToTransport,
- transport, transport->partial_incoming_message_,
- transport->partial_params_, transport->partial_flags_));
-
- transport->partial_incoming_message_.Clear();
- }
-
- transport->partial_incoming_message_.AppendData(
- reinterpret_cast<uint8_t*>(data), length);
- transport->partial_params_ = params;
- transport->partial_flags_ = flags;
-
- free(data);
-
- // Merge partial messages until they exceed the maximum send buffer size.
- // This enables messages from a single send to be delivered in a single
- // callback. Larger messages (originating from other implementations) will
- // still be delivered in chunks.
- if (!(flags & MSG_EOR) &&
- (transport->partial_incoming_message_.size() < kSctpSendBufferSize)) {
- return 1;
- }
-
- if (!(flags & MSG_EOR)) {
- // TODO(bugs.webrtc.org/7774): We currently chunk messages if they are
- // >= kSctpSendBufferSize. The better thing to do here is buffer up to
- // the size negotiated in the SDP, and if a larger message is received
- // close the channel and report the error. See discussion in the bug.
- RTC_LOG(LS_WARNING) << "Chunking SCTP message without the EOR bit set.";
- }
-
- // The ownership of the packet transfers to |invoker_|. Using
- // CopyOnWriteBuffer is the most convenient way to do this.
- transport->invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, transport->network_thread_,
- rtc::Bind(&SctpTransport::OnInboundPacketFromSctpToTransport,
- transport, transport->partial_incoming_message_, params,
- flags));
-
- transport->partial_incoming_message_.Clear();
- }
- return 1;
+ int result =
+ transport->OnDataOrNotificationFromSctp(data, length, rcv, flags);
+ free(data);
+ return result;
}
static SctpTransport* GetTransportFromSocket(struct socket* sock) {
@@ -392,14 +407,20 @@ class SctpTransport::UsrSctpWrapper {
return nullptr;
}
// usrsctp_getladdrs() returns the addresses bound to this socket, which
- // contains the SctpTransport* as sconn_addr. Read the pointer,
+ // contains the SctpTransport id as sconn_addr. Read the id,
// then free the list of addresses once we have the pointer. We only open
// AF_CONN sockets, and they should all have the sconn_addr set to the
- // pointer that created them, so [0] is as good as any other.
+ // id of the transport that created them, so [0] is as good as any other.
struct sockaddr_conn* sconn =
reinterpret_cast<struct sockaddr_conn*>(&addrs[0]);
- SctpTransport* transport =
- reinterpret_cast<SctpTransport*>(sconn->sconn_addr);
+ if (!g_transport_map_) {
+ RTC_LOG(LS_ERROR)
+ << "GetTransportFromSocket called after usrsctp uninitialized?";
+ usrsctp_freeladdrs(addrs);
+ return nullptr;
+ }
+ SctpTransport* transport = g_transport_map_->Retrieve(
+ reinterpret_cast<uintptr_t>(sconn->sconn_addr));
usrsctp_freeladdrs(addrs);
return transport;
@@ -779,9 +800,10 @@ bool SctpTransport::OpenSctpSocket() {
UsrSctpWrapper::DecrementUsrSctpUsageCount();
return false;
}
- // Register this class as an address for usrsctp. This is used by SCTP to
+ id_ = g_transport_map_->Register(this);
+ // Register our id as an address for usrsctp. This is used by SCTP to
// direct the packets received (by the created socket) to this class.
- usrsctp_register_address(this);
+ usrsctp_register_address(reinterpret_cast<void*>(id_));
return true;
}
@@ -872,7 +894,8 @@ void SctpTransport::CloseSctpSocket() {
// discarded instead of being sent.
usrsctp_close(sock_);
sock_ = nullptr;
- usrsctp_deregister_address(this);
+ usrsctp_deregister_address(reinterpret_cast<void*>(id_));
+ RTC_CHECK(g_transport_map_->Deregister(id_));
UsrSctpWrapper::DecrementUsrSctpUsageCount();
ready_to_send_data_ = false;
}
@@ -1003,7 +1026,7 @@ void SctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport,
// will be will be given to the global OnSctpInboundData, and then,
// marshalled by the AsyncInvoker.
VerboseLogPacket(data, len, SCTP_DUMP_INBOUND);
- usrsctp_conninput(this, data, len, 0);
+ usrsctp_conninput(reinterpret_cast<void*>(id_), data, len, 0);
} else {
// TODO(ldixon): Consider caching the packet for very slightly better
// reliability.
@@ -1033,7 +1056,7 @@ sockaddr_conn SctpTransport::GetSctpSockAddr(int port) {
#endif
// Note: conversion from int to uint16_t happens here.
sconn.sconn_port = rtc::HostToNetwork16(port);
- sconn.sconn_addr = this;
+ sconn.sconn_addr = reinterpret_cast<void*>(id_);
return sconn;
}
@@ -1060,31 +1083,120 @@ void SctpTransport::OnPacketFromSctpToNetwork(
rtc::PacketOptions(), PF_NORMAL);
}
-void SctpTransport::OnInboundPacketFromSctpToTransport(
- const rtc::CopyOnWriteBuffer& buffer,
- ReceiveDataParams params,
+int SctpTransport::InjectDataOrNotificationFromSctpForTesting(
+ void* data,
+ size_t length,
+ struct sctp_rcvinfo rcv,
int flags) {
- RTC_DCHECK_RUN_ON(network_thread_);
- RTC_LOG(LS_VERBOSE) << debug_name_
- << "->OnInboundPacketFromSctpToTransport(...): "
- "Received SCTP data:"
- " sid="
- << params.sid
- << " notification: " << (flags & MSG_NOTIFICATION)
- << " length=" << buffer.size();
- // Sending a packet with data == NULL (no data) is SCTPs "close the
- // connection" message. This sets sock_ = NULL;
- if (!buffer.size() || !buffer.data()) {
+ return OnDataOrNotificationFromSctp(data, length, rcv, flags);
+}
+
+int SctpTransport::OnDataOrNotificationFromSctp(void* data,
+ size_t length,
+ struct sctp_rcvinfo rcv,
+ int flags) {
+ // If data is NULL, the SCTP association has been closed.
+ if (!data) {
RTC_LOG(LS_INFO) << debug_name_
- << "->OnInboundPacketFromSctpToTransport(...): "
+ << "->OnSctpInboundPacket(...): "
"No data, closing.";
- return;
+ return kSctpSuccessReturn;
}
+
+ // Handle notifications early.
+ // Note: Notifications are never split into chunks, so they can and should
+ // be handled early and entirely separate from the reassembly
+ // process.
if (flags & MSG_NOTIFICATION) {
- OnNotificationFromSctp(buffer);
- } else {
- OnDataFromSctpToTransport(params, buffer);
+ RTC_LOG(LS_VERBOSE) << debug_name_
+ << "->OnSctpInboundPacket(...): SCTP notification"
+ << " length=" << length;
+
+ // Copy and dispatch asynchronously
+ rtc::CopyOnWriteBuffer notification(reinterpret_cast<uint8_t*>(data),
+ length);
+ invoker_.AsyncInvoke<void>(
+ RTC_FROM_HERE, network_thread_,
+ rtc::Bind(&SctpTransport::OnNotificationFromSctp, this, notification));
+ return kSctpSuccessReturn;
}
+
+ // Log data chunk
+ const uint32_t ppid = rtc::NetworkToHost32(rcv.rcv_ppid);
+ RTC_LOG(LS_VERBOSE) << debug_name_
+ << "->OnSctpInboundPacket(...): SCTP data chunk"
+ << " length=" << length << ", sid=" << rcv.rcv_sid
+ << ", ppid=" << ppid << ", ssn=" << rcv.rcv_ssn
+ << ", cum-tsn=" << rcv.rcv_cumtsn
+ << ", eor=" << ((flags & MSG_EOR) ? "y" : "n");
+
+ // Validate payload protocol identifier
+ DataMessageType type = DMT_NONE;
+ if (!GetDataMediaType(ppid, &type)) {
+ // Unexpected PPID, dropping
+ RTC_LOG(LS_ERROR) << "Received an unknown PPID " << ppid
+ << " on an SCTP packet. Dropping.";
+ return kSctpSuccessReturn;
+ }
+
+ // Expect only continuation messages belonging to the same SID. The SCTP
+ // stack is expected to ensure this as long as the User Message
+ // Interleaving extension (RFC 8260) is not explicitly enabled, so this
+ // merely acts as a safeguard.
+ if ((partial_incoming_message_.size() != 0) &&
+ (rcv.rcv_sid != partial_params_.sid)) {
+ RTC_LOG(LS_ERROR) << "Received a new SID without EOR in the previous"
+ << " SCTP packet. Discarding the previous packet.";
+ partial_incoming_message_.Clear();
+ }
+
+ // Copy metadata of interest
+ ReceiveDataParams params;
+ params.type = type;
+ params.sid = rcv.rcv_sid;
+ // Note that the SSN is identical for each chunk of the same message.
+ // Furthermore, it is increased per stream and not on the whole
+ // association.
+ params.seq_num = rcv.rcv_ssn;
+ // There is no timestamp field in the SCTP API
+ params.timestamp = 0;
+
+ // Append the chunk's data to the message buffer
+ partial_incoming_message_.AppendData(reinterpret_cast<uint8_t*>(data),
+ length);
+ partial_params_ = params;
+ partial_flags_ = flags;
+
+ // If the message is not yet complete...
+ if (!(flags & MSG_EOR)) {
+ if (partial_incoming_message_.size() < kSctpSendBufferSize) {
+ // We still have space in the buffer. Continue buffering chunks until
+ // the message is complete before handing it out.
+ return kSctpSuccessReturn;
+ } else {
+ // The sender is exceeding the maximum message size that we announced.
+ // Spit out a warning but still hand out the partial message. Note that
+ // this behaviour is undesirable, see the discussion in issue 7774.
+ //
+ // TODO(lgrahl): Once sufficient time has passed and all supported
+ // browser versions obey the announced maximum message size, we should
+ // abort the SCTP association instead to prevent message integrity
+ // violation.
+ RTC_LOG(LS_ERROR) << "Handing out partial SCTP message.";
+ }
+ }
+
+ // Dispatch the complete message.
+ // The ownership of the packet transfers to |invoker_|. Using
+ // CopyOnWriteBuffer is the most convenient way to do this.
+ invoker_.AsyncInvoke<void>(
+ RTC_FROM_HERE, network_thread_,
+ rtc::Bind(&SctpTransport::OnDataFromSctpToTransport, this, params,
+ partial_incoming_message_));
+
+ // Reset the message buffer
+ partial_incoming_message_.Clear();
+ return kSctpSuccessReturn;
}
void SctpTransport::OnDataFromSctpToTransport(
@@ -1182,6 +1294,9 @@ void SctpTransport::OnNotificationAssocChange(const sctp_assoc_change& change) {
max_outbound_streams_ = change.sac_outbound_streams;
max_inbound_streams_ = change.sac_inbound_streams;
SignalAssociationChangeCommunicationUp();
+ // In case someone tried to close a stream before communication
+ // came up, send any queued resets.
+ SendQueuedStreamResets();
break;
case SCTP_COMM_LOST:
RTC_LOG(LS_INFO) << "Association change SCTP_COMM_LOST";
diff --git a/media/sctp/sctp_transport.h b/media/sctp/sctp_transport.h
index d346cfc71f..38029ffeb3 100644
--- a/media/sctp/sctp_transport.h
+++ b/media/sctp/sctp_transport.h
@@ -13,6 +13,7 @@
#include <errno.h>
+#include <cstdint>
#include <map>
#include <memory>
#include <set>
@@ -33,6 +34,7 @@
// Defined by "usrsctplib/usrsctp.h"
struct sockaddr_conn;
struct sctp_assoc_change;
+struct sctp_rcvinfo;
struct sctp_stream_reset_event;
struct sctp_sendv_spa;
@@ -57,8 +59,8 @@ struct SctpInboundPacket;
// 8. usrsctp_conninput(wrapped_data)
// [network thread returns; sctp thread then calls the following]
// 9. OnSctpInboundData(data)
+// 10. SctpTransport::OnDataFromSctpToTransport(data)
// [sctp thread returns having async invoked on the network thread]
-// 10. SctpTransport::OnInboundPacketFromSctpToTransport(inboundpacket)
// 11. SctpTransport::OnDataFromSctpToTransport(data)
// 12. SctpTransport::SignalDataReceived(data)
// [from the same thread, methods registered/connected to
@@ -93,6 +95,10 @@ class SctpTransport : public SctpTransportInternal,
void set_debug_name_for_testing(const char* debug_name) override {
debug_name_ = debug_name;
}
+ int InjectDataOrNotificationFromSctpForTesting(void* data,
+ size_t length,
+ struct sctp_rcvinfo rcv,
+ int flags);
// Exposed to allow Post call from c-callbacks.
// TODO(deadbeef): Remove this or at least make it return a const pointer.
@@ -172,14 +178,17 @@ class SctpTransport : public SctpTransportInternal,
// Called using |invoker_| to send packet on the network.
void OnPacketFromSctpToNetwork(const rtc::CopyOnWriteBuffer& buffer);
- // Called using |invoker_| to decide what to do with the packet.
- // The |flags| parameter is used by SCTP to distinguish notification packets
- // from other types of packets.
- void OnInboundPacketFromSctpToTransport(const rtc::CopyOnWriteBuffer& buffer,
- ReceiveDataParams params,
- int flags);
+
+ // Called on the SCTP thread.
+ // Flags are standard socket API flags (RFC 6458).
+ int OnDataOrNotificationFromSctp(void* data,
+ size_t length,
+ struct sctp_rcvinfo rcv,
+ int flags);
+ // Called using |invoker_| to decide what to do with the data.
void OnDataFromSctpToTransport(const ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer);
+ // Called using |invoker_| to decide what to do with the notification.
void OnNotificationFromSctp(const rtc::CopyOnWriteBuffer& buffer);
void OnNotificationAssocChange(const sctp_assoc_change& change);
@@ -267,6 +276,10 @@ class SctpTransport : public SctpTransportInternal,
absl::optional<int> max_outbound_streams_;
absl::optional<int> max_inbound_streams_;
+ // Used for associating this transport with the underlying sctp socket in
+ // various callbacks.
+ uintptr_t id_ = 0;
+
RTC_DISALLOW_COPY_AND_ASSIGN(SctpTransport);
};
diff --git a/media/sctp/sctp_transport_reliability_unittest.cc b/media/sctp/sctp_transport_reliability_unittest.cc
index af9ddfeba7..e5dbf2933d 100644
--- a/media/sctp/sctp_transport_reliability_unittest.cc
+++ b/media/sctp/sctp_transport_reliability_unittest.cc
@@ -18,6 +18,7 @@
#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread.h"
#include "test/gtest.h"
@@ -377,7 +378,7 @@ class SctpPingPong final {
CreateTwoConnectedSctpTransportsWithAllStreams();
{
- rtc::CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
if (!errors_list_.empty()) {
return false;
}
@@ -397,7 +398,7 @@ class SctpPingPong final {
std::vector<std::string> GetErrorsList() const {
std::vector<std::string> result;
{
- rtc::CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
result = errors_list_;
}
return result;
@@ -566,7 +567,7 @@ class SctpPingPong final {
}
void ReportError(std::string error) {
- rtc::CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
errors_list_.push_back(std::move(error));
}
@@ -578,7 +579,7 @@ class SctpPingPong final {
std::unique_ptr<cricket::SctpTransport> sctp_transport2_;
std::unique_ptr<SctpDataSender> data_sender1_;
std::unique_ptr<SctpDataSender> data_sender2_;
- rtc::CriticalSection lock_;
+ mutable webrtc::Mutex lock_;
std::vector<std::string> errors_list_ RTC_GUARDED_BY(lock_);
const uint32_t id_;
diff --git a/media/sctp/sctp_transport_unittest.cc b/media/sctp/sctp_transport_unittest.cc
index ff3f2d70a9..540d2bd5e4 100644
--- a/media/sctp/sctp_transport_unittest.cc
+++ b/media/sctp/sctp_transport_unittest.cc
@@ -12,6 +12,7 @@
#include <stdio.h>
#include <string.h>
+#include <usrsctp.h>
#include <memory>
#include <string>
@@ -238,6 +239,73 @@ class SctpTransportTest : public ::testing::Test, public sigslot::has_slots<> {
void OnChan2ReadyToSend() { ++transport2_ready_to_send_count_; }
};
+TEST_F(SctpTransportTest, MessageInterleavedWithNotification) {
+ FakeDtlsTransport fake_dtls1("fake dtls 1", 0);
+ FakeDtlsTransport fake_dtls2("fake dtls 2", 0);
+ SctpFakeDataReceiver recv1;
+ SctpFakeDataReceiver recv2;
+ std::unique_ptr<SctpTransport> transport1(
+ CreateTransport(&fake_dtls1, &recv1));
+ std::unique_ptr<SctpTransport> transport2(
+ CreateTransport(&fake_dtls2, &recv2));
+
+ // Add a stream.
+ transport1->OpenStream(1);
+ transport2->OpenStream(1);
+
+ // Start SCTP transports.
+ transport1->Start(kSctpDefaultPort, kSctpDefaultPort, kSctpSendBufferSize);
+ transport2->Start(kSctpDefaultPort, kSctpDefaultPort, kSctpSendBufferSize);
+
+ // Connect the two fake DTLS transports.
+ fake_dtls1.SetDestination(&fake_dtls2, false);
+
+ // Ensure the SCTP association has been established
+ // Note: I'd rather watch for an assoc established state here but couldn't
+ // find any exposed...
+ SendDataResult result;
+ ASSERT_TRUE(SendData(transport2.get(), 1, "meow", &result));
+ EXPECT_TRUE_WAIT(ReceivedData(&recv1, 1, "meow"), kDefaultTimeout);
+
+ // Detach the DTLS transport to ensure only we will inject packets from here
+ // on.
+ transport1->SetDtlsTransport(nullptr);
+
+ // Prepare chunk buffer and metadata
+ auto chunk = rtc::CopyOnWriteBuffer(32);
+ struct sctp_rcvinfo meta = {0};
+ meta.rcv_sid = 1;
+ meta.rcv_ssn = 1337;
+ meta.rcv_ppid = rtc::HostToNetwork32(51); // text (complete)
+
+ // Inject chunk 1/2.
+ meta.rcv_tsn = 42;
+ meta.rcv_cumtsn = 42;
+ chunk.SetData("meow?", 5);
+ EXPECT_EQ(1, transport1->InjectDataOrNotificationFromSctpForTesting(
+ chunk.data(), chunk.size(), meta, 0));
+
+ // Inject a notification in between chunks.
+ union sctp_notification notification;
+ memset(&notification, 0, sizeof(notification));
+ // Type chosen since it's not handled apart from being logged
+ notification.sn_header.sn_type = SCTP_PEER_ADDR_CHANGE;
+ notification.sn_header.sn_flags = 0;
+ notification.sn_header.sn_length = sizeof(notification);
+ EXPECT_EQ(1, transport1->InjectDataOrNotificationFromSctpForTesting(
+ &notification, sizeof(notification), {0}, MSG_NOTIFICATION));
+
+ // Inject chunk 2/2
+ meta.rcv_tsn = 42;
+ meta.rcv_cumtsn = 43;
+ chunk.SetData(" rawr!", 6);
+ EXPECT_EQ(1, transport1->InjectDataOrNotificationFromSctpForTesting(
+ chunk.data(), chunk.size(), meta, MSG_EOR));
+
+ // Expect the message to contain both chunks.
+ EXPECT_TRUE_WAIT(ReceivedData(&recv1, 1, "meow? rawr!"), kDefaultTimeout);
+}
+
// Test that data can be sent end-to-end when an SCTP transport starts with one
// transport (which is unwritable), and then switches to another transport. A
// common scenario due to how BUNDLE works.
@@ -605,6 +673,15 @@ TEST_F(SctpTransportTest, ClosesRemoteStream) {
transport1()->ResetStream(1);
EXPECT_TRUE_WAIT(transport2_observer.WasStreamClosed(1), kDefaultTimeout);
}
+TEST_F(SctpTransportTest, ClosesRemoteStreamWithNoData) {
+ SetupConnectedTransportsWithTwoStreams();
+ SctpTransportObserver transport1_observer(transport1());
+ SctpTransportObserver transport2_observer(transport2());
+
+ // Close stream 1 on transport 1. Transport 2 should notify us.
+ transport1()->ResetStream(1);
+ EXPECT_TRUE_WAIT(transport2_observer.WasStreamClosed(1), kDefaultTimeout);
+}
TEST_F(SctpTransportTest, ClosesTwoRemoteStreams) {
SetupConnectedTransportsWithTwoStreams();
diff --git a/modules/BUILD.gn b/modules/BUILD.gn
index ffdd7016a1..f6f44bd4b3 100644
--- a/modules/BUILD.gn
+++ b/modules/BUILD.gn
@@ -31,10 +31,8 @@ group("modules") {
rtc_source_set("module_api_public") {
sources = [ "include/module_common_types_public.h" ]
- deps = [
- "..:webrtc_common",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "..:webrtc_common" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("module_api") {
diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn
index ceee0c0f07..be9705138a 100644
--- a/modules/audio_coding/BUILD.gn
+++ b/modules/audio_coding/BUILD.gn
@@ -54,8 +54,11 @@ rtc_library("audio_coding") {
"../../rtc_base:checks",
"../../rtc_base:deprecation",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -71,8 +74,8 @@ rtc_library("legacy_encoded_audio_frame") {
"../../api/audio_codecs:audio_codecs_api",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("webrtc_cng") {
@@ -104,8 +107,8 @@ rtc_library("audio_encoder_cng") {
"../../api/units:time_delta",
"../../common_audio",
"../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("red") {
@@ -122,8 +125,8 @@ rtc_library("red") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("g711") {
@@ -143,8 +146,8 @@ rtc_library("g711") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":g711_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -175,8 +178,8 @@ rtc_library("g722") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":g722_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -208,8 +211,8 @@ rtc_library("ilbc") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":ilbc_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -384,8 +387,10 @@ rtc_source_set("isac_common") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../../rtc_base:safe_minmax",
+ "../../system_wrappers:field_trial",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("isac") {
@@ -723,6 +728,8 @@ rtc_library("audio_coding_opus_common") {
"../../api/audio_codecs:audio_codecs_api",
"../../rtc_base:checks",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -751,6 +758,8 @@ rtc_library("webrtc_opus") {
"../../rtc_base:rtc_numerics",
"../../rtc_base:safe_minmax",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -788,6 +797,8 @@ rtc_library("webrtc_multiopus") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:safe_minmax",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -848,7 +859,7 @@ rtc_library("audio_network_adaptor_config") {
"audio_network_adaptor/audio_network_adaptor_config.cc",
"audio_network_adaptor/include/audio_network_adaptor_config.h",
]
- deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_network_adaptor") {
@@ -874,6 +885,8 @@ rtc_library("audio_network_adaptor") {
"audio_network_adaptor/fec_controller_plr_based.h",
"audio_network_adaptor/frame_length_controller.cc",
"audio_network_adaptor/frame_length_controller.h",
+ "audio_network_adaptor/frame_length_controller_v2.cc",
+ "audio_network_adaptor/frame_length_controller_v2.h",
"audio_network_adaptor/include/audio_network_adaptor.h",
"audio_network_adaptor/util/threshold_curve.h",
]
@@ -893,6 +906,9 @@ rtc_library("audio_network_adaptor") {
"../../rtc_base/system:file_wrapper",
"../../system_wrappers",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -992,9 +1008,12 @@ rtc_library("neteq") {
"../../rtc_base:safe_minmax",
"../../rtc_base:sanitizer",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1052,8 +1071,8 @@ rtc_library("neteq_tools_minimal") {
"../../system_wrappers",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = audio_codec_defines
}
@@ -1090,8 +1109,8 @@ rtc_library("neteq_test_tools") {
"../../test:rtp_test_utils",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [
":neteq_tools",
@@ -1130,6 +1149,8 @@ rtc_library("neteq_tools") {
"../../rtc_base:rtc_base_approved",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1172,8 +1193,8 @@ if (rtc_enable_protobuf) {
"../../rtc_base:rtc_base_approved",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = # no-presubmit-check TODO(webrtc:8603)
[ "../../logging:rtc_event_log_proto" ]
}
@@ -1230,10 +1251,11 @@ rtc_library("audio_coding_modules_tests_shared") {
"../../test:test_support",
"../rtp_rtcp:rtp_rtcp_format",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
-
defines = audio_coding_defines
if (rtc_enable_protobuf) {
@@ -1368,10 +1390,13 @@ if (rtc_include_tests) {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:rw_lock_wrapper",
"../../system_wrappers",
"../../test:fileutils",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1417,9 +1442,9 @@ if (rtc_include_tests) {
":neteq_tools",
"../../rtc_base:rtc_base_approved",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/strings",
"//testing/gtest",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("acm_send_test") {
@@ -1432,6 +1457,7 @@ if (rtc_include_tests) {
defines = audio_coding_defines
deps = audio_coding_deps + [
+ "//third_party/abseil-cpp/absl/strings",
"../../api/audio:audio_frame_api",
"../../rtc_base:checks",
":audio_coding",
@@ -1497,8 +1523,8 @@ if (rtc_include_tests) {
deps = [
"../../rtc_base:checks",
"../../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
sources = [
"neteq/tools/neteq_test_factory.cc",
"neteq/tools/neteq_test_factory.h",
@@ -1626,15 +1652,14 @@ if (rtc_include_tests) {
"../../test:fileutils",
"../../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
}
rtc_executable("rtp_encode") {
testonly = true
deps = audio_coding_deps + [
- "//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
":audio_coding",
@@ -1648,6 +1673,7 @@ if (rtc_include_tests) {
"../../api/audio_codecs/isac:audio_encoder_isac",
"../../api/audio_codecs/opus:audio_encoder_opus",
"../../rtc_base:safe_conversions",
+ "//third_party/abseil-cpp/absl/memory",
]
sources = [ "neteq/tools/rtp_encode.cc" ]
@@ -1916,6 +1942,7 @@ if (rtc_include_tests) {
"audio_network_adaptor/event_log_writer_unittest.cc",
"audio_network_adaptor/fec_controller_plr_based_unittest.cc",
"audio_network_adaptor/frame_length_controller_unittest.cc",
+ "audio_network_adaptor/frame_length_controller_v2_unittest.cc",
"audio_network_adaptor/util/threshold_curve_unittest.cc",
"codecs/builtin_audio_decoder_factory_unittest.cc",
"codecs/builtin_audio_encoder_factory_unittest.cc",
@@ -2037,6 +2064,7 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:sanitizer",
"../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/system:arch",
"../../system_wrappers",
"../../system_wrappers:cpu_features_api",
@@ -2050,8 +2078,11 @@ if (rtc_include_tests) {
"codecs/opus/test",
"codecs/opus/test:test_unittest",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/modules/audio_coding/acm2/acm_receiver.cc b/modules/audio_coding/acm2/acm_receiver.cc
index 29eff19e9e..33142c783c 100644
--- a/modules/audio_coding/acm2/acm_receiver.cc
+++ b/modules/audio_coding/acm2/acm_receiver.cc
@@ -86,7 +86,7 @@ int AcmReceiver::GetBaseMinimumDelayMs() const {
}
absl::optional<int> AcmReceiver::last_packet_sample_rate_hz() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!last_decoder_) {
return absl::nullopt;
}
@@ -118,7 +118,7 @@ int AcmReceiver::InsertPacket(const RTPHeader& rtp_header,
}
{
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (absl::EqualsIgnoreCase(format->sdp_format.name, "cn")) {
if (last_decoder_ && last_decoder_->num_channels > 1) {
// This is a CNG and the audio codec is not mono, so skip pushing in
@@ -131,7 +131,7 @@ int AcmReceiver::InsertPacket(const RTPHeader& rtp_header,
/*num_channels=*/format->num_channels,
/*sdp_format=*/std::move(format->sdp_format)};
}
- } // |crit_sect_| is released.
+ } // |mutex_| is released.
if (neteq_->InsertPacket(rtp_header, incoming_payload) < 0) {
RTC_LOG(LERROR) << "AcmReceiver::InsertPacket "
@@ -147,7 +147,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz,
bool* muted) {
RTC_DCHECK(muted);
// Accessing members, take the lock.
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (neteq_->GetAudio(audio_frame, muted) != NetEq::kOK) {
RTC_LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
@@ -217,7 +217,7 @@ void AcmReceiver::FlushBuffers() {
}
void AcmReceiver::RemoveAllCodecs() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
neteq_->RemoveAllPayloadTypes();
last_decoder_ = absl::nullopt;
}
@@ -236,7 +236,7 @@ int AcmReceiver::TargetDelayMs() const {
absl::optional<std::pair<int, SdpAudioFormat>> AcmReceiver::LastDecoder()
const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!last_decoder_) {
return absl::nullopt;
}
@@ -327,7 +327,7 @@ uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const {
void AcmReceiver::GetDecodingCallStatistics(
AudioDecodingCallStats* stats) const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
*stats = call_stats_.GetDecodingStatistics();
}
diff --git a/modules/audio_coding/acm2/acm_receiver.h b/modules/audio_coding/acm2/acm_receiver.h
index 15126566ae..d451a94ef7 100644
--- a/modules/audio_coding/acm2/acm_receiver.h
+++ b/modules/audio_coding/acm2/acm_receiver.h
@@ -26,7 +26,7 @@
#include "modules/audio_coding/acm2/acm_resampler.h"
#include "modules/audio_coding/acm2/call_statistics.h"
#include "modules/audio_coding/include/audio_coding_module.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -212,14 +212,14 @@ class AcmReceiver {
uint32_t NowInTimestamp(int decoder_sampling_rate) const;
- rtc::CriticalSection crit_sect_;
- absl::optional<DecoderInfo> last_decoder_ RTC_GUARDED_BY(crit_sect_);
- ACMResampler resampler_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<int16_t[]> last_audio_buffer_ RTC_GUARDED_BY(crit_sect_);
- CallStatistics call_stats_ RTC_GUARDED_BY(crit_sect_);
+ mutable Mutex mutex_;
+ absl::optional<DecoderInfo> last_decoder_ RTC_GUARDED_BY(mutex_);
+ ACMResampler resampler_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<int16_t[]> last_audio_buffer_ RTC_GUARDED_BY(mutex_);
+ CallStatistics call_stats_ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<NetEq> neteq_; // NetEq is thread-safe; no lock needed.
Clock* const clock_;
- bool resampled_last_output_frame_ RTC_GUARDED_BY(crit_sect_);
+ bool resampled_last_output_frame_ RTC_GUARDED_BY(mutex_);
};
} // namespace acm2
diff --git a/modules/audio_coding/acm2/audio_coding_module.cc b/modules/audio_coding/acm2/audio_coding_module.cc
index a2d08ac004..648ae6e5ea 100644
--- a/modules/audio_coding/acm2/audio_coding_module.cc
+++ b/modules/audio_coding/acm2/audio_coding_module.cc
@@ -23,9 +23,9 @@
#include "modules/include/module_common_types_public.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/metrics.h"
@@ -105,7 +105,7 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
std::vector<int16_t> buffer;
};
- InputData input_data_ RTC_GUARDED_BY(acm_crit_sect_);
+ InputData input_data_ RTC_GUARDED_BY(acm_mutex_);
// This member class writes values to the named UMA histogram, but only if
// the value has changed since the last time (and always for the first call).
@@ -124,18 +124,18 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
};
int Add10MsDataInternal(const AudioFrame& audio_frame, InputData* input_data)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_);
// TODO(bugs.webrtc.org/10739): change |absolute_capture_timestamp_ms| to
// int64_t when it always receives a valid value.
int Encode(const InputData& input_data,
absl::optional<int64_t> absolute_capture_timestamp_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_);
- int InitializeReceiverSafe() RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ int InitializeReceiverSafe() RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_);
bool HaveValidEncoder(const char* caller_name) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_);
// Preprocessing of input audio, including resampling and down-mixing if
// required, before pushing audio into encoder's buffer.
@@ -150,38 +150,38 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
// 0: otherwise.
int PreprocessToAddData(const AudioFrame& in_frame,
const AudioFrame** ptr_out)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_);
// Change required states after starting to receive the codec corresponding
// to |index|.
int UpdateUponReceivingCodec(int index);
- rtc::CriticalSection acm_crit_sect_;
- rtc::Buffer encode_buffer_ RTC_GUARDED_BY(acm_crit_sect_);
- uint32_t expected_codec_ts_ RTC_GUARDED_BY(acm_crit_sect_);
- uint32_t expected_in_ts_ RTC_GUARDED_BY(acm_crit_sect_);
- acm2::ACMResampler resampler_ RTC_GUARDED_BY(acm_crit_sect_);
+ mutable Mutex acm_mutex_;
+ rtc::Buffer encode_buffer_ RTC_GUARDED_BY(acm_mutex_);
+ uint32_t expected_codec_ts_ RTC_GUARDED_BY(acm_mutex_);
+ uint32_t expected_in_ts_ RTC_GUARDED_BY(acm_mutex_);
+ acm2::ACMResampler resampler_ RTC_GUARDED_BY(acm_mutex_);
acm2::AcmReceiver receiver_; // AcmReceiver has it's own internal lock.
- ChangeLogger bitrate_logger_ RTC_GUARDED_BY(acm_crit_sect_);
+ ChangeLogger bitrate_logger_ RTC_GUARDED_BY(acm_mutex_);
// Current encoder stack, provided by a call to RegisterEncoder.
- std::unique_ptr<AudioEncoder> encoder_stack_ RTC_GUARDED_BY(acm_crit_sect_);
+ std::unique_ptr<AudioEncoder> encoder_stack_ RTC_GUARDED_BY(acm_mutex_);
// This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_ RTC_GUARDED_BY(acm_crit_sect_);
+ uint8_t previous_pltype_ RTC_GUARDED_BY(acm_mutex_);
- bool receiver_initialized_ RTC_GUARDED_BY(acm_crit_sect_);
+ bool receiver_initialized_ RTC_GUARDED_BY(acm_mutex_);
- AudioFrame preprocess_frame_ RTC_GUARDED_BY(acm_crit_sect_);
- bool first_10ms_data_ RTC_GUARDED_BY(acm_crit_sect_);
+ AudioFrame preprocess_frame_ RTC_GUARDED_BY(acm_mutex_);
+ bool first_10ms_data_ RTC_GUARDED_BY(acm_mutex_);
- bool first_frame_ RTC_GUARDED_BY(acm_crit_sect_);
- uint32_t last_timestamp_ RTC_GUARDED_BY(acm_crit_sect_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(acm_crit_sect_);
+ bool first_frame_ RTC_GUARDED_BY(acm_mutex_);
+ uint32_t last_timestamp_ RTC_GUARDED_BY(acm_mutex_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(acm_mutex_);
- rtc::CriticalSection callback_crit_sect_;
+ Mutex callback_mutex_;
AudioPacketizationCallback* packetization_callback_
- RTC_GUARDED_BY(callback_crit_sect_);
+ RTC_GUARDED_BY(callback_mutex_);
int codec_histogram_bins_log_[static_cast<size_t>(
AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes)];
@@ -298,7 +298,7 @@ int32_t AudioCodingModuleImpl::Encode(
}
{
- rtc::CritScope lock(&callback_crit_sect_);
+ MutexLock lock(&callback_mutex_);
if (packetization_callback_) {
packetization_callback_->SendData(
frame_type, encoded_info.payload_type, encoded_info.encoded_timestamp,
@@ -316,7 +316,7 @@ int32_t AudioCodingModuleImpl::Encode(
void AudioCodingModuleImpl::ModifyEncoder(
rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier) {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
modifier(&encoder_stack_);
}
@@ -324,14 +324,14 @@ void AudioCodingModuleImpl::ModifyEncoder(
// the encoded buffers.
int AudioCodingModuleImpl::RegisterTransportCallback(
AudioPacketizationCallback* transport) {
- rtc::CritScope lock(&callback_crit_sect_);
+ MutexLock lock(&callback_mutex_);
packetization_callback_ = transport;
return 0;
}
// Add 10MS of raw (PCM) audio data to the encoder.
int AudioCodingModuleImpl::Add10MsData(const AudioFrame& audio_frame) {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
int r = Add10MsDataInternal(audio_frame, &input_data_);
// TODO(bugs.webrtc.org/10739): add dcheck that
// |audio_frame.absolute_capture_timestamp_ms()| always has a value.
@@ -519,7 +519,7 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
//
int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
if (HaveValidEncoder("SetPacketLossRate")) {
encoder_stack_->OnReceivedUplinkPacketLossFraction(loss_rate / 100.0);
}
@@ -531,7 +531,7 @@ int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
//
int AudioCodingModuleImpl::InitializeReceiver() {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
return InitializeReceiverSafe();
}
@@ -550,7 +550,7 @@ int AudioCodingModuleImpl::InitializeReceiverSafe() {
void AudioCodingModuleImpl::SetReceiveCodecs(
const std::map<int, SdpAudioFormat>& codecs) {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
receiver_.SetCodecs(codecs);
}
@@ -597,7 +597,7 @@ bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
}
ANAStats AudioCodingModuleImpl::GetANAStats() const {
- rtc::CritScope lock(&acm_crit_sect_);
+ MutexLock lock(&acm_mutex_);
if (encoder_stack_)
return encoder_stack_->GetANAStats();
// If no encoder is set, return default stats.
diff --git a/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
index 6c9b242e00..efd7b04a90 100644
--- a/modules/audio_coding/acm2/audio_coding_module_unittest.cc
+++ b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
@@ -39,12 +39,12 @@
#include "modules/audio_coding/neteq/tools/output_wav_file.h"
#include "modules/audio_coding/neteq/tools/packet.h"
#include "modules/audio_coding/neteq/tools/rtp_file_source.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/message_digest.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/arch.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -113,7 +113,7 @@ class PacketizationCallbackStubOldApi : public AudioPacketizationCallback {
const uint8_t* payload_data,
size_t payload_len_bytes,
int64_t absolute_capture_timestamp_ms) override {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
++num_calls_;
last_frame_type_ = frame_type;
last_payload_type_ = payload_type;
@@ -123,42 +123,42 @@ class PacketizationCallbackStubOldApi : public AudioPacketizationCallback {
}
int num_calls() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return num_calls_;
}
int last_payload_len_bytes() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return rtc::checked_cast<int>(last_payload_vec_.size());
}
AudioFrameType last_frame_type() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return last_frame_type_;
}
int last_payload_type() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return last_payload_type_;
}
uint32_t last_timestamp() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return last_timestamp_;
}
void SwapBuffers(std::vector<uint8_t>* payload) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
last_payload_vec_.swap(*payload);
}
private:
- int num_calls_ RTC_GUARDED_BY(crit_sect_);
- AudioFrameType last_frame_type_ RTC_GUARDED_BY(crit_sect_);
- int last_payload_type_ RTC_GUARDED_BY(crit_sect_);
- uint32_t last_timestamp_ RTC_GUARDED_BY(crit_sect_);
- std::vector<uint8_t> last_payload_vec_ RTC_GUARDED_BY(crit_sect_);
- rtc::CriticalSection crit_sect_;
+ int num_calls_ RTC_GUARDED_BY(mutex_);
+ AudioFrameType last_frame_type_ RTC_GUARDED_BY(mutex_);
+ int last_payload_type_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_timestamp_ RTC_GUARDED_BY(mutex_);
+ std::vector<uint8_t> last_payload_vec_ RTC_GUARDED_BY(mutex_);
+ mutable Mutex mutex_;
};
class AudioCodingModuleTestOldApi : public ::testing::Test {
@@ -252,6 +252,9 @@ class AudioCodingModuleTestOldApi : public ::testing::Test {
Clock* clock_;
};
+class AudioCodingModuleTestOldApiDeathTest
+ : public AudioCodingModuleTestOldApi {};
+
TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
AudioFrame audio_frame;
const int kSampleRateHz = 32000;
@@ -271,7 +274,7 @@ TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
// http://crbug.com/615050
#if !defined(WEBRTC_WIN) && defined(__clang__) && RTC_DCHECK_IS_ON && \
GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
+TEST_F(AudioCodingModuleTestOldApiDeathTest, FailOnZeroDesiredFrequency) {
AudioFrame audio_frame;
bool muted;
RTC_EXPECT_DEATH(acm_->PlayoutData10Ms(0, &audio_frame, &muted),
@@ -469,7 +472,7 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
virtual bool TestDone() {
if (packet_cb_.num_calls() > kNumPackets) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (pull_audio_count_ > kNumPullCalls) {
// Both conditions for completion are met. End the test.
return true;
@@ -512,7 +515,7 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
void CbInsertPacketImpl() {
SleepMs(1);
{
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
return;
}
@@ -534,7 +537,7 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
void CbPullAudioImpl() {
SleepMs(1);
{
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
// Don't let the insert thread fall behind.
if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) {
return;
@@ -555,9 +558,9 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
rtc::Event test_complete_;
int send_count_;
int insert_packet_count_;
- int pull_audio_count_ RTC_GUARDED_BY(crit_sect_);
- rtc::CriticalSection crit_sect_;
- int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(crit_sect_);
+ int pull_audio_count_ RTC_GUARDED_BY(mutex_);
+ Mutex mutex_;
+ int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(mutex_);
std::unique_ptr<SimulatedClock> fake_clock_;
};
@@ -655,7 +658,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
// run).
bool TestDone() override {
if (packet_cb_.num_calls() > kNumPackets) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (pull_audio_count_ > kNumPullCalls) {
// Both conditions for completion are met. End the test.
return true;
@@ -755,7 +758,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
rtc::Buffer encoded;
AudioEncoder::EncodedInfo info;
{
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
return true;
}
@@ -809,7 +812,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
// End the test early if a fatal failure (ASSERT_*) has occurred.
test_complete_.Set();
}
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!codec_registered_ &&
receive_packet_count_ > kRegisterAfterNumPackets) {
// Register the iSAC encoder.
@@ -828,10 +831,10 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
std::atomic<bool> quit_;
rtc::Event test_complete_;
- rtc::CriticalSection crit_sect_;
- bool codec_registered_ RTC_GUARDED_BY(crit_sect_);
- int receive_packet_count_ RTC_GUARDED_BY(crit_sect_);
- int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(crit_sect_);
+ Mutex mutex_;
+ bool codec_registered_ RTC_GUARDED_BY(mutex_);
+ int receive_packet_count_ RTC_GUARDED_BY(mutex_);
+ int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(mutex_);
std::unique_ptr<AudioEncoderIsacFloatImpl> isac_encoder_;
std::unique_ptr<SimulatedClock> fake_clock_;
test::AudioLoop audio_loop_;
diff --git a/modules/audio_coding/audio_coding.gni b/modules/audio_coding/audio_coding.gni
index 9b0aba856a..bf67d9cb8d 100644
--- a/modules/audio_coding/audio_coding.gni
+++ b/modules/audio_coding/audio_coding.gni
@@ -25,9 +25,6 @@ if (current_cpu == "arm") {
} else {
audio_codec_defines += [ "WEBRTC_CODEC_ISAC" ]
}
-if (!build_with_mozilla && !build_with_chromium) {
- audio_codec_defines += [ "WEBRTC_CODEC_RED" ]
-}
audio_coding_defines = audio_codec_defines
neteq_defines = audio_codec_defines
diff --git a/modules/audio_coding/audio_network_adaptor/config.proto b/modules/audio_coding/audio_network_adaptor/config.proto
index 90c58e5c7d..347372e8d9 100644
--- a/modules/audio_coding/audio_network_adaptor/config.proto
+++ b/modules/audio_coding/audio_network_adaptor/config.proto
@@ -1,8 +1,10 @@
syntax = "proto2";
+
+package webrtc.audio_network_adaptor.config;
+
option optimize_for = LITE_RUNTIME;
option java_package = "org.webrtc.AudioNetworkAdaptor";
option java_outer_classname = "Config";
-package webrtc.audio_network_adaptor.config;
message FecController {
message Threshold {
@@ -116,6 +118,19 @@ message FrameLengthController {
optional int32 fl_60ms_to_40ms_bandwidth_bps = 12;
}
+message FrameLengthControllerV2 {
+ // FrameLengthControllerV2 chooses the frame length by taking the target
+ // bitrate and subtracting the overhead bitrate to obtain the remaining
+ // bitrate for the payload. The chosen frame length is the shortest possible
+ // where the payload bitrate is more than |min_payload_bitrate_bps|.
+ optional int32 min_payload_bitrate_bps = 1;
+
+ // If true, uses the stable target bitrate to decide the frame length. This
+ // will result in less frame length toggling but spending more time at longer
+ // frame lengths compared to using the normal target bitrate.
+ optional bool use_slow_adaptation = 2;
+}
+
message ChannelController {
// Uplink bandwidth above which the number of encoded channels should switch
// from 1 to 2.
@@ -164,6 +179,7 @@ message Controller {
DtxController dtx_controller = 24;
BitrateController bitrate_controller = 25;
FecControllerRplrBased fec_controller_rplr_based = 26;
+ FrameLengthControllerV2 frame_length_controller_v2 = 27;
}
}
@@ -177,4 +193,3 @@ message ControllerManager {
// made.
optional float min_reordering_squared_distance = 3;
}
-
diff --git a/modules/audio_coding/audio_network_adaptor/controller_manager.cc b/modules/audio_coding/audio_network_adaptor/controller_manager.cc
index c7aad1da87..415b9fcf52 100644
--- a/modules/audio_coding/audio_network_adaptor/controller_manager.cc
+++ b/modules/audio_coding/audio_network_adaptor/controller_manager.cc
@@ -11,6 +11,7 @@
#include "modules/audio_coding/audio_network_adaptor/controller_manager.h"
#include <cmath>
+#include <memory>
#include <string>
#include <utility>
@@ -20,6 +21,7 @@
#include "modules/audio_coding/audio_network_adaptor/dtx_controller.h"
#include "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h"
#include "modules/audio_coding/audio_network_adaptor/frame_length_controller.h"
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
#include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h"
#include "rtc_base/ignore_wundef.h"
#include "rtc_base/logging.h"
@@ -197,6 +199,14 @@ std::unique_ptr<BitrateController> CreateBitrateController(
initial_bitrate_bps, initial_frame_length_ms,
fl_increase_overhead_offset, fl_decrease_overhead_offset)));
}
+
+std::unique_ptr<FrameLengthControllerV2> CreateFrameLengthControllerV2(
+ const audio_network_adaptor::config::FrameLengthControllerV2& config,
+ rtc::ArrayView<const int> encoder_frame_lengths_ms) {
+ return std::make_unique<FrameLengthControllerV2>(
+ encoder_frame_lengths_ms, config.min_payload_bitrate_bps(),
+ config.use_slow_adaptation());
+}
#endif // WEBRTC_ENABLE_PROTOBUF
} // namespace
@@ -277,6 +287,11 @@ std::unique_ptr<ControllerManager> ControllerManagerImpl::Create(
controller_config.bitrate_controller(), initial_bitrate_bps,
initial_frame_length_ms);
break;
+ case audio_network_adaptor::config::Controller::kFrameLengthControllerV2:
+ controller = CreateFrameLengthControllerV2(
+ controller_config.frame_length_controller_v2(),
+ encoder_frame_lengths_ms);
+ break;
default:
RTC_NOTREACHED();
}
diff --git a/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc b/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
index 4286434b5b..c71bbc9e2a 100644
--- a/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
+++ b/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
@@ -260,6 +260,14 @@ void AddFrameLengthControllerConfig(
kChracteristicPacketLossFraction[1]);
}
+void AddFrameLengthControllerV2Config(
+ audio_network_adaptor::config::ControllerManager* config) {
+ auto controller =
+ config->add_controllers()->mutable_frame_length_controller_v2();
+ controller->set_min_payload_bitrate_bps(16000);
+ controller->set_use_slow_adaptation(true);
+}
+
constexpr int kInitialBitrateBps = 24000;
constexpr size_t kIntialChannelsToEncode = 1;
constexpr bool kInitialDtxEnabled = true;
@@ -464,6 +472,14 @@ TEST(ControllerManagerTest, CreateFromConfigStringAndCheckReordering) {
ControllerType::CHANNEL, ControllerType::DTX,
ControllerType::BIT_RATE});
}
+
+TEST(ControllerManagerTest, CreateFrameLengthControllerV2) {
+ audio_network_adaptor::config::ControllerManager config;
+ AddFrameLengthControllerV2Config(&config);
+ auto states = CreateControllerManager(config.SerializeAsString());
+ auto controllers = states.controller_manager->GetControllers();
+ EXPECT_TRUE(controllers.size() == 1);
+}
#endif // WEBRTC_ENABLE_PROTOBUF
} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc
new file mode 100644
index 0000000000..36fc10ba82
--- /dev/null
+++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
+
+#include <algorithm>
+
+#include "absl/algorithm/container.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+int OverheadBps(int overhead_bytes_per_packet, int frame_length_ms) {
+ return overhead_bytes_per_packet * 8 * 1000 / frame_length_ms;
+}
+
+} // namespace
+
+FrameLengthControllerV2::FrameLengthControllerV2(
+ rtc::ArrayView<const int> encoder_frame_lengths_ms,
+ int min_payload_bitrate_bps,
+ bool use_slow_adaptation)
+ : encoder_frame_lengths_ms_(encoder_frame_lengths_ms.begin(),
+ encoder_frame_lengths_ms.end()),
+ min_payload_bitrate_bps_(min_payload_bitrate_bps),
+ use_slow_adaptation_(use_slow_adaptation) {
+ RTC_CHECK(!encoder_frame_lengths_ms_.empty());
+ absl::c_sort(encoder_frame_lengths_ms_);
+}
+
+void FrameLengthControllerV2::UpdateNetworkMetrics(
+ const NetworkMetrics& network_metrics) {
+ if (network_metrics.target_audio_bitrate_bps) {
+ target_bitrate_bps_ = network_metrics.target_audio_bitrate_bps;
+ }
+ if (network_metrics.overhead_bytes_per_packet) {
+ overhead_bytes_per_packet_ = network_metrics.overhead_bytes_per_packet;
+ }
+ if (network_metrics.uplink_bandwidth_bps) {
+ uplink_bandwidth_bps_ = network_metrics.uplink_bandwidth_bps;
+ }
+}
+
+void FrameLengthControllerV2::MakeDecision(AudioEncoderRuntimeConfig* config) {
+ if (!target_bitrate_bps_ || !overhead_bytes_per_packet_ ||
+ !uplink_bandwidth_bps_) {
+ return;
+ }
+
+ auto it =
+ absl::c_find_if(encoder_frame_lengths_ms_, [&](int frame_length_ms) {
+ int target = use_slow_adaptation_ ? *uplink_bandwidth_bps_
+ : *target_bitrate_bps_;
+ return target -
+ OverheadBps(*overhead_bytes_per_packet_, frame_length_ms) >
+ min_payload_bitrate_bps_;
+ });
+
+ // Longest frame length is chosen if none match our criteria.
+ config->frame_length_ms = it != encoder_frame_lengths_ms_.end()
+ ? *it
+ : encoder_frame_lengths_ms_.back();
+}
+
+} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h
new file mode 100644
index 0000000000..d7102b0b44
--- /dev/null
+++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
+#define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "modules/audio_coding/audio_network_adaptor/controller.h"
+#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
+
+namespace webrtc {
+
+class FrameLengthControllerV2 final : public Controller {
+ public:
+ FrameLengthControllerV2(rtc::ArrayView<const int> encoder_frame_lengths_ms,
+ int min_payload_bitrate_bps,
+ bool use_slow_adaptation);
+
+ void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override;
+
+ void MakeDecision(AudioEncoderRuntimeConfig* config) override;
+
+ private:
+ std::vector<int> encoder_frame_lengths_ms_;
+ const int min_payload_bitrate_bps_;
+ const bool use_slow_adaptation_;
+
+ absl::optional<int> uplink_bandwidth_bps_;
+ absl::optional<int> target_bitrate_bps_;
+ absl::optional<int> overhead_bytes_per_packet_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc
new file mode 100644
index 0000000000..1c88f47c58
--- /dev/null
+++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "modules/audio_coding/audio_network_adaptor/controller.h"
+#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+constexpr int kANASupportedFrameLengths[] = {20, 40, 60, 120};
+constexpr int kMinPayloadBitrateBps = 16000;
+
+} // namespace
+
+class FrameLengthControllerV2Test : public testing::Test {
+ protected:
+ AudioEncoderRuntimeConfig GetDecision() {
+ AudioEncoderRuntimeConfig config;
+ controller_->MakeDecision(&config);
+ return config;
+ }
+
+ void SetOverhead(int overhead_bytes_per_packet) {
+ overhead_bytes_per_packet_ = overhead_bytes_per_packet;
+ Controller::NetworkMetrics metrics;
+ metrics.overhead_bytes_per_packet = overhead_bytes_per_packet;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void SetTargetBitrate(int target_audio_bitrate_bps) {
+ target_audio_bitrate_bps_ = target_audio_bitrate_bps;
+ Controller::NetworkMetrics metrics;
+ metrics.target_audio_bitrate_bps = target_audio_bitrate_bps;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void SetUplinkBandwidth(int uplink_bandwidth_bps) {
+ Controller::NetworkMetrics metrics;
+ metrics.uplink_bandwidth_bps = uplink_bandwidth_bps;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void ExpectFrameLengthDecision(int expected_frame_length_ms) {
+ auto config = GetDecision();
+ EXPECT_EQ(*config.frame_length_ms, expected_frame_length_ms);
+ }
+
+ std::unique_ptr<FrameLengthControllerV2> controller_ =
+ std::make_unique<FrameLengthControllerV2>(kANASupportedFrameLengths,
+ kMinPayloadBitrateBps,
+ /*use_slow_adaptation=*/false);
+ absl::optional<int> target_audio_bitrate_bps_;
+ absl::optional<int> overhead_bytes_per_packet_;
+};
+
+// Don't return any decision if we haven't received all required network
+// metrics.
+TEST_F(FrameLengthControllerV2Test, RequireNetworkMetrics) {
+ auto config = GetDecision();
+ EXPECT_FALSE(config.bitrate_bps);
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetOverhead(30);
+ config = GetDecision();
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetTargetBitrate(32000);
+ config = GetDecision();
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetUplinkBandwidth(32000);
+ config = GetDecision();
+ EXPECT_TRUE(config.frame_length_ms);
+}
+
+TEST_F(FrameLengthControllerV2Test, UseFastAdaptation) {
+ SetOverhead(50);
+ SetTargetBitrate(50000);
+ SetUplinkBandwidth(50000);
+ ExpectFrameLengthDecision(20);
+
+ SetTargetBitrate(20000);
+ ExpectFrameLengthDecision(120);
+
+ SetTargetBitrate(30000);
+ ExpectFrameLengthDecision(40);
+
+ SetTargetBitrate(25000);
+ ExpectFrameLengthDecision(60);
+}
+
+TEST_F(FrameLengthControllerV2Test, UseSlowAdaptation) {
+ controller_ = std::make_unique<FrameLengthControllerV2>(
+ kANASupportedFrameLengths, kMinPayloadBitrateBps,
+ /*use_slow_adaptation=*/true);
+ SetOverhead(50);
+ SetTargetBitrate(50000);
+ SetUplinkBandwidth(20000);
+ ExpectFrameLengthDecision(120);
+
+ SetUplinkBandwidth(30000);
+ ExpectFrameLengthDecision(40);
+
+ SetUplinkBandwidth(40000);
+ ExpectFrameLengthDecision(20);
+}
+
+} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h b/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
index 8c048496ca..26a9061745 100644
--- a/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
+++ b/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
@@ -18,27 +18,38 @@ namespace webrtc {
class MockAudioNetworkAdaptor : public AudioNetworkAdaptor {
public:
- virtual ~MockAudioNetworkAdaptor() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockAudioNetworkAdaptor() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD1(SetUplinkBandwidth, void(int uplink_bandwidth_bps));
+ MOCK_METHOD(void, SetUplinkBandwidth, (int uplink_bandwidth_bps), (override));
- MOCK_METHOD1(SetUplinkPacketLossFraction,
- void(float uplink_packet_loss_fraction));
+ MOCK_METHOD(void,
+ SetUplinkPacketLossFraction,
+ (float uplink_packet_loss_fraction),
+ (override));
- MOCK_METHOD1(SetRtt, void(int rtt_ms));
+ MOCK_METHOD(void, SetRtt, (int rtt_ms), (override));
- MOCK_METHOD1(SetTargetAudioBitrate, void(int target_audio_bitrate_bps));
+ MOCK_METHOD(void,
+ SetTargetAudioBitrate,
+ (int target_audio_bitrate_bps),
+ (override));
- MOCK_METHOD1(SetOverhead, void(size_t overhead_bytes_per_packet));
+ MOCK_METHOD(void,
+ SetOverhead,
+ (size_t overhead_bytes_per_packet),
+ (override));
- MOCK_METHOD0(GetEncoderRuntimeConfig, AudioEncoderRuntimeConfig());
+ MOCK_METHOD(AudioEncoderRuntimeConfig,
+ GetEncoderRuntimeConfig,
+ (),
+ (override));
- MOCK_METHOD1(StartDebugDump, void(FILE* file_handle));
+ MOCK_METHOD(void, StartDebugDump, (FILE * file_handle), (override));
- MOCK_METHOD0(StopDebugDump, void());
+ MOCK_METHOD(void, StopDebugDump, (), (override));
- MOCK_CONST_METHOD0(GetStats, ANAStats());
+ MOCK_METHOD(ANAStats, GetStats, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h b/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
index df28e9e26f..de554c0517 100644
--- a/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
+++ b/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
@@ -18,11 +18,16 @@ namespace webrtc {
class MockController : public Controller {
public:
- virtual ~MockController() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD1(UpdateNetworkMetrics,
- void(const NetworkMetrics& network_metrics));
- MOCK_METHOD1(MakeDecision, void(AudioEncoderRuntimeConfig* config));
+ ~MockController() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void,
+ UpdateNetworkMetrics,
+ (const NetworkMetrics& network_metrics),
+ (override));
+ MOCK_METHOD(void,
+ MakeDecision,
+ (AudioEncoderRuntimeConfig * config),
+ (override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h b/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
index 8d410a742d..9e2fa466fc 100644
--- a/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
+++ b/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
@@ -20,12 +20,13 @@ namespace webrtc {
class MockControllerManager : public ControllerManager {
public:
- virtual ~MockControllerManager() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD1(
- GetSortedControllers,
- std::vector<Controller*>(const Controller::NetworkMetrics& metrics));
- MOCK_CONST_METHOD0(GetControllers, std::vector<Controller*>());
+ ~MockControllerManager() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(std::vector<Controller*>,
+ GetSortedControllers,
+ (const Controller::NetworkMetrics& metrics),
+ (override));
+ MOCK_METHOD(std::vector<Controller*>, GetControllers, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h b/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
index 06650abbd6..0c6a9efe1d 100644
--- a/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
+++ b/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
@@ -18,20 +18,24 @@ namespace webrtc {
class MockDebugDumpWriter : public DebugDumpWriter {
public:
- virtual ~MockDebugDumpWriter() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockDebugDumpWriter() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD2(DumpEncoderRuntimeConfig,
- void(const AudioEncoderRuntimeConfig& config,
- int64_t timestamp));
- MOCK_METHOD2(DumpNetworkMetrics,
- void(const Controller::NetworkMetrics& metrics,
- int64_t timestamp));
+ MOCK_METHOD(void,
+ DumpEncoderRuntimeConfig,
+ (const AudioEncoderRuntimeConfig& config, int64_t timestamp),
+ (override));
+ MOCK_METHOD(void,
+ DumpNetworkMetrics,
+ (const Controller::NetworkMetrics& metrics, int64_t timestamp),
+ (override));
#if WEBRTC_ENABLE_PROTOBUF
- MOCK_METHOD2(DumpControllerManagerConfig,
- void(const audio_network_adaptor::config::ControllerManager&
- controller_manager_config,
- int64_t timestamp));
+ MOCK_METHOD(void,
+ DumpControllerManagerConfig,
+ (const audio_network_adaptor::config::ControllerManager&
+ controller_manager_config,
+ int64_t timestamp),
+ (override));
#endif
};
diff --git a/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc b/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
index 9984049d50..dc3aec0b18 100644
--- a/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
+++ b/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
@@ -621,7 +621,7 @@ TEST(ThresholdCurveTest, NearlyIdenticalCurvesSecondContinuesOnOtherRightSide) {
// The higher-left point must be given as the first point, and the lower-right
// point must be given as the second.
// This necessarily produces a non-positive slope.
-TEST(ThresholdCurveTest, WrongOrderPoints) {
+TEST(ThresholdCurveDeathTest, WrongOrderPoints) {
std::unique_ptr<ThresholdCurve> curve;
constexpr ThresholdCurve::Point left{5, 10};
constexpr ThresholdCurve::Point right{10, 5};
diff --git a/modules/audio_coding/codecs/cng/cng_unittest.cc b/modules/audio_coding/codecs/cng/cng_unittest.cc
index 80349e2504..0e6ab79394 100644
--- a/modules/audio_coding/codecs/cng/cng_unittest.cc
+++ b/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -40,6 +40,8 @@ class CngTest : public ::testing::Test {
int16_t speech_data_[640]; // Max size of CNG internal buffers.
};
+class CngDeathTest : public CngTest {};
+
void CngTest::SetUp() {
FILE* input_file;
const std::string file_name =
@@ -69,7 +71,7 @@ void CngTest::TestCngEncode(int sample_rate_hz, int quality) {
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Create CNG encoder, init with faulty values, free CNG encoder.
-TEST_F(CngTest, CngInitFail) {
+TEST_F(CngDeathTest, CngInitFail) {
// Call with too few parameters.
EXPECT_DEATH(
{
@@ -86,7 +88,7 @@ TEST_F(CngTest, CngInitFail) {
}
// Encode Cng with too long input vector.
-TEST_F(CngTest, CngEncodeTooLong) {
+TEST_F(CngDeathTest, CngEncodeTooLong) {
rtc::Buffer sid_data;
// Create encoder.
diff --git a/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
index a3b8e76a30..d99e9c893f 100644
--- a/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
+++ b/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
@@ -19,6 +19,7 @@
#include "api/scoped_refptr.h"
#include "api/units/time_delta.h"
#include "rtc_base/constructor_magic.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
@@ -48,6 +49,13 @@ class AudioEncoderIsacT final : public AudioEncoder {
size_t Num10MsFramesInNextPacket() const override;
size_t Max10MsFramesInAPacket() const override;
int GetTargetBitrate() const override;
+ void SetTargetBitrate(int target_bps) override;
+ void OnReceivedTargetAudioBitrate(int target_bps) override;
+ void OnReceivedUplinkBandwidth(
+ int target_audio_bitrate_bps,
+ absl::optional<int64_t> bwe_period_ms) override;
+ void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override;
+ void OnReceivedOverhead(size_t overhead_bytes_per_packet) override;
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
@@ -60,7 +68,13 @@ class AudioEncoderIsacT final : public AudioEncoder {
// STREAM_MAXW16_60MS for iSAC fix (60 ms).
static const size_t kSufficientEncodeBufferSizeBytes = 400;
- static const int kDefaultBitRate = 32000;
+ static constexpr int kDefaultBitRate = 32000;
+ static constexpr int kMinBitrateBps = 10000;
+ static constexpr int MaxBitrateBps(int sample_rate_hz) {
+ return sample_rate_hz == 32000 ? 56000 : 32000;
+ }
+
+ void SetTargetBitrate(int target_bps, bool subtract_per_packet_overhead);
// Recreate the iSAC encoder instance with the given settings, and save them.
void RecreateEncoderInstance(const Config& config);
@@ -77,6 +91,15 @@ class AudioEncoderIsacT final : public AudioEncoder {
// Timestamp of the previously encoded packet.
uint32_t last_encoded_timestamp_;
+ // Cache the value of the "WebRTC-SendSideBwe-WithOverhead" field trial.
+ const bool send_side_bwe_with_overhead_ =
+ field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead");
+
+ // When we send a packet, expect this many bytes of headers to be added to it.
+ // Start out with a reasonable default that we can use until we receive a real
+ // value.
+ DataSize overhead_per_packet_ = DataSize::Bytes(28);
+
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIsacT);
};
diff --git a/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
index 9ddb94326d..0bde3f797f 100644
--- a/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
+++ b/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
@@ -12,6 +12,7 @@
#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_
#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_minmax.h"
namespace webrtc {
@@ -81,6 +82,51 @@ int AudioEncoderIsacT<T>::GetTargetBitrate() const {
}
template <typename T>
+void AudioEncoderIsacT<T>::SetTargetBitrate(int target_bps) {
+ // Set target bitrate directly without subtracting per-packet overhead,
+ // because that's what AudioEncoderOpus does.
+ SetTargetBitrate(target_bps,
+ /*subtract_per_packet_overhead=*/false);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedTargetAudioBitrate(int target_bps) {
+ // Set target bitrate directly without subtracting per-packet overhead,
+ // because that's what AudioEncoderOpus does.
+ SetTargetBitrate(target_bps,
+ /*subtract_per_packet_overhead=*/false);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedUplinkBandwidth(
+ int target_audio_bitrate_bps,
+ absl::optional<int64_t> /*bwe_period_ms*/) {
+ // Set target bitrate, subtracting the per-packet overhead if
+ // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what
+ // AudioEncoderOpus does.
+ SetTargetBitrate(
+ target_audio_bitrate_bps,
+ /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedUplinkAllocation(
+ BitrateAllocationUpdate update) {
+ // Set target bitrate, subtracting the per-packet overhead if
+ // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what
+ // AudioEncoderOpus does.
+ SetTargetBitrate(
+ update.target_bitrate.bps<int>(),
+ /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedOverhead(
+ size_t overhead_bytes_per_packet) {
+ overhead_per_packet_ = DataSize::Bytes(overhead_bytes_per_packet);
+}
+
+template <typename T>
AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeImpl(
uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
@@ -127,6 +173,21 @@ AudioEncoderIsacT<T>::GetFrameLengthRange() const {
}
template <typename T>
+void AudioEncoderIsacT<T>::SetTargetBitrate(int target_bps,
+ bool subtract_per_packet_overhead) {
+ if (subtract_per_packet_overhead) {
+ const DataRate overhead_rate =
+ overhead_per_packet_ / TimeDelta::Millis(config_.frame_size_ms);
+ target_bps -= overhead_rate.bps();
+ }
+ target_bps = rtc::SafeClamp(target_bps, kMinBitrateBps,
+ MaxBitrateBps(config_.sample_rate_hz));
+ int result = T::Control(isac_state_, target_bps, config_.frame_size_ms);
+ RTC_DCHECK_EQ(result, 0);
+ config_.bit_rate = target_bps;
+}
+
+template <typename T>
void AudioEncoderIsacT<T>::RecreateEncoderInstance(const Config& config) {
RTC_CHECK(config.IsOk());
packet_in_progress_ = false;
diff --git a/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc b/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
index c4d7ab8fa8..a2e1e088e6 100644
--- a/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
+++ b/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
@@ -9,6 +9,7 @@
*/
#include <array>
+#include <map>
#include <memory>
#include <vector>
@@ -159,6 +160,33 @@ TEST_P(EncoderTest, TestDifferentBitrates) {
EXPECT_LT(num_bytes_low, num_bytes_high);
}
+// Encodes an input audio sequence first with a low, then with a high target
+// bitrate *using the same encoder* and checks that the number of emitted bytes
+// in the first case is less than in the second case.
+TEST_P(EncoderTest, TestDynamicBitrateChange) {
+ constexpr int kLowBps = 20000;
+ constexpr int kHighBps = 25000;
+ constexpr int kStartBps = 30000;
+ auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(),
+ GetFrameSizeMs(), kStartBps);
+ std::map<int, int> num_bytes;
+ constexpr int kNumFrames = 200; // 2 seconds.
+ for (int bitrate_bps : {kLowBps, kHighBps}) {
+ auto pcm_file = GetPcmTestFileReader(GetSampleRateHz());
+ encoder->OnReceivedTargetAudioBitrate(bitrate_bps);
+ for (int i = 0; i < kNumFrames; ++i) {
+ AudioFrame in;
+ pcm_file->Read10MsData(in);
+ rtc::Buffer buf;
+ encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &buf);
+ num_bytes[bitrate_bps] += buf.size();
+ }
+ }
+ // kHighBps / kLowBps == 1.25, so require the high-bitrate run to produce at
+ // least 1.2 times the number of bytes.
+ EXPECT_LT(1.2 * num_bytes[kLowBps], num_bytes[kHighBps]);
+}
+
// Checks that, given a target bitrate, the encoder does not overshoot too much.
TEST_P(EncoderTest, DoNotOvershootTargetBitrate) {
for (int bitrate_bps : {10000, 15000, 20000, 26000, 32000}) {
diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
index ef32f4ce02..2b16920714 100644
--- a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
+++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
@@ -66,46 +66,7 @@ constexpr int kOpusSupportedFrameLengths[] = {10, 20, 40, 60};
// PacketLossFractionSmoother uses an exponential filter with a time constant
// of -1.0 / ln(0.9999) = 10000 ms.
constexpr float kAlphaForPacketLossFractionSmoother = 0.9999f;
-
-// Optimize the loss rate to configure Opus. Basically, optimized loss rate is
-// the input loss rate rounded down to various levels, because a robustly good
-// audio quality is achieved by lowering the packet loss down.
-// Additionally, to prevent toggling, margins are used, i.e., when jumping to
-// a loss rate from below, a higher threshold is used than jumping to the same
-// level from above.
-float OptimizePacketLossRate(float new_loss_rate, float old_loss_rate) {
- RTC_DCHECK_GE(new_loss_rate, 0.0f);
- RTC_DCHECK_LE(new_loss_rate, 1.0f);
- RTC_DCHECK_GE(old_loss_rate, 0.0f);
- RTC_DCHECK_LE(old_loss_rate, 1.0f);
- constexpr float kPacketLossRate20 = 0.20f;
- constexpr float kPacketLossRate10 = 0.10f;
- constexpr float kPacketLossRate5 = 0.05f;
- constexpr float kPacketLossRate1 = 0.01f;
- constexpr float kLossRate20Margin = 0.02f;
- constexpr float kLossRate10Margin = 0.01f;
- constexpr float kLossRate5Margin = 0.01f;
- if (new_loss_rate >=
- kPacketLossRate20 +
- kLossRate20Margin *
- (kPacketLossRate20 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate20;
- } else if (new_loss_rate >=
- kPacketLossRate10 +
- kLossRate10Margin *
- (kPacketLossRate10 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate10;
- } else if (new_loss_rate >=
- kPacketLossRate5 +
- kLossRate5Margin *
- (kPacketLossRate5 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate5;
- } else if (new_loss_rate >= kPacketLossRate1) {
- return kPacketLossRate1;
- } else {
- return 0.0f;
- }
-}
+constexpr float kMaxPacketLossFraction = 0.2f;
int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) {
const int bitrate = [&] {
@@ -201,35 +162,6 @@ int GetBitrateBps(const AudioEncoderOpusConfig& config) {
return *config.bitrate_bps;
}
-bool IsValidPacketLossRate(int value) {
- return value >= 0 && value <= 100;
-}
-
-float ToFraction(int percent) {
- return static_cast<float>(percent) / 100;
-}
-
-float GetMinPacketLossRate() {
- constexpr char kPacketLossFieldTrial[] = "WebRTC-Audio-OpusMinPacketLossRate";
- const bool use_opus_min_packet_loss_rate =
- webrtc::field_trial::IsEnabled(kPacketLossFieldTrial);
- if (use_opus_min_packet_loss_rate) {
- const std::string field_trial_string =
- webrtc::field_trial::FindFullName(kPacketLossFieldTrial);
- constexpr int kDefaultMinPacketLossRate = 1;
- int value = kDefaultMinPacketLossRate;
- if (sscanf(field_trial_string.c_str(), "Enabled-%d", &value) == 1 &&
- !IsValidPacketLossRate(value)) {
- RTC_LOG(LS_WARNING) << "Invalid parameter for " << kPacketLossFieldTrial
- << ", using default value: "
- << kDefaultMinPacketLossRate;
- value = kDefaultMinPacketLossRate;
- }
- return ToFraction(value);
- }
- return 0.0;
-}
-
std::vector<float> GetBitrateMultipliers() {
constexpr char kBitrateMultipliersName[] =
"WebRTC-Audio-OpusBitrateMultipliers";
@@ -425,19 +357,17 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl(
: payload_type_(payload_type),
send_side_bwe_with_overhead_(
webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
- use_stable_target_for_adaptation_(webrtc::field_trial::IsEnabled(
+ use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled(
"WebRTC-Audio-StableTargetAdaptation")),
adjust_bandwidth_(
webrtc::field_trial::IsEnabled("WebRTC-AdjustOpusBandwidth")),
bitrate_changed_(true),
bitrate_multipliers_(GetBitrateMultipliers()),
packet_loss_rate_(0.0),
- min_packet_loss_rate_(GetMinPacketLossRate()),
inst_(nullptr),
packet_loss_fraction_smoother_(new PacketLossFractionSmoother()),
audio_network_adaptor_creator_(audio_network_adaptor_creator),
- bitrate_smoother_(std::move(bitrate_smoother)),
- consecutive_dtx_frames_(0) {
+ bitrate_smoother_(std::move(bitrate_smoother)) {
RTC_DCHECK(0 <= payload_type && payload_type <= 127);
// Sanity check of the redundant payload type field that we want to get rid
@@ -541,14 +471,14 @@ void AudioEncoderOpusImpl::DisableAudioNetworkAdaptor() {
void AudioEncoderOpusImpl::OnReceivedUplinkPacketLossFraction(
float uplink_packet_loss_fraction) {
- if (!audio_network_adaptor_) {
- packet_loss_fraction_smoother_->AddSample(uplink_packet_loss_fraction);
- float average_fraction_loss = packet_loss_fraction_smoother_->GetAverage();
- return SetProjectedPacketLossRate(average_fraction_loss);
+ if (audio_network_adaptor_) {
+ audio_network_adaptor_->SetUplinkPacketLossFraction(
+ uplink_packet_loss_fraction);
+ ApplyAudioNetworkAdaptor();
}
- audio_network_adaptor_->SetUplinkPacketLossFraction(
- uplink_packet_loss_fraction);
- ApplyAudioNetworkAdaptor();
+ packet_loss_fraction_smoother_->AddSample(uplink_packet_loss_fraction);
+ float average_fraction_loss = packet_loss_fraction_smoother_->GetAverage();
+ SetProjectedPacketLossRate(average_fraction_loss);
}
void AudioEncoderOpusImpl::OnReceivedTargetAudioBitrate(
@@ -659,6 +589,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl(
Num10msFramesPerPacket() * SamplesPer10msFrame());
const size_t max_encoded_bytes = SufficientOutputBufferSize();
+ const size_t start_offset_bytes = encoded->size();
EncodedInfo info;
info.encoded_bytes = encoded->AppendData(
max_encoded_bytes, [&](rtc::ArrayView<uint8_t> encoded) {
@@ -673,8 +604,6 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl(
});
input_buffer_.clear();
- bool dtx_frame = (info.encoded_bytes <= 2);
-
// Will use new packet size for next encoding.
config_.frame_size_ms = next_frame_length_ms_;
@@ -689,14 +618,18 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl(
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
info.send_even_if_empty = true; // Allows Opus to send empty packets.
- // After 20 DTX frames (MAX_CONSECUTIVE_DTX) Opus will send a frame
- // coding the background noise. Avoid flagging this frame as speech
- // (even though there is a probability of the frame being speech).
- info.speech = !dtx_frame && (consecutive_dtx_frames_ != 20);
info.encoder_type = CodecType::kOpus;
- // Increase or reset DTX counter.
- consecutive_dtx_frames_ = (dtx_frame) ? (consecutive_dtx_frames_ + 1) : (0);
+ // Extract the VAD result from the encoded packet.
+ int has_voice = WebRtcOpus_PacketHasVoiceActivity(
+ &encoded->data()[start_offset_bytes], info.encoded_bytes);
+ if (has_voice == -1) {
+ // CELT mode packet or there was an error. This had set the speech flag to
+ // true historically.
+ info.speech = true;
+ } else {
+ info.speech = has_voice;
+ }
return info;
}
@@ -789,8 +722,7 @@ void AudioEncoderOpusImpl::SetNumChannelsToEncode(
}
void AudioEncoderOpusImpl::SetProjectedPacketLossRate(float fraction) {
- fraction = OptimizePacketLossRate(fraction, packet_loss_rate_);
- fraction = std::max(fraction, min_packet_loss_rate_);
+ fraction = std::min(std::max(fraction, 0.0f), kMaxPacketLossFraction);
if (packet_loss_rate_ != fraction) {
packet_loss_rate_ = fraction;
RTC_CHECK_EQ(
@@ -828,10 +760,6 @@ void AudioEncoderOpusImpl::ApplyAudioNetworkAdaptor() {
SetTargetBitrate(*config.bitrate_bps);
if (config.frame_length_ms)
SetFrameLength(*config.frame_length_ms);
- if (config.enable_fec)
- SetFec(*config.enable_fec);
- if (config.uplink_packet_loss_fraction)
- SetProjectedPacketLossRate(*config.uplink_packet_loss_fraction);
if (config.enable_dtx)
SetDtx(*config.enable_dtx);
if (config.num_channels)
diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/modules/audio_coding/codecs/opus/audio_encoder_opus.h
index 540413290d..dc955cec23 100644
--- a/modules/audio_coding/codecs/opus/audio_encoder_opus.h
+++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.h
@@ -160,7 +160,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder {
// 1 kbps range.
std::vector<float> bitrate_multipliers_;
float packet_loss_rate_;
- const float min_packet_loss_rate_;
std::vector<int16_t> input_buffer_;
OpusEncInst* inst_;
uint32_t first_timestamp_in_buffer_;
@@ -173,7 +172,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder {
absl::optional<size_t> overhead_bytes_per_packet_;
const std::unique_ptr<SmoothingFilter> bitrate_smoother_;
absl::optional<int64_t> bitrate_smoother_last_update_time_;
- int consecutive_dtx_frames_;
friend struct AudioEncoderOpus;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpusImpl);
diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
index b469885c1f..1cbc4a3ff7 100644
--- a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
+++ b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
@@ -93,17 +93,13 @@ std::unique_ptr<AudioEncoderOpusStates> CreateCodec(int sample_rate_hz,
AudioEncoderRuntimeConfig CreateEncoderRuntimeConfig() {
constexpr int kBitrate = 40000;
constexpr int kFrameLength = 60;
- constexpr bool kEnableFec = true;
constexpr bool kEnableDtx = false;
constexpr size_t kNumChannels = 1;
- constexpr float kPacketLossFraction = 0.1f;
AudioEncoderRuntimeConfig config;
config.bitrate_bps = kBitrate;
config.frame_length_ms = kFrameLength;
- config.enable_fec = kEnableFec;
config.enable_dtx = kEnableDtx;
config.num_channels = kNumChannels;
- config.uplink_packet_loss_fraction = kPacketLossFraction;
return config;
}
@@ -111,7 +107,6 @@ void CheckEncoderRuntimeConfig(const AudioEncoderOpusImpl* encoder,
const AudioEncoderRuntimeConfig& config) {
EXPECT_EQ(*config.bitrate_bps, encoder->GetTargetBitrate());
EXPECT_EQ(*config.frame_length_ms, encoder->next_frame_length_ms());
- EXPECT_EQ(*config.enable_fec, encoder->fec_enabled());
EXPECT_EQ(*config.enable_dtx, encoder->GetDtx());
EXPECT_EQ(*config.num_channels, encoder->num_channels_to_encode());
}
@@ -222,84 +217,6 @@ TEST_P(AudioEncoderOpusTest,
}
}
-namespace {
-
-// Returns a vector with the n evenly-spaced numbers a, a + (b - a)/(n - 1),
-// ..., b.
-std::vector<float> IntervalSteps(float a, float b, size_t n) {
- RTC_DCHECK_GT(n, 1u);
- const float step = (b - a) / (n - 1);
- std::vector<float> points;
- points.push_back(a);
- for (size_t i = 1; i < n - 1; ++i)
- points.push_back(a + i * step);
- points.push_back(b);
- return points;
-}
-
-// Sets the packet loss rate to each number in the vector in turn, and verifies
-// that the loss rate as reported by the encoder is |expected_return| for all
-// of them.
-void TestSetPacketLossRate(const AudioEncoderOpusStates* states,
- const std::vector<float>& losses,
- float expected_return) {
- // |kSampleIntervalMs| is chosen to ease the calculation since
- // 0.9999 ^ 184198 = 1e-8. Which minimizes the effect of
- // PacketLossFractionSmoother used in AudioEncoderOpus.
- constexpr int64_t kSampleIntervalMs = 184198;
- for (float loss : losses) {
- states->encoder->OnReceivedUplinkPacketLossFraction(loss);
- states->fake_clock->AdvanceTime(TimeDelta::Millis(kSampleIntervalMs));
- EXPECT_FLOAT_EQ(expected_return, states->encoder->packet_loss_rate());
- }
-}
-
-} // namespace
-
-TEST_P(AudioEncoderOpusTest, PacketLossRateOptimized) {
- auto states = CreateCodec(sample_rate_hz_, 1);
- auto I = [](float a, float b) { return IntervalSteps(a, b, 10); };
- constexpr float eps = 1e-8f;
-
- // Note that the order of the following calls is critical.
-
- // clang-format off
- TestSetPacketLossRate(states.get(), I(0.00f , 0.01f - eps), 0.00f);
- TestSetPacketLossRate(states.get(), I(0.01f + eps, 0.06f - eps), 0.01f);
- TestSetPacketLossRate(states.get(), I(0.06f + eps, 0.11f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.11f + eps, 0.22f - eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.22f + eps, 1.00f ), 0.20f);
-
- TestSetPacketLossRate(states.get(), I(1.00f , 0.18f + eps), 0.20f);
- TestSetPacketLossRate(states.get(), I(0.18f - eps, 0.09f + eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.09f - eps, 0.04f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.04f - eps, 0.01f + eps), 0.01f);
- TestSetPacketLossRate(states.get(), I(0.01f - eps, 0.00f ), 0.00f);
- // clang-format on
-}
-
-TEST_P(AudioEncoderOpusTest, PacketLossRateLowerBounded) {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-5/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- auto I = [](float a, float b) { return IntervalSteps(a, b, 10); };
- constexpr float eps = 1e-8f;
-
- // clang-format off
- TestSetPacketLossRate(states.get(), I(0.00f , 0.01f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.01f + eps, 0.06f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.06f + eps, 0.11f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.11f + eps, 0.22f - eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.22f + eps, 1.00f ), 0.20f);
-
- TestSetPacketLossRate(states.get(), I(1.00f , 0.18f + eps), 0.20f);
- TestSetPacketLossRate(states.get(), I(0.18f - eps, 0.09f + eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.09f - eps, 0.04f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.04f - eps, 0.01f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.01f - eps, 0.00f ), 0.05f);
- // clang-format on
-}
-
TEST_P(AudioEncoderOpusTest, SetReceiverFrameLengthRange) {
auto states = CreateCodec(sample_rate_hz_, 2);
// Before calling to |SetReceiverFrameLengthRange|,
@@ -337,6 +254,8 @@ TEST_P(AudioEncoderOpusTest,
TEST_P(AudioEncoderOpusTest,
InvokeAudioNetworkAdaptorOnReceivedUplinkBandwidth) {
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-StableTargetAdaptation/Disabled/");
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
@@ -358,6 +277,28 @@ TEST_P(AudioEncoderOpusTest,
CheckEncoderRuntimeConfig(states->encoder.get(), config);
}
+TEST_P(AudioEncoderOpusTest,
+ InvokeAudioNetworkAdaptorOnReceivedUplinkAllocation) {
+ auto states = CreateCodec(sample_rate_hz_, 2);
+ states->encoder->EnableAudioNetworkAdaptor("", nullptr);
+
+ auto config = CreateEncoderRuntimeConfig();
+ EXPECT_CALL(*states->mock_audio_network_adaptor, GetEncoderRuntimeConfig())
+ .WillOnce(Return(config));
+
+ BitrateAllocationUpdate update;
+ update.target_bitrate = DataRate::BitsPerSec(30000);
+ update.stable_target_bitrate = DataRate::BitsPerSec(20000);
+ update.bwe_period = TimeDelta::Millis(200);
+ EXPECT_CALL(*states->mock_audio_network_adaptor,
+ SetTargetAudioBitrate(update.target_bitrate.bps()));
+ EXPECT_CALL(*states->mock_audio_network_adaptor,
+ SetUplinkBandwidth(update.stable_target_bitrate.bps()));
+ states->encoder->OnReceivedUplinkAllocation(update);
+
+ CheckEncoderRuntimeConfig(states->encoder.get(), config);
+}
+
TEST_P(AudioEncoderOpusTest, InvokeAudioNetworkAdaptorOnReceivedRtt) {
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
@@ -404,16 +345,21 @@ TEST_P(AudioEncoderOpusTest,
// First time, no filtering.
states->encoder->OnReceivedUplinkPacketLossFraction(kPacketLossFraction_1);
- EXPECT_FLOAT_EQ(0.01f, states->encoder->packet_loss_rate());
+ EXPECT_FLOAT_EQ(0.02f, states->encoder->packet_loss_rate());
states->fake_clock->AdvanceTime(TimeDelta::Millis(kSecondSampleTimeMs));
states->encoder->OnReceivedUplinkPacketLossFraction(kPacketLossFraction_2);
// Now the output of packet loss fraction smoother should be
- // (0.02 + 0.198) / 2 = 0.109, which reach the threshold for the optimized
- // packet loss rate to increase to 0.05. If no smoothing has been made, the
- // optimized packet loss rate should have been increase to 0.1.
- EXPECT_FLOAT_EQ(0.05f, states->encoder->packet_loss_rate());
+ // (0.02 + 0.198) / 2 = 0.109.
+ EXPECT_NEAR(0.109f, states->encoder->packet_loss_rate(), 0.001);
+}
+
+TEST_P(AudioEncoderOpusTest, PacketLossRateUpperBounded) {
+ auto states = CreateCodec(sample_rate_hz_, 2);
+
+ states->encoder->OnReceivedUplinkPacketLossFraction(0.5);
+ EXPECT_FLOAT_EQ(0.2f, states->encoder->packet_loss_rate());
}
TEST_P(AudioEncoderOpusTest, DoNotInvokeSetTargetBitrateIfOverheadUnknown) {
@@ -477,29 +423,6 @@ TEST_P(AudioEncoderOpusTest, BitrateBounded) {
EXPECT_EQ(kMaxBitrateBps, states->encoder->GetTargetBitrate());
}
-TEST_P(AudioEncoderOpusTest, MinPacketLossRate) {
- constexpr float kDefaultMinPacketLossRate = 0.01;
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kDefaultMinPacketLossRate, states->encoder->packet_loss_rate());
- }
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-200/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kDefaultMinPacketLossRate, states->encoder->packet_loss_rate());
- }
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-50/");
- constexpr float kMinPacketLossRate = 0.5;
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kMinPacketLossRate, states->encoder->packet_loss_rate());
- }
-}
-
// Verifies that the complexity adaptation in the config works as intended.
TEST(AudioEncoderOpusTest, ConfigComplexityAdaptation) {
AudioEncoderOpusConfig config;
@@ -602,6 +525,8 @@ TEST_P(AudioEncoderOpusTest, EmptyConfigDoesNotAffectEncoderSettings) {
}
TEST_P(AudioEncoderOpusTest, UpdateUplinkBandwidthInAudioNetworkAdaptor) {
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-StableTargetAdaptation/Disabled/");
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
const size_t opus_rate_khz = rtc::CheckedDivExact(sample_rate_hz_, 1000);
diff --git a/modules/audio_coding/codecs/opus/opus_interface.cc b/modules/audio_coding/codecs/opus/opus_interface.cc
index 4bac365a89..455f175464 100644
--- a/modules/audio_coding/codecs/opus/opus_interface.cc
+++ b/modules/audio_coding/codecs/opus/opus_interface.cc
@@ -678,33 +678,7 @@ int WebRtcOpus_FecDurationEst(const uint8_t* payload,
return samples;
}
-// This method is based on Definition of the Opus Audio Codec
-// (https://tools.ietf.org/html/rfc6716). Basically, this method is based on
-// parsing the LP layer of an Opus packet, particularly the LBRR flag.
-int WebRtcOpus_PacketHasFec(const uint8_t* payload,
- size_t payload_length_bytes) {
- if (payload == NULL || payload_length_bytes == 0)
- return 0;
-
- // In CELT_ONLY mode, packets should not have FEC.
- if (payload[0] & 0x80)
- return 0;
-
- // Max number of frames in an Opus packet is 48.
- opus_int16 frame_sizes[48];
- const unsigned char* frame_data[48];
-
- // Parse packet to get the frames. But we only care about the first frame,
- // since we can only decode the FEC from the first one.
- if (opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
- NULL, frame_data, frame_sizes, NULL) < 0) {
- return 0;
- }
-
- if (frame_sizes[0] <= 1) {
- return 0;
- }
-
+int WebRtcOpus_NumSilkFrames(const uint8_t* payload) {
// For computing the payload length in ms, the sample rate is not important
// since it cancels out. We use 48 kHz, but any valid sample rate would work.
int payload_length_ms =
@@ -727,10 +701,43 @@ int WebRtcOpus_PacketHasFec(const uint8_t* payload,
default:
return 0; // It is actually even an invalid packet.
}
+ return silk_frames;
+}
+
+// This method is based on Definition of the Opus Audio Codec
+// (https://tools.ietf.org/html/rfc6716). Basically, this method is based on
+// parsing the LP layer of an Opus packet, particularly the LBRR flag.
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ size_t payload_length_bytes) {
+ if (payload == NULL || payload_length_bytes == 0)
+ return 0;
+
+ // In CELT_ONLY mode, packets should not have FEC.
+ if (payload[0] & 0x80)
+ return 0;
+
+ int silk_frames = WebRtcOpus_NumSilkFrames(payload);
+ if (silk_frames == 0)
+ return 0; // Not valid.
const int channels = opus_packet_get_nb_channels(payload);
RTC_DCHECK(channels == 1 || channels == 2);
+ // Max number of frames in an Opus packet is 48.
+ opus_int16 frame_sizes[48];
+ const unsigned char* frame_data[48];
+
+ // Parse packet to get the frames. But we only care about the first frame,
+ // since we can only decode the FEC from the first one.
+ if (opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
+ NULL, frame_data, frame_sizes, NULL) < 0) {
+ return 0;
+ }
+
+ if (frame_sizes[0] < 1) {
+ return 0;
+ }
+
// A frame starts with the LP layer. The LP layer begins with two to eight
// header bits.These consist of one VAD bit per SILK frame (up to 3),
// followed by a single flag indicating the presence of LBRR frames.
@@ -748,3 +755,45 @@ int WebRtcOpus_PacketHasFec(const uint8_t* payload,
return 0;
}
+
+int WebRtcOpus_PacketHasVoiceActivity(const uint8_t* payload,
+ size_t payload_length_bytes) {
+ if (payload == NULL || payload_length_bytes == 0)
+ return 0;
+
+ // In CELT_ONLY mode we can not determine whether there is VAD.
+ if (payload[0] & 0x80)
+ return -1;
+
+ int silk_frames = WebRtcOpus_NumSilkFrames(payload);
+ if (silk_frames == 0)
+ return 0;
+
+ const int channels = opus_packet_get_nb_channels(payload);
+ RTC_DCHECK(channels == 1 || channels == 2);
+
+ // Max number of frames in an Opus packet is 48.
+ opus_int16 frame_sizes[48];
+ const unsigned char* frame_data[48];
+
+ // Parse packet to get the frames.
+ int frames =
+ opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
+ NULL, frame_data, frame_sizes, NULL);
+ if (frames < 0)
+ return -1;
+
+ // Iterate over all Opus frames which may contain multiple SILK frames.
+ for (int frame = 0; frame < frames; frame++) {
+ if (frame_sizes[frame] < 1) {
+ continue;
+ }
+ if (frame_data[frame][0] >> (8 - silk_frames))
+ return 1;
+ if (channels == 2 &&
+ (frame_data[frame][0] << (silk_frames + 1)) >> (8 - silk_frames))
+ return 1;
+ }
+
+ return 0;
+}
diff --git a/modules/audio_coding/codecs/opus/opus_interface.h b/modules/audio_coding/codecs/opus/opus_interface.h
index e8de973010..2a3ceaa7d3 100644
--- a/modules/audio_coding/codecs/opus/opus_interface.h
+++ b/modules/audio_coding/codecs/opus/opus_interface.h
@@ -510,6 +510,22 @@ int WebRtcOpus_FecDurationEst(const uint8_t* payload,
int WebRtcOpus_PacketHasFec(const uint8_t* payload,
size_t payload_length_bytes);
+/****************************************************************************
+ * WebRtcOpus_PacketHasVoiceActivity(...)
+ *
+ * This function returns the SILK VAD information encoded in the opus packet.
+ * For CELT-only packets that do not have VAD information, it returns -1.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : 0 - no frame had the VAD flag set.
+ * 1 - at least one frame had the VAD flag set.
+ * -1 - VAD status could not be determined.
+ */
+int WebRtcOpus_PacketHasVoiceActivity(const uint8_t* payload,
+ size_t payload_length_bytes);
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/modules/audio_coding/codecs/opus/opus_unittest.cc b/modules/audio_coding/codecs/opus/opus_unittest.cc
index 3407d7d3cf..66ac5e7346 100644
--- a/modules/audio_coding/codecs/opus/opus_unittest.cc
+++ b/modules/audio_coding/codecs/opus/opus_unittest.cc
@@ -949,4 +949,47 @@ TEST_P(OpusTest, OpusDecodeRepacketized) {
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
}
+TEST(OpusVadTest, CeltUnknownStatus) {
+ const uint8_t celt[] = {0x80};
+ EXPECT_EQ(WebRtcOpus_PacketHasVoiceActivity(celt, 1), -1);
+}
+
+TEST(OpusVadTest, Mono20msVadSet) {
+ uint8_t silk20msMonoVad[] = {0x78, 0x80};
+ EXPECT_TRUE(WebRtcOpus_PacketHasVoiceActivity(silk20msMonoVad, 2));
+}
+
+TEST(OpusVadTest, Mono20MsVadUnset) {
+ uint8_t silk20msMonoSilence[] = {0x78, 0x00};
+ EXPECT_FALSE(WebRtcOpus_PacketHasVoiceActivity(silk20msMonoSilence, 2));
+}
+
+TEST(OpusVadTest, Stereo20MsVadOnSideChannel) {
+ uint8_t silk20msStereoVadSideChannel[] = {0x78 | 0x04, 0x20};
+ EXPECT_TRUE(
+ WebRtcOpus_PacketHasVoiceActivity(silk20msStereoVadSideChannel, 2));
+}
+
+TEST(OpusVadTest, TwoOpusMonoFramesVadOnSecond) {
+ uint8_t twoMonoFrames[] = {0x78 | 0x1, 0x00, 0x80};
+ EXPECT_TRUE(WebRtcOpus_PacketHasVoiceActivity(twoMonoFrames, 3));
+}
+
+TEST(OpusVadTest, DtxEmptyPacket) {
+ const uint8_t dtx[] = {0x78};
+ EXPECT_FALSE(WebRtcOpus_PacketHasVoiceActivity(dtx, 1));
+}
+
+TEST(OpusVadTest, DtxBackgroundNoisePacket) {
+ // DTX sends a frame coding background noise every 20 packets:
+ // https://tools.ietf.org/html/rfc6716#section-2.1.9
+ // The packet below represents such a frame and was captured using
+ // Wireshark while disabling encryption.
+ const uint8_t dtx[] = {0x78, 0x07, 0xc9, 0x79, 0xc8, 0xc9, 0x57, 0xc0, 0xa2,
+ 0x12, 0x23, 0xfa, 0xef, 0x67, 0xf3, 0x2e, 0xe3, 0xd3,
+ 0xd5, 0xe9, 0xec, 0xdb, 0x3e, 0xbc, 0x80, 0xb6, 0x6e,
+ 0x2a, 0xb7, 0x8c, 0x83, 0xcd, 0x83, 0xcd, 0x00};
+ EXPECT_FALSE(WebRtcOpus_PacketHasVoiceActivity(dtx, 35));
+}
+
} // namespace webrtc
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index e75806af10..2bfd2c44df 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -15,9 +15,11 @@
#include <utility>
#include <vector>
+#include "rtc_base/byte_order.h"
#include "rtc_base/checks.h"
namespace webrtc {
+static const int kRedMaxPacketSize = 1 << 10;
AudioEncoderCopyRed::Config::Config() = default;
AudioEncoderCopyRed::Config::Config(Config&&) = default;
@@ -55,40 +57,101 @@ int AudioEncoderCopyRed::GetTargetBitrate() const {
return speech_encoder_->GetTargetBitrate();
}
+size_t AudioEncoderCopyRed::CalculateHeaderLength() const {
+ size_t header_size = 1;
+ if (secondary_info_.encoded_bytes > 0) {
+ header_size += 4;
+ }
+ if (tertiary_info_.encoded_bytes > 0) {
+ header_size += 4;
+ }
+ return header_size > 1 ? header_size : 0;
+}
+
AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl(
uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) {
- const size_t primary_offset = encoded->size();
- EncodedInfo info = speech_encoder_->Encode(rtp_timestamp, audio, encoded);
-
+ rtc::Buffer primary_encoded;
+ EncodedInfo info =
+ speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded);
RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders.";
- RTC_DCHECK_EQ(encoded->size() - primary_offset, info.encoded_bytes);
-
- if (info.encoded_bytes > 0) {
- // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
- // discarding the (empty) vector of redundant information. This is
- // intentional.
- info.redundant.push_back(info);
- RTC_DCHECK_EQ(info.redundant.size(), 1);
- if (secondary_info_.encoded_bytes > 0) {
- encoded->AppendData(secondary_encoded_);
- info.redundant.push_back(secondary_info_);
- RTC_DCHECK_EQ(info.redundant.size(), 2);
- }
- // Save primary to secondary.
- secondary_encoded_.SetData(encoded->data() + primary_offset,
- info.encoded_bytes);
- secondary_info_ = info;
- RTC_DCHECK_EQ(info.speech, info.redundant[0].speech);
+ RTC_DCHECK_EQ(primary_encoded.size(), info.encoded_bytes);
+
+ if (info.encoded_bytes == 0) {
+ return info;
+ }
+
+ // Allocate room for RFC 2198 header if there is redundant data.
+ // Otherwise this will send the primary payload type without
+ // wrapping in RED.
+ const size_t header_length_bytes = CalculateHeaderLength();
+ encoded->SetSize(header_length_bytes);
+
+ size_t header_offset = 0;
+ if (tertiary_info_.encoded_bytes > 0 &&
+ tertiary_info_.encoded_bytes < kRedMaxPacketSize) {
+ encoded->AppendData(tertiary_encoded_);
+
+ const uint32_t timestamp_delta =
+ info.encoded_timestamp - tertiary_info_.encoded_timestamp;
+
+ encoded->data()[header_offset] = tertiary_info_.payload_type | 0x80;
+ rtc::SetBE16(static_cast<uint8_t*>(encoded->data()) + header_offset + 1,
+ (timestamp_delta << 2) | (tertiary_info_.encoded_bytes >> 8));
+ encoded->data()[header_offset + 3] = tertiary_info_.encoded_bytes & 0xff;
+ header_offset += 4;
+ }
+
+ if (secondary_info_.encoded_bytes > 0 &&
+ secondary_info_.encoded_bytes < kRedMaxPacketSize) {
+ encoded->AppendData(secondary_encoded_);
+
+ const uint32_t timestamp_delta =
+ info.encoded_timestamp - secondary_info_.encoded_timestamp;
+
+ encoded->data()[header_offset] = secondary_info_.payload_type | 0x80;
+ rtc::SetBE16(static_cast<uint8_t*>(encoded->data()) + header_offset + 1,
+ (timestamp_delta << 2) | (secondary_info_.encoded_bytes >> 8));
+ encoded->data()[header_offset + 3] = secondary_info_.encoded_bytes & 0xff;
+ header_offset += 4;
+ }
+
+ encoded->AppendData(primary_encoded);
+ if (header_length_bytes > 0) {
+ RTC_DCHECK_EQ(header_offset, header_length_bytes - 1);
+ encoded->data()[header_offset] = info.payload_type;
+ }
+
+ // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
+ // discarding the (empty) vector of redundant information. This is
+ // intentional.
+ info.redundant.push_back(info);
+ RTC_DCHECK_EQ(info.redundant.size(), 1);
+ RTC_DCHECK_EQ(info.speech, info.redundant[0].speech);
+ if (secondary_info_.encoded_bytes > 0) {
+ info.redundant.push_back(secondary_info_);
+ RTC_DCHECK_EQ(info.redundant.size(), 2);
+ }
+ if (tertiary_info_.encoded_bytes > 0) {
+ info.redundant.push_back(tertiary_info_);
+ RTC_DCHECK_EQ(info.redundant.size(),
+ 2 + (secondary_info_.encoded_bytes > 0 ? 1 : 0));
}
+
+ // Save secondary to tertiary.
+ tertiary_encoded_.SetData(secondary_encoded_);
+ tertiary_info_ = secondary_info_;
+
+ // Save primary to secondary.
+ secondary_encoded_.SetData(primary_encoded);
+ secondary_info_ = info;
+
// Update main EncodedInfo.
- info.payload_type = red_payload_type_;
- info.encoded_bytes = 0;
- for (std::vector<EncodedInfoLeaf>::const_iterator it = info.redundant.begin();
- it != info.redundant.end(); ++it) {
- info.encoded_bytes += it->encoded_bytes;
+ if (header_length_bytes > 0) {
+ info.payload_type = red_payload_type_;
}
+ info.encoded_bytes = encoded->size();
return info;
}
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
index c6e829eeb6..4d7fc404f1 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
@@ -71,10 +71,13 @@ class AudioEncoderCopyRed final : public AudioEncoder {
rtc::Buffer* encoded) override;
private:
+ size_t CalculateHeaderLength() const;
std::unique_ptr<AudioEncoder> speech_encoder_;
int red_payload_type_;
rtc::Buffer secondary_encoded_;
EncodedInfoLeaf secondary_info_;
+ rtc::Buffer tertiary_encoded_;
+ EncodedInfoLeaf tertiary_info_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCopyRed);
};
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index e20515a165..fbc0b8aa38 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -139,6 +139,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckImmediateEncode) {
// new data, even if the RED codec is loaded with a secondary encoding.
TEST_F(AudioEncoderCopyRedTest, CheckNoOutput) {
static const size_t kEncodedSize = 17;
+ static const size_t kHeaderLenBytes = 5;
{
InSequence s;
EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
@@ -160,7 +161,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckNoOutput) {
// Final call to the speech encoder will produce output.
Encode();
- EXPECT_EQ(2 * kEncodedSize, encoded_info_.encoded_bytes);
+ EXPECT_EQ(2 * kEncodedSize + kHeaderLenBytes, encoded_info_.encoded_bytes);
ASSERT_EQ(2u, encoded_info_.redundant.size());
}
@@ -182,12 +183,19 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadSizes) {
EXPECT_EQ(1u, encoded_info_.redundant.size());
EXPECT_EQ(1u, encoded_info_.encoded_bytes);
- for (size_t i = 2; i <= kNumPackets; ++i) {
+ // Second call is also special since it does not include a ternary
+ // payload.
+ Encode();
+ EXPECT_EQ(2u, encoded_info_.redundant.size());
+ EXPECT_EQ(8u, encoded_info_.encoded_bytes);
+
+ for (size_t i = 3; i <= kNumPackets; ++i) {
Encode();
- ASSERT_EQ(2u, encoded_info_.redundant.size());
+ ASSERT_EQ(3u, encoded_info_.redundant.size());
EXPECT_EQ(i, encoded_info_.redundant[0].encoded_bytes);
EXPECT_EQ(i - 1, encoded_info_.redundant[1].encoded_bytes);
- EXPECT_EQ(i + i - 1, encoded_info_.encoded_bytes);
+ EXPECT_EQ(i - 2, encoded_info_.redundant[2].encoded_bytes);
+ EXPECT_EQ(9 + i + (i - 1) + (i - 2), encoded_info_.encoded_bytes);
}
}
@@ -224,6 +232,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
// Let the mock encoder write payloads with increasing values. The first
// payload will have values 0, 1, 2, ..., kPayloadLenBytes - 1.
static const size_t kPayloadLenBytes = 5;
+ static const size_t kHeaderLenBytes = 5;
uint8_t payload[kPayloadLenBytes];
for (uint8_t i = 0; i < kPayloadLenBytes; ++i) {
payload[i] = i;
@@ -239,7 +248,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
EXPECT_EQ(i, encoded_.data()[i]);
}
- for (int j = 0; j < 5; ++j) {
+ for (int j = 0; j < 1; ++j) {
// Increment all values of the payload by 10.
for (size_t i = 0; i < kPayloadLenBytes; ++i)
payload[i] += 10;
@@ -249,16 +258,17 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
EXPECT_EQ(kPayloadLenBytes, encoded_info_.redundant[0].encoded_bytes);
EXPECT_EQ(kPayloadLenBytes, encoded_info_.redundant[1].encoded_bytes);
for (size_t i = 0; i < kPayloadLenBytes; ++i) {
- // Check primary payload.
- EXPECT_EQ((j + 1) * 10 + i, encoded_.data()[i]);
// Check secondary payload.
- EXPECT_EQ(j * 10 + i, encoded_.data()[i + kPayloadLenBytes]);
+ EXPECT_EQ(j * 10 + i, encoded_.data()[kHeaderLenBytes + i]);
+
+ // Check primary payload.
+ EXPECT_EQ((j + 1) * 10 + i,
+ encoded_.data()[kHeaderLenBytes + i + kPayloadLenBytes]);
}
}
}
// Checks correct propagation of payload type.
-// Checks that the correct timestamps are returned.
TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
const int primary_payload_type = red_payload_type_ + 1;
AudioEncoder::EncodedInfo info;
@@ -272,7 +282,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
Encode();
ASSERT_EQ(1u, encoded_info_.redundant.size());
EXPECT_EQ(primary_payload_type, encoded_info_.redundant[0].payload_type);
- EXPECT_EQ(red_payload_type_, encoded_info_.payload_type);
+ EXPECT_EQ(primary_payload_type, encoded_info_.payload_type);
const int secondary_payload_type = red_payload_type_ + 2;
info.payload_type = secondary_payload_type;
@@ -286,6 +296,65 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
EXPECT_EQ(red_payload_type_, encoded_info_.payload_type);
}
+TEST_F(AudioEncoderCopyRedTest, CheckRFC2198Header) {
+ const int primary_payload_type = red_payload_type_ + 1;
+ AudioEncoder::EncodedInfo info;
+ info.encoded_bytes = 10;
+ info.encoded_timestamp = timestamp_;
+ info.payload_type = primary_payload_type;
+
+ EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
+ .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info)));
+ Encode();
+ info.encoded_timestamp = timestamp_; // update timestamp.
+ EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
+ .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info)));
+ Encode(); // Second call will produce a redundant encoding.
+
+ EXPECT_EQ(encoded_.size(),
+ 5u + 2 * 10u); // header size + two encoded payloads.
+ EXPECT_EQ(encoded_[0], primary_payload_type | 0x80);
+
+ uint32_t timestamp_delta = encoded_info_.encoded_timestamp -
+ encoded_info_.redundant[1].encoded_timestamp;
+ // Timestamp delta is encoded as a 14 bit value.
+ EXPECT_EQ(encoded_[1], timestamp_delta >> 6);
+ EXPECT_EQ(static_cast<uint8_t>(encoded_[2] >> 2), timestamp_delta & 0x3f);
+ // Redundant length is encoded as 10 bit value.
+ EXPECT_EQ(encoded_[2] & 0x3u, encoded_info_.redundant[1].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[3], encoded_info_.redundant[1].encoded_bytes & 0xff);
+ EXPECT_EQ(encoded_[4], primary_payload_type);
+
+ EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
+ .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info)));
+ Encode(); // Third call will produce a redundant encoding with double
+ // redundancy.
+
+ EXPECT_EQ(encoded_.size(),
+ 9u + 3 * 10u); // header size + two encoded payloads.
+ EXPECT_EQ(encoded_[0], primary_payload_type | 0x80);
+
+ timestamp_delta = encoded_info_.encoded_timestamp -
+ encoded_info_.redundant[2].encoded_timestamp;
+ // Timestamp delta is encoded as a 14 bit value.
+ EXPECT_EQ(encoded_[1], timestamp_delta >> 6);
+ EXPECT_EQ(static_cast<uint8_t>(encoded_[2] >> 2), timestamp_delta & 0x3f);
+ // Redundant length is encoded as 10 bit value.
+ EXPECT_EQ(encoded_[2] & 0x3u, encoded_info_.redundant[2].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[3], encoded_info_.redundant[2].encoded_bytes & 0xff);
+
+ EXPECT_EQ(encoded_[4], primary_payload_type | 0x80);
+ timestamp_delta = encoded_info_.encoded_timestamp -
+ encoded_info_.redundant[1].encoded_timestamp;
+ // Timestamp delta is encoded as a 14 bit value.
+ EXPECT_EQ(encoded_[5], timestamp_delta >> 6);
+ EXPECT_EQ(static_cast<uint8_t>(encoded_[6] >> 2), timestamp_delta & 0x3f);
+ // Redundant length is encoded as 10 bit value.
+ EXPECT_EQ(encoded_[6] & 0x3u, encoded_info_.redundant[2].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[7], encoded_info_.redundant[2].encoded_bytes & 0xff);
+ EXPECT_EQ(encoded_[8], primary_payload_type);
+}
+
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// This test fixture tests various error conditions that makes the
diff --git a/modules/audio_coding/neteq/audio_decoder_unittest.cc b/modules/audio_coding/neteq/audio_decoder_unittest.cc
index 836c49c12f..d1e1ec1e30 100644
--- a/modules/audio_coding/neteq/audio_decoder_unittest.cc
+++ b/modules/audio_coding/neteq/audio_decoder_unittest.cc
@@ -536,7 +536,11 @@ TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacFloatTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32000));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32001));
}
TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
@@ -549,7 +553,11 @@ TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacSwbTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), 56000));
+ EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), 56001));
}
TEST_F(AudioDecoderIsacFixTest, EncodeDecode) {
@@ -569,7 +577,11 @@ TEST_F(AudioDecoderIsacFixTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacFixTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32000));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32001));
}
TEST_F(AudioDecoderG722Test, EncodeDecode) {
diff --git a/modules/audio_coding/neteq/merge.cc b/modules/audio_coding/neteq/merge.cc
index 552192d910..f1f2cc97e3 100644
--- a/modules/audio_coding/neteq/merge.cc
+++ b/modules/audio_coding/neteq/merge.cc
@@ -80,7 +80,7 @@ size_t Merge::Process(int16_t* input,
if (channel == 0) {
// Downsample, correlate, and find strongest correlation period for the
- // master (i.e., first) channel only.
+ // reference (i.e., first) channel only.
// Downsample to 4kHz sample rate.
Downsample(input_channel.get(), input_length_per_channel,
expanded_channel.get(), expanded_length);
diff --git a/modules/audio_coding/neteq/mock/mock_decoder_database.h b/modules/audio_coding/neteq/mock/mock_decoder_database.h
index d83dc7f62c..b8dc031fa4 100644
--- a/modules/audio_coding/neteq/mock/mock_decoder_database.h
+++ b/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -23,22 +23,28 @@ class MockDecoderDatabase : public DecoderDatabase {
explicit MockDecoderDatabase(
rtc::scoped_refptr<AudioDecoderFactory> factory = nullptr)
: DecoderDatabase(factory, absl::nullopt) {}
- virtual ~MockDecoderDatabase() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_CONST_METHOD0(Empty, bool());
- MOCK_CONST_METHOD0(Size, int());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD2(RegisterPayload,
- int(int rtp_payload_type, const SdpAudioFormat& audio_format));
- MOCK_METHOD1(Remove, int(uint8_t rtp_payload_type));
- MOCK_METHOD0(RemoveAll, void());
- MOCK_CONST_METHOD1(GetDecoderInfo,
- const DecoderInfo*(uint8_t rtp_payload_type));
- MOCK_METHOD2(SetActiveDecoder,
- int(uint8_t rtp_payload_type, bool* new_decoder));
- MOCK_CONST_METHOD0(GetActiveDecoder, AudioDecoder*());
- MOCK_METHOD1(SetActiveCngDecoder, int(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD0(GetActiveCngDecoder, ComfortNoiseDecoder*());
+ ~MockDecoderDatabase() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(bool, Empty, (), (const, override));
+ MOCK_METHOD(int, Size, (), (const, override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int,
+ RegisterPayload,
+ (int rtp_payload_type, const SdpAudioFormat& audio_format),
+ (override));
+ MOCK_METHOD(int, Remove, (uint8_t rtp_payload_type), (override));
+ MOCK_METHOD(void, RemoveAll, (), (override));
+ MOCK_METHOD(const DecoderInfo*,
+ GetDecoderInfo,
+ (uint8_t rtp_payload_type),
+ (const, override));
+ MOCK_METHOD(int,
+ SetActiveDecoder,
+ (uint8_t rtp_payload_type, bool* new_decoder),
+ (override));
+ MOCK_METHOD(AudioDecoder*, GetActiveDecoder, (), (const, override));
+ MOCK_METHOD(int, SetActiveCngDecoder, (uint8_t rtp_payload_type), (override));
+ MOCK_METHOD(ComfortNoiseDecoder*, GetActiveCngDecoder, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h b/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
index d9fe5d4492..c60c56d36b 100644
--- a/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
+++ b/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
@@ -19,13 +19,16 @@ namespace webrtc {
class MockDtmfBuffer : public DtmfBuffer {
public:
MockDtmfBuffer(int fs) : DtmfBuffer(fs) {}
- virtual ~MockDtmfBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush, void());
- MOCK_METHOD1(InsertEvent, int(const DtmfEvent& event));
- MOCK_METHOD2(GetEvent, bool(uint32_t current_timestamp, DtmfEvent* event));
- MOCK_CONST_METHOD0(Length, size_t());
- MOCK_CONST_METHOD0(Empty, bool());
+ ~MockDtmfBuffer() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Flush, (), (override));
+ MOCK_METHOD(int, InsertEvent, (const DtmfEvent& event), (override));
+ MOCK_METHOD(bool,
+ GetEvent,
+ (uint32_t current_timestamp, DtmfEvent* event),
+ (override));
+ MOCK_METHOD(size_t, Length, (), (const, override));
+ MOCK_METHOD(bool, Empty, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h b/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
index eea8bee1c2..60de167c29 100644
--- a/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
+++ b/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
@@ -18,12 +18,15 @@ namespace webrtc {
class MockDtmfToneGenerator : public DtmfToneGenerator {
public:
- virtual ~MockDtmfToneGenerator() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD3(Init, int(int fs, int event, int attenuation));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD2(Generate, int(size_t num_samples, AudioMultiVector* output));
- MOCK_CONST_METHOD0(initialized, bool());
+ ~MockDtmfToneGenerator() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(int, Init, (int fs, int event, int attenuation), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int,
+ Generate,
+ (size_t num_samples, AudioMultiVector* output),
+ (override));
+ MOCK_METHOD(bool, initialized, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_expand.h b/modules/audio_coding/neteq/mock/mock_expand.h
index 286325b841..9d66779021 100644
--- a/modules/audio_coding/neteq/mock/mock_expand.h
+++ b/modules/audio_coding/neteq/mock/mock_expand.h
@@ -30,13 +30,13 @@ class MockExpand : public Expand {
statistics,
fs,
num_channels) {}
- virtual ~MockExpand() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(Process, int(AudioMultiVector* output));
- MOCK_METHOD0(SetParametersForNormalAfterExpand, void());
- MOCK_METHOD0(SetParametersForMergeAfterExpand, void());
- MOCK_CONST_METHOD0(overlap_length, size_t());
+ ~MockExpand() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, Process, (AudioMultiVector * output), (override));
+ MOCK_METHOD(void, SetParametersForNormalAfterExpand, (), (override));
+ MOCK_METHOD(void, SetParametersForMergeAfterExpand, (), (override));
+ MOCK_METHOD(size_t, overlap_length, (), (const, override));
};
} // namespace webrtc
@@ -45,13 +45,15 @@ namespace webrtc {
class MockExpandFactory : public ExpandFactory {
public:
- MOCK_CONST_METHOD6(Create,
- Expand*(BackgroundNoise* background_noise,
- SyncBuffer* sync_buffer,
- RandomVector* random_vector,
- StatisticsCalculator* statistics,
- int fs,
- size_t num_channels));
+ MOCK_METHOD(Expand*,
+ Create,
+ (BackgroundNoise * background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ StatisticsCalculator* statistics,
+ int fs,
+ size_t num_channels),
+ (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_histogram.h b/modules/audio_coding/neteq/mock/mock_histogram.h
index 91ae18f5e8..03abbc1d4b 100644
--- a/modules/audio_coding/neteq/mock/mock_histogram.h
+++ b/modules/audio_coding/neteq/mock/mock_histogram.h
@@ -22,8 +22,8 @@ class MockHistogram : public Histogram {
: Histogram(num_buckets, forget_factor) {}
virtual ~MockHistogram() {}
- MOCK_METHOD1(Add, void(int));
- MOCK_METHOD1(Quantile, int(int));
+ MOCK_METHOD(void, Add, (int), (override));
+ MOCK_METHOD(int, Quantile, (int), (override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_neteq_controller.h b/modules/audio_coding/neteq/mock/mock_neteq_controller.h
index d1008c8a30..b7df85fb20 100644
--- a/modules/audio_coding/neteq/mock/mock_neteq_controller.h
+++ b/modules/audio_coding/neteq/mock/mock_neteq_controller.h
@@ -19,46 +19,45 @@ namespace webrtc {
class MockNetEqController : public NetEqController {
public:
MockNetEqController() = default;
- virtual ~MockNetEqController() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD0(SoftReset, void());
- MOCK_METHOD2(GetDecision,
- NetEq::Operation(const NetEqStatus& neteq_status,
- bool* reset_decoder));
- MOCK_METHOD6(Update,
- void(uint16_t sequence_number,
- uint32_t timestamp,
- uint32_t last_played_out_timestamp,
- bool new_codec,
- bool cng_or_dtmf,
- size_t packet_length_samples));
- MOCK_METHOD0(RegisterEmptyPacket, void());
- MOCK_METHOD2(SetSampleRate, void(int fs_hz, size_t output_size_samples));
- MOCK_METHOD1(SetMaximumDelay, bool(int delay_ms));
- MOCK_METHOD1(SetMinimumDelay, bool(int delay_ms));
- MOCK_METHOD1(SetBaseMinimumDelay, bool(int delay_ms));
- MOCK_CONST_METHOD0(GetBaseMinimumDelay, int());
- MOCK_CONST_METHOD0(CngRfc3389On, bool());
- MOCK_CONST_METHOD0(CngOff, bool());
- MOCK_METHOD0(SetCngOff, void());
- MOCK_METHOD1(ExpandDecision, void(NetEq::Operation operation));
- MOCK_METHOD1(AddSampleMemory, void(int32_t value));
- MOCK_METHOD0(TargetLevelMs, int());
- MOCK_METHOD6(PacketArrived,
- absl::optional<int>(bool last_cng_or_dtmf,
- size_t packet_length_samples,
- bool should_update_stats,
- uint16_t main_sequence_number,
- uint32_t main_timestamp,
- int fs_hz));
- MOCK_CONST_METHOD0(PeakFound, bool());
- MOCK_CONST_METHOD0(GetFilteredBufferLevel, int());
- MOCK_METHOD1(set_sample_memory, void(int32_t value));
- MOCK_CONST_METHOD0(noise_fast_forward, size_t());
- MOCK_CONST_METHOD0(packet_length_samples, size_t());
- MOCK_METHOD1(set_packet_length_samples, void(size_t value));
- MOCK_METHOD1(set_prev_time_scale, void(bool value));
+ ~MockNetEqController() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(void, SoftReset, (), (override));
+ MOCK_METHOD(NetEq::Operation,
+ GetDecision,
+ (const NetEqStatus& neteq_status, bool* reset_decoder),
+ (override));
+ MOCK_METHOD(void, RegisterEmptyPacket, (), (override));
+ MOCK_METHOD(void,
+ SetSampleRate,
+ (int fs_hz, size_t output_size_samples),
+ (override));
+ MOCK_METHOD(bool, SetMaximumDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetMinimumDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetBaseMinimumDelay, (int delay_ms), (override));
+ MOCK_METHOD(int, GetBaseMinimumDelay, (), (const, override));
+ MOCK_METHOD(bool, CngRfc3389On, (), (const, override));
+ MOCK_METHOD(bool, CngOff, (), (const, override));
+ MOCK_METHOD(void, SetCngOff, (), (override));
+ MOCK_METHOD(void, ExpandDecision, (NetEq::Operation operation), (override));
+ MOCK_METHOD(void, AddSampleMemory, (int32_t value), (override));
+ MOCK_METHOD(int, TargetLevelMs, (), (override));
+ MOCK_METHOD(absl::optional<int>,
+ PacketArrived,
+ (bool last_cng_or_dtmf,
+ size_t packet_length_samples,
+ bool should_update_stats,
+ uint16_t main_sequence_number,
+ uint32_t main_timestamp,
+ int fs_hz),
+ (override));
+ MOCK_METHOD(bool, PeakFound, (), (const, override));
+ MOCK_METHOD(int, GetFilteredBufferLevel, (), (const, override));
+ MOCK_METHOD(void, set_sample_memory, (int32_t value), (override));
+ MOCK_METHOD(size_t, noise_fast_forward, (), (const, override));
+ MOCK_METHOD(size_t, packet_length_samples, (), (const, override));
+ MOCK_METHOD(void, set_packet_length_samples, (size_t value), (override));
+ MOCK_METHOD(void, set_prev_time_scale, (bool value), (override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 7efeb15e47..e466ea6c8b 100644
--- a/modules/audio_coding/neteq/mock/mock_packet_buffer.h
+++ b/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -20,39 +20,47 @@ class MockPacketBuffer : public PacketBuffer {
public:
MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer)
: PacketBuffer(max_number_of_packets, tick_timer) {}
- virtual ~MockPacketBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush, void());
- MOCK_CONST_METHOD0(Empty, bool());
- int InsertPacket(Packet&& packet, StatisticsCalculator* stats) {
- return InsertPacketWrapped(&packet, stats);
- }
- // Since gtest does not properly support move-only types, InsertPacket is
- // implemented as a wrapper. You'll have to implement InsertPacketWrapped
- // instead and move from |*packet|.
- MOCK_METHOD2(InsertPacketWrapped,
- int(Packet* packet, StatisticsCalculator* stats));
- MOCK_METHOD5(InsertPacketList,
- int(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- absl::optional<uint8_t>* current_rtp_payload_type,
- absl::optional<uint8_t>* current_cng_rtp_payload_type,
- StatisticsCalculator* stats));
- MOCK_CONST_METHOD1(NextTimestamp, int(uint32_t* next_timestamp));
- MOCK_CONST_METHOD2(NextHigherTimestamp,
- int(uint32_t timestamp, uint32_t* next_timestamp));
- MOCK_CONST_METHOD0(PeekNextPacket, const Packet*());
- MOCK_METHOD0(GetNextPacket, absl::optional<Packet>());
- MOCK_METHOD1(DiscardNextPacket, int(StatisticsCalculator* stats));
- MOCK_METHOD3(DiscardOldPackets,
- void(uint32_t timestamp_limit,
- uint32_t horizon_samples,
- StatisticsCalculator* stats));
- MOCK_METHOD2(DiscardAllOldPackets,
- void(uint32_t timestamp_limit, StatisticsCalculator* stats));
- MOCK_CONST_METHOD0(NumPacketsInBuffer, size_t());
- MOCK_METHOD1(IncrementWaitingTimes, void(int));
- MOCK_CONST_METHOD0(current_memory_bytes, int());
+ ~MockPacketBuffer() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Flush, (), (override));
+ MOCK_METHOD(bool, Empty, (), (const, override));
+ MOCK_METHOD(int,
+ InsertPacket,
+ (Packet && packet, StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(int,
+ InsertPacketList,
+ (PacketList * packet_list,
+ const DecoderDatabase& decoder_database,
+ absl::optional<uint8_t>* current_rtp_payload_type,
+ absl::optional<uint8_t>* current_cng_rtp_payload_type,
+ StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(int,
+ NextTimestamp,
+ (uint32_t * next_timestamp),
+ (const, override));
+ MOCK_METHOD(int,
+ NextHigherTimestamp,
+ (uint32_t timestamp, uint32_t* next_timestamp),
+ (const, override));
+ MOCK_METHOD(const Packet*, PeekNextPacket, (), (const, override));
+ MOCK_METHOD(absl::optional<Packet>, GetNextPacket, (), (override));
+ MOCK_METHOD(int,
+ DiscardNextPacket,
+ (StatisticsCalculator * stats),
+ (override));
+ MOCK_METHOD(void,
+ DiscardOldPackets,
+ (uint32_t timestamp_limit,
+ uint32_t horizon_samples,
+ StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(void,
+ DiscardAllOldPackets,
+ (uint32_t timestamp_limit, StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(size_t, NumPacketsInBuffer, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h b/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
index 68fd3566c9..9daf571a80 100644
--- a/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
+++ b/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
@@ -18,10 +18,12 @@ namespace webrtc {
class MockRedPayloadSplitter : public RedPayloadSplitter {
public:
- MOCK_METHOD1(SplitRed, bool(PacketList* packet_list));
- MOCK_METHOD2(CheckRedPayloads,
- void(PacketList* packet_list,
- const DecoderDatabase& decoder_database));
+ MOCK_METHOD(bool, SplitRed, (PacketList * packet_list), (override));
+ MOCK_METHOD(void,
+ CheckRedPayloads,
+ (PacketList * packet_list,
+ const DecoderDatabase& decoder_database),
+ (override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/mock/mock_statistics_calculator.h b/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
index 086c7c5564..f8812478d6 100644
--- a/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
+++ b/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
@@ -18,9 +18,12 @@ namespace webrtc {
class MockStatisticsCalculator : public StatisticsCalculator {
public:
- MOCK_METHOD1(PacketsDiscarded, void(size_t num_packets));
- MOCK_METHOD1(SecondaryPacketsDiscarded, void(size_t num_packets));
- MOCK_METHOD1(RelativePacketArrivalDelay, void(size_t delay_ms));
+ MOCK_METHOD(void, PacketsDiscarded, (size_t num_packets), (override));
+ MOCK_METHOD(void,
+ SecondaryPacketsDiscarded,
+ (size_t num_packets),
+ (override));
+ MOCK_METHOD(void, RelativePacketArrivalDelay, (size_t delay_ms), (override));
};
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/neteq_impl.cc b/modules/audio_coding/neteq/neteq_impl.cc
index 0b7510d341..643fb1e2dd 100644
--- a/modules/audio_coding/neteq/neteq_impl.cc
+++ b/modules/audio_coding/neteq/neteq_impl.cc
@@ -51,6 +51,7 @@
#include "rtc_base/strings/audio_format_to_string.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
namespace {
@@ -73,6 +74,24 @@ std::unique_ptr<NetEqController> CreateNetEqController(
return controller_factory.CreateNetEqController(config);
}
+int GetDelayChainLengthMs(int config_extra_delay_ms) {
+ constexpr char kExtraDelayFieldTrial[] = "WebRTC-Audio-NetEqExtraDelay";
+ if (webrtc::field_trial::IsEnabled(kExtraDelayFieldTrial)) {
+ const auto field_trial_string =
+ webrtc::field_trial::FindFullName(kExtraDelayFieldTrial);
+ int extra_delay_ms = -1;
+ if (sscanf(field_trial_string.c_str(), "Enabled-%d", &extra_delay_ms) ==
+ 1 &&
+ extra_delay_ms >= 0 && extra_delay_ms <= 2000) {
+ RTC_LOG(LS_INFO) << "Delay chain length set to " << extra_delay_ms
+ << " ms in field trial";
+ return (extra_delay_ms / 10) * 10; // Rounding down to multiple of 10.
+ }
+ }
+ // Field trial not set, or invalid value read. Use value from config.
+ return config_extra_delay_ms;
+}
+
} // namespace
NetEqImpl::Dependencies::Dependencies(
@@ -140,7 +159,10 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
10, // Report once every 10 s.
tick_timer_.get()),
no_time_stretching_(config.for_test_no_time_stretching),
- enable_rtx_handling_(config.enable_rtx_handling) {
+ enable_rtx_handling_(config.enable_rtx_handling),
+ output_delay_chain_ms_(
+ GetDelayChainLengthMs(config.extra_output_delay_ms)),
+ output_delay_chain_(rtc::CheckedDivExact(output_delay_chain_ms_, 10)) {
RTC_LOG(LS_INFO) << "NetEq config: " << config.ToString();
int fs = config.sample_rate_hz;
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
@@ -171,7 +193,7 @@ int NetEqImpl::InsertPacket(const RTPHeader& rtp_header,
rtc::ArrayView<const uint8_t> payload) {
rtc::MsanCheckInitialized(payload);
TRACE_EVENT0("webrtc", "NetEqImpl::InsertPacket");
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (InsertPacketInternal(rtp_header, payload) != 0) {
return kFail;
}
@@ -182,7 +204,7 @@ void NetEqImpl::InsertEmptyPacket(const RTPHeader& /*rtp_header*/) {
// TODO(henrik.lundin) Handle NACK as well. This will make use of the
// rtp_header parameter.
// https://bugs.chromium.org/p/webrtc/issues/detail?id=7611
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
controller_->RegisterEmptyPacket();
}
@@ -238,7 +260,7 @@ int NetEqImpl::GetAudio(AudioFrame* audio_frame,
bool* muted,
absl::optional<Operation> action_override) {
TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (GetAudioInternal(audio_frame, muted, action_override) != 0) {
return kFail;
}
@@ -255,11 +277,30 @@ int NetEqImpl::GetAudio(AudioFrame* audio_frame,
last_output_sample_rate_hz_ == 32000 ||
last_output_sample_rate_hz_ == 48000)
<< "Unexpected sample rate " << last_output_sample_rate_hz_;
+
+ if (!output_delay_chain_.empty()) {
+ if (output_delay_chain_empty_) {
+ for (auto& f : output_delay_chain_) {
+ f.CopyFrom(*audio_frame);
+ }
+ output_delay_chain_empty_ = false;
+ delayed_last_output_sample_rate_hz_ = last_output_sample_rate_hz_;
+ } else {
+ RTC_DCHECK_GE(output_delay_chain_ix_, 0);
+ RTC_DCHECK_LT(output_delay_chain_ix_, output_delay_chain_.size());
+ swap(output_delay_chain_[output_delay_chain_ix_], *audio_frame);
+ *muted = audio_frame->muted();
+ output_delay_chain_ix_ =
+ (output_delay_chain_ix_ + 1) % output_delay_chain_.size();
+ delayed_last_output_sample_rate_hz_ = audio_frame->sample_rate_hz();
+ }
+ }
+
return kOK;
}
void NetEqImpl::SetCodecs(const std::map<int, SdpAudioFormat>& codecs) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
const std::vector<int> changed_payload_types =
decoder_database_->SetCodecs(codecs);
for (const int pt : changed_payload_types) {
@@ -272,13 +313,13 @@ bool NetEqImpl::RegisterPayloadType(int rtp_payload_type,
RTC_LOG(LS_VERBOSE) << "NetEqImpl::RegisterPayloadType: payload type "
<< rtp_payload_type << ", codec "
<< rtc::ToString(audio_format);
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return decoder_database_->RegisterPayload(rtp_payload_type, audio_format) ==
DecoderDatabase::kOK;
}
int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
int ret = decoder_database_->Remove(rtp_payload_type);
if (ret == DecoderDatabase::kOK || ret == DecoderDatabase::kDecoderNotFound) {
packet_buffer_->DiscardPacketsWithPayloadType(rtp_payload_type,
@@ -289,30 +330,32 @@ int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
}
void NetEqImpl::RemoveAllPayloadTypes() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
decoder_database_->RemoveAll();
}
bool NetEqImpl::SetMinimumDelay(int delay_ms) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (delay_ms >= 0 && delay_ms <= 10000) {
assert(controller_.get());
- return controller_->SetMinimumDelay(delay_ms);
+ return controller_->SetMinimumDelay(
+ std::max(delay_ms - output_delay_chain_ms_, 0));
}
return false;
}
bool NetEqImpl::SetMaximumDelay(int delay_ms) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (delay_ms >= 0 && delay_ms <= 10000) {
assert(controller_.get());
- return controller_->SetMaximumDelay(delay_ms);
+ return controller_->SetMaximumDelay(
+ std::max(delay_ms - output_delay_chain_ms_, 0));
}
return false;
}
bool NetEqImpl::SetBaseMinimumDelayMs(int delay_ms) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (delay_ms >= 0 && delay_ms <= 10000) {
return controller_->SetBaseMinimumDelay(delay_ms);
}
@@ -320,28 +363,29 @@ bool NetEqImpl::SetBaseMinimumDelayMs(int delay_ms) {
}
int NetEqImpl::GetBaseMinimumDelayMs() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return controller_->GetBaseMinimumDelay();
}
int NetEqImpl::TargetDelayMs() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
RTC_DCHECK(controller_.get());
- return controller_->TargetLevelMs();
+ return controller_->TargetLevelMs() + output_delay_chain_ms_;
}
int NetEqImpl::FilteredCurrentDelayMs() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
// Sum up the filtered packet buffer level with the future length of the sync
// buffer.
const int delay_samples =
controller_->GetFilteredBufferLevel() + sync_buffer_->FutureLength();
// The division below will truncate. The return value is in ms.
- return delay_samples / rtc::CheckedDivExact(fs_hz_, 1000);
+ return delay_samples / rtc::CheckedDivExact(fs_hz_, 1000) +
+ output_delay_chain_ms_;
}
int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
assert(decoder_database_.get());
const size_t total_samples_in_buffers =
packet_buffer_->NumSamplesInBuffer(decoder_frame_length_) +
@@ -351,16 +395,23 @@ int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
stats->jitter_peaks_found = controller_->PeakFound();
stats_->GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
decoder_frame_length_, stats);
+ // Compensate for output delay chain.
+ stats->current_buffer_size_ms += output_delay_chain_ms_;
+ stats->preferred_buffer_size_ms += output_delay_chain_ms_;
+ stats->mean_waiting_time_ms += output_delay_chain_ms_;
+ stats->median_waiting_time_ms += output_delay_chain_ms_;
+ stats->min_waiting_time_ms += output_delay_chain_ms_;
+ stats->max_waiting_time_ms += output_delay_chain_ms_;
return 0;
}
NetEqLifetimeStatistics NetEqImpl::GetLifetimeStatistics() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return stats_->GetLifetimeStatistics();
}
NetEqOperationsAndState NetEqImpl::GetOperationsAndState() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
auto result = stats_->GetOperationsAndState();
result.current_buffer_size_ms =
(packet_buffer_->NumSamplesInBuffer(decoder_frame_length_) +
@@ -374,19 +425,19 @@ NetEqOperationsAndState NetEqImpl::GetOperationsAndState() const {
}
void NetEqImpl::EnableVad() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
assert(vad_.get());
vad_->Enable();
}
void NetEqImpl::DisableVad() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
assert(vad_.get());
vad_->Disable();
}
absl::optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (first_packet_ || last_mode_ == Mode::kRfc3389Cng ||
last_mode_ == Mode::kCodecInternalCng) {
// We don't have a valid RTP timestamp until we have decoded our first
@@ -394,17 +445,24 @@ absl::optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
// which is indicated by returning an empty value.
return absl::nullopt;
}
- return timestamp_scaler_->ToExternal(playout_timestamp_);
+ size_t sum_samples_in_output_delay_chain = 0;
+ for (const auto& audio_frame : output_delay_chain_) {
+ sum_samples_in_output_delay_chain += audio_frame.samples_per_channel();
+ }
+ return timestamp_scaler_->ToExternal(
+ playout_timestamp_ -
+ static_cast<uint32_t>(sum_samples_in_output_delay_chain));
}
int NetEqImpl::last_output_sample_rate_hz() const {
- rtc::CritScope lock(&crit_sect_);
- return last_output_sample_rate_hz_;
+ MutexLock lock(&mutex_);
+ return delayed_last_output_sample_rate_hz_.value_or(
+ last_output_sample_rate_hz_);
}
absl::optional<NetEq::DecoderFormat> NetEqImpl::GetDecoderFormat(
int payload_type) const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
const DecoderDatabase::DecoderInfo* const di =
decoder_database_->GetDecoderInfo(payload_type);
if (di) {
@@ -422,7 +480,7 @@ absl::optional<NetEq::DecoderFormat> NetEqImpl::GetDecoderFormat(
}
void NetEqImpl::FlushBuffers() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
RTC_LOG(LS_VERBOSE) << "FlushBuffers";
packet_buffer_->Flush();
assert(sync_buffer_.get());
@@ -435,7 +493,7 @@ void NetEqImpl::FlushBuffers() {
}
void NetEqImpl::EnableNack(size_t max_nack_list_size) {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!nack_enabled_) {
const int kNackThresholdPackets = 2;
nack_.reset(NackTracker::Create(kNackThresholdPackets));
@@ -446,13 +504,13 @@ void NetEqImpl::EnableNack(size_t max_nack_list_size) {
}
void NetEqImpl::DisableNack() {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
nack_.reset();
nack_enabled_ = false;
}
std::vector<uint16_t> NetEqImpl::GetNackList(int64_t round_trip_time_ms) const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!nack_enabled_) {
return std::vector<uint16_t>();
}
@@ -461,23 +519,23 @@ std::vector<uint16_t> NetEqImpl::GetNackList(int64_t round_trip_time_ms) const {
}
std::vector<uint32_t> NetEqImpl::LastDecodedTimestamps() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return last_decoded_timestamps_;
}
int NetEqImpl::SyncBufferSizeMs() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return rtc::dchecked_cast<int>(sync_buffer_->FutureLength() /
rtc::CheckedDivExact(fs_hz_, 1000));
}
const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return sync_buffer_.get();
}
NetEq::Operation NetEqImpl::last_operation_for_test() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return last_operation_;
}
@@ -1988,8 +2046,9 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
extracted_samples = packet->timestamp - first_timestamp + packet_duration;
RTC_DCHECK(controller_);
- stats_->JitterBufferDelay(packet_duration, waiting_time_ms,
- controller_->TargetLevelMs());
+ stats_->JitterBufferDelay(
+ packet_duration, waiting_time_ms + output_delay_chain_ms_,
+ controller_->TargetLevelMs() + output_delay_chain_ms_);
packet_list->push_back(std::move(*packet)); // Store packet in list.
packet = absl::nullopt; // Ensure it's never used after the move.
diff --git a/modules/audio_coding/neteq/neteq_impl.h b/modules/audio_coding/neteq/neteq_impl.h
index 956cb6ef17..0ade6b5388 100644
--- a/modules/audio_coding/neteq/neteq_impl.h
+++ b/modules/audio_coding/neteq/neteq_impl.h
@@ -30,7 +30,7 @@
#include "modules/audio_coding/neteq/random_vector.h"
#include "modules/audio_coding/neteq/statistics_calculator.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -210,14 +210,14 @@ class NetEqImpl : public webrtc::NetEq {
// TODO(hlundin): Merge this with InsertPacket above?
int InsertPacketInternal(const RTPHeader& rtp_header,
rtc::ArrayView<const uint8_t> payload)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Delivers 10 ms of audio data. The data is written to |audio_frame|.
// Returns 0 on success, otherwise an error code.
int GetAudioInternal(AudioFrame* audio_frame,
bool* muted,
absl::optional<Operation> action_override)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Provides a decision to the GetAudioInternal method. The decision what to
// do is written to |operation|. Packets to decode are written to
@@ -229,7 +229,7 @@ class NetEqImpl : public webrtc::NetEq {
DtmfEvent* dtmf_event,
bool* play_dtmf,
absl::optional<Operation> action_override)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Decodes the speech packets in |packet_list|, and writes the results to
// |decoded_buffer|, which is allocated to hold |decoded_buffer_length|
@@ -241,13 +241,13 @@ class NetEqImpl : public webrtc::NetEq {
Operation* operation,
int* decoded_length,
AudioDecoder::SpeechType* speech_type)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method to Decode(). Performs codec internal CNG.
int DecodeCng(AudioDecoder* decoder,
int* decoded_length,
AudioDecoder::SpeechType* speech_type)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method to Decode(). Performs the actual decoding.
int DecodeLoop(PacketList* packet_list,
@@ -255,24 +255,24 @@ class NetEqImpl : public webrtc::NetEq {
AudioDecoder* decoder,
int* decoded_length,
AudioDecoder::SpeechType* speech_type)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the Normal class to perform the normal operation.
void DoNormal(const int16_t* decoded_buffer,
size_t decoded_length,
AudioDecoder::SpeechType speech_type,
- bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the Merge class to perform the merge operation.
void DoMerge(int16_t* decoded_buffer,
size_t decoded_length,
AudioDecoder::SpeechType speech_type,
- bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- bool DoCodecPlc() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool DoCodecPlc() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the Expand class to perform the expand operation.
- int DoExpand(bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ int DoExpand(bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the Accelerate class to perform the accelerate
// operation.
@@ -280,128 +280,136 @@ class NetEqImpl : public webrtc::NetEq {
size_t decoded_length,
AudioDecoder::SpeechType speech_type,
bool play_dtmf,
- bool fast_accelerate)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool fast_accelerate) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the PreemptiveExpand class to perform the
// preemtive expand operation.
int DoPreemptiveExpand(int16_t* decoded_buffer,
size_t decoded_length,
AudioDecoder::SpeechType speech_type,
- bool play_dtmf)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool play_dtmf) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Sub-method which calls the ComfortNoise class to generate RFC 3389 comfort
// noise. |packet_list| can either contain one SID frame to update the
// noise parameters, or no payload at all, in which case the previously
// received parameters are used.
int DoRfc3389Cng(PacketList* packet_list, bool play_dtmf)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Calls the audio decoder to generate codec-internal comfort noise when
// no packet was received.
void DoCodecInternalCng(const int16_t* decoded_buffer, size_t decoded_length)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Calls the DtmfToneGenerator class to generate DTMF tones.
int DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Overdub DTMF on top of |output|.
int DtmfOverdub(const DtmfEvent& dtmf_event,
size_t num_channels,
- int16_t* output) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ int16_t* output) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Extracts packets from |packet_buffer_| to produce at least
// |required_samples| samples. The packets are inserted into |packet_list|.
// Returns the number of samples that the packets in the list will produce, or
// -1 in case of an error.
int ExtractPackets(size_t required_samples, PacketList* packet_list)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Resets various variables and objects to new values based on the sample rate
// |fs_hz| and |channels| number audio channels.
void SetSampleRateAndChannels(int fs_hz, size_t channels)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns the output type for the audio produced by the latest call to
// GetAudio().
- OutputType LastOutputType() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ OutputType LastOutputType() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Updates Expand and Merge.
virtual void UpdatePlcComponents(int fs_hz, size_t channels)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
- rtc::CriticalSection crit_sect_;
- const std::unique_ptr<TickTimer> tick_timer_ RTC_GUARDED_BY(crit_sect_);
+ mutable Mutex mutex_;
+ const std::unique_ptr<TickTimer> tick_timer_ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<DecoderDatabase> decoder_database_
- RTC_GUARDED_BY(crit_sect_);
- const std::unique_ptr<DtmfBuffer> dtmf_buffer_ RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
+ const std::unique_ptr<DtmfBuffer> dtmf_buffer_ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<DtmfToneGenerator> dtmf_tone_generator_
- RTC_GUARDED_BY(crit_sect_);
- const std::unique_ptr<PacketBuffer> packet_buffer_ RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
+ const std::unique_ptr<PacketBuffer> packet_buffer_ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<RedPayloadSplitter> red_payload_splitter_
- RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<TimestampScaler> timestamp_scaler_
- RTC_GUARDED_BY(crit_sect_);
- const std::unique_ptr<PostDecodeVad> vad_ RTC_GUARDED_BY(crit_sect_);
- const std::unique_ptr<ExpandFactory> expand_factory_
- RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
+ const std::unique_ptr<PostDecodeVad> vad_ RTC_GUARDED_BY(mutex_);
+ const std::unique_ptr<ExpandFactory> expand_factory_ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<AccelerateFactory> accelerate_factory_
- RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
const std::unique_ptr<PreemptiveExpandFactory> preemptive_expand_factory_
- RTC_GUARDED_BY(crit_sect_);
- const std::unique_ptr<StatisticsCalculator> stats_ RTC_GUARDED_BY(crit_sect_);
-
- std::unique_ptr<BackgroundNoise> background_noise_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<NetEqController> controller_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<AudioMultiVector> algorithm_buffer_
- RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<SyncBuffer> sync_buffer_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<Expand> expand_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<Normal> normal_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<Merge> merge_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<Accelerate> accelerate_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<PreemptiveExpand> preemptive_expand_
- RTC_GUARDED_BY(crit_sect_);
- RandomVector random_vector_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<ComfortNoise> comfort_noise_ RTC_GUARDED_BY(crit_sect_);
- int fs_hz_ RTC_GUARDED_BY(crit_sect_);
- int fs_mult_ RTC_GUARDED_BY(crit_sect_);
- int last_output_sample_rate_hz_ RTC_GUARDED_BY(crit_sect_);
- size_t output_size_samples_ RTC_GUARDED_BY(crit_sect_);
- size_t decoder_frame_length_ RTC_GUARDED_BY(crit_sect_);
- Mode last_mode_ RTC_GUARDED_BY(crit_sect_);
- Operation last_operation_ RTC_GUARDED_BY(crit_sect_);
- size_t decoded_buffer_length_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<int16_t[]> decoded_buffer_ RTC_GUARDED_BY(crit_sect_);
- uint32_t playout_timestamp_ RTC_GUARDED_BY(crit_sect_);
- bool new_codec_ RTC_GUARDED_BY(crit_sect_);
- uint32_t timestamp_ RTC_GUARDED_BY(crit_sect_);
- bool reset_decoder_ RTC_GUARDED_BY(crit_sect_);
- absl::optional<uint8_t> current_rtp_payload_type_ RTC_GUARDED_BY(crit_sect_);
- absl::optional<uint8_t> current_cng_rtp_payload_type_
- RTC_GUARDED_BY(crit_sect_);
- bool first_packet_ RTC_GUARDED_BY(crit_sect_);
- bool enable_fast_accelerate_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<NackTracker> nack_ RTC_GUARDED_BY(crit_sect_);
- bool nack_enabled_ RTC_GUARDED_BY(crit_sect_);
- const bool enable_muted_state_ RTC_GUARDED_BY(crit_sect_);
- AudioFrame::VADActivity last_vad_activity_ RTC_GUARDED_BY(crit_sect_) =
+ RTC_GUARDED_BY(mutex_);
+ const std::unique_ptr<StatisticsCalculator> stats_ RTC_GUARDED_BY(mutex_);
+
+ std::unique_ptr<BackgroundNoise> background_noise_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<NetEqController> controller_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<AudioMultiVector> algorithm_buffer_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<SyncBuffer> sync_buffer_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<Expand> expand_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<Normal> normal_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<Merge> merge_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<Accelerate> accelerate_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<PreemptiveExpand> preemptive_expand_ RTC_GUARDED_BY(mutex_);
+ RandomVector random_vector_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<ComfortNoise> comfort_noise_ RTC_GUARDED_BY(mutex_);
+ int fs_hz_ RTC_GUARDED_BY(mutex_);
+ int fs_mult_ RTC_GUARDED_BY(mutex_);
+ int last_output_sample_rate_hz_ RTC_GUARDED_BY(mutex_);
+ size_t output_size_samples_ RTC_GUARDED_BY(mutex_);
+ size_t decoder_frame_length_ RTC_GUARDED_BY(mutex_);
+ Mode last_mode_ RTC_GUARDED_BY(mutex_);
+ Operation last_operation_ RTC_GUARDED_BY(mutex_);
+ size_t decoded_buffer_length_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<int16_t[]> decoded_buffer_ RTC_GUARDED_BY(mutex_);
+ uint32_t playout_timestamp_ RTC_GUARDED_BY(mutex_);
+ bool new_codec_ RTC_GUARDED_BY(mutex_);
+ uint32_t timestamp_ RTC_GUARDED_BY(mutex_);
+ bool reset_decoder_ RTC_GUARDED_BY(mutex_);
+ absl::optional<uint8_t> current_rtp_payload_type_ RTC_GUARDED_BY(mutex_);
+ absl::optional<uint8_t> current_cng_rtp_payload_type_ RTC_GUARDED_BY(mutex_);
+ bool first_packet_ RTC_GUARDED_BY(mutex_);
+ bool enable_fast_accelerate_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<NackTracker> nack_ RTC_GUARDED_BY(mutex_);
+ bool nack_enabled_ RTC_GUARDED_BY(mutex_);
+ const bool enable_muted_state_ RTC_GUARDED_BY(mutex_);
+ AudioFrame::VADActivity last_vad_activity_ RTC_GUARDED_BY(mutex_) =
AudioFrame::kVadPassive;
std::unique_ptr<TickTimer::Stopwatch> generated_noise_stopwatch_
- RTC_GUARDED_BY(crit_sect_);
- std::vector<uint32_t> last_decoded_timestamps_ RTC_GUARDED_BY(crit_sect_);
- std::vector<RtpPacketInfo> last_decoded_packet_infos_
- RTC_GUARDED_BY(crit_sect_);
- ExpandUmaLogger expand_uma_logger_ RTC_GUARDED_BY(crit_sect_);
- ExpandUmaLogger speech_expand_uma_logger_ RTC_GUARDED_BY(crit_sect_);
- bool no_time_stretching_ RTC_GUARDED_BY(crit_sect_); // Only used for test.
- rtc::BufferT<int16_t> concealment_audio_ RTC_GUARDED_BY(crit_sect_);
- const bool enable_rtx_handling_ RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
+ std::vector<uint32_t> last_decoded_timestamps_ RTC_GUARDED_BY(mutex_);
+ std::vector<RtpPacketInfo> last_decoded_packet_infos_ RTC_GUARDED_BY(mutex_);
+ ExpandUmaLogger expand_uma_logger_ RTC_GUARDED_BY(mutex_);
+ ExpandUmaLogger speech_expand_uma_logger_ RTC_GUARDED_BY(mutex_);
+ bool no_time_stretching_ RTC_GUARDED_BY(mutex_); // Only used for test.
+ rtc::BufferT<int16_t> concealment_audio_ RTC_GUARDED_BY(mutex_);
+ const bool enable_rtx_handling_ RTC_GUARDED_BY(mutex_);
+ // Data members used for adding extra delay to the output of NetEq.
+ // The delay in ms (which is 10 times the number of elements in
+ // output_delay_chain_).
+ const int output_delay_chain_ms_ RTC_GUARDED_BY(mutex_);
+ // Vector of AudioFrames which contains the delayed audio. Accessed as a
+ // circular buffer.
+ std::vector<AudioFrame> output_delay_chain_ RTC_GUARDED_BY(mutex_);
+ // Index into output_delay_chain_.
+ size_t output_delay_chain_ix_ RTC_GUARDED_BY(mutex_) = 0;
+ // Did output_delay_chain_ get populated yet?
+ bool output_delay_chain_empty_ RTC_GUARDED_BY(mutex_) = true;
+ // Contains the sample rate of the AudioFrame last emitted from the delay
+ // chain. If the extra output delay chain is not used, or if no audio has been
+ // emitted yet, the variable is empty.
+ absl::optional<int> delayed_last_output_sample_rate_hz_
+ RTC_GUARDED_BY(mutex_);
private:
RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
diff --git a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index d35c44c4c2..a43e52def3 100644
--- a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -42,20 +42,16 @@ using ::testing::SetArgPointee;
class MockAudioDecoder final : public AudioDecoder {
public:
- // TODO(nisse): Valid overrides commented out, because the gmock
- // methods don't use any override declarations, and we want to avoid
- // warnings from -Winconsistent-missing-override. See
- // http://crbug.com/428099.
static const int kPacketDuration = 960; // 48 kHz * 20 ms
MockAudioDecoder(int sample_rate_hz, size_t num_channels)
: sample_rate_hz_(sample_rate_hz),
num_channels_(num_channels),
fec_enabled_(false) {}
- ~MockAudioDecoder() /* override */ { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockAudioDecoder() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD0(Reset, void());
+ MOCK_METHOD(void, Reset, (), (override));
class MockFrame : public AudioDecoder::EncodedAudioFrame {
public:
@@ -83,7 +79,7 @@ class MockAudioDecoder final : public AudioDecoder {
};
std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload,
- uint32_t timestamp) /* override */ {
+ uint32_t timestamp) override {
std::vector<ParseResult> results;
if (fec_enabled_) {
std::unique_ptr<MockFrame> fec_frame(new MockFrame(num_channels_));
@@ -96,23 +92,22 @@ class MockAudioDecoder final : public AudioDecoder {
return results;
}
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const
- /* override */ {
+ int PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) const override {
ADD_FAILURE() << "Since going through ParsePayload, PacketDuration should "
"never get called.";
return kPacketDuration;
}
- bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const
- /* override */ {
+ bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override {
ADD_FAILURE() << "Since going through ParsePayload, PacketHasFec should "
"never get called.";
return fec_enabled_;
}
- int SampleRateHz() const /* override */ { return sample_rate_hz_; }
+ int SampleRateHz() const override { return sample_rate_hz_; }
- size_t Channels() const /* override */ { return num_channels_; }
+ size_t Channels() const override { return num_channels_; }
void set_fec_enabled(bool enable_fec) { fec_enabled_ = enable_fec; }
@@ -123,7 +118,7 @@ class MockAudioDecoder final : public AudioDecoder {
size_t encoded_len,
int sample_rate_hz,
int16_t* decoded,
- SpeechType* speech_type) /* override */ {
+ SpeechType* speech_type) override {
ADD_FAILURE() << "Since going through ParsePayload, DecodeInternal should "
"never get called.";
return -1;
diff --git a/modules/audio_coding/neteq/neteq_unittest.cc b/modules/audio_coding/neteq/neteq_unittest.cc
index d78e2c6488..f5fb647965 100644
--- a/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_unittest.cc
@@ -1102,5 +1102,186 @@ TEST(NetEqNoTimeStretchingMode, RunTest) {
EXPECT_EQ(0, stats.preemptive_rate);
}
+namespace {
+// Helper classes and data types and functions for NetEqOutputDelayTest.
+
+class VectorAudioSink : public AudioSink {
+ public:
+ // Does not take ownership of the vector.
+ VectorAudioSink(std::vector<int16_t>* output_vector) : v_(output_vector) {}
+
+ virtual ~VectorAudioSink() = default;
+
+ bool WriteArray(const int16_t* audio, size_t num_samples) override {
+ v_->reserve(v_->size() + num_samples);
+ for (size_t i = 0; i < num_samples; ++i) {
+ v_->push_back(audio[i]);
+ }
+ return true;
+ }
+
+ private:
+ std::vector<int16_t>* const v_;
+};
+
+struct TestResult {
+ NetEqLifetimeStatistics lifetime_stats;
+ NetEqNetworkStatistics network_stats;
+ absl::optional<uint32_t> playout_timestamp;
+ int target_delay_ms;
+ int filtered_current_delay_ms;
+ int sample_rate_hz;
+};
+
+// This class is used as callback object to NetEqTest to collect some stats
+// at the end of the simulation.
+class SimEndStatsCollector : public NetEqSimulationEndedCallback {
+ public:
+ SimEndStatsCollector(TestResult& result) : result_(result) {}
+
+ void SimulationEnded(int64_t /*simulation_time_ms*/, NetEq* neteq) override {
+ result_.playout_timestamp = neteq->GetPlayoutTimestamp();
+ result_.target_delay_ms = neteq->TargetDelayMs();
+ result_.filtered_current_delay_ms = neteq->FilteredCurrentDelayMs();
+ result_.sample_rate_hz = neteq->last_output_sample_rate_hz();
+ }
+
+ private:
+ TestResult& result_;
+};
+
+TestResult DelayLineNetEqTest(int delay_ms,
+ std::vector<int16_t>* output_vector) {
+ NetEq::Config config;
+ config.for_test_no_time_stretching = true;
+ config.extra_output_delay_ms = delay_ms;
+ auto codecs = NetEqTest::StandardDecoderMap();
+ NetEqPacketSourceInput::RtpHeaderExtensionMap rtp_ext_map = {
+ {1, kRtpExtensionAudioLevel},
+ {3, kRtpExtensionAbsoluteSendTime},
+ {5, kRtpExtensionTransportSequenceNumber},
+ {7, kRtpExtensionVideoContentType},
+ {8, kRtpExtensionVideoTiming}};
+ std::unique_ptr<NetEqInput> input = std::make_unique<NetEqRtpDumpInput>(
+ webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"),
+ rtp_ext_map, absl::nullopt /*No SSRC filter*/);
+ std::unique_ptr<TimeLimitedNetEqInput> input_time_limit(
+ new TimeLimitedNetEqInput(std::move(input), 10000));
+ std::unique_ptr<AudioSink> output =
+ std::make_unique<VectorAudioSink>(output_vector);
+
+ TestResult result;
+ SimEndStatsCollector stats_collector(result);
+ NetEqTest::Callbacks callbacks;
+ callbacks.simulation_ended_callback = &stats_collector;
+
+ NetEqTest test(config, CreateBuiltinAudioDecoderFactory(), codecs,
+ /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
+ /*input=*/std::move(input_time_limit), std::move(output),
+ callbacks);
+ test.Run();
+ result.lifetime_stats = test.LifetimeStats();
+ result.network_stats = test.SimulationStats();
+ return result;
+}
+} // namespace
+
+// Tests the extra output delay functionality of NetEq.
+TEST(NetEqOutputDelayTest, RunTest) {
+ std::vector<int16_t> output;
+ const auto result_no_delay = DelayLineNetEqTest(0, &output);
+ std::vector<int16_t> output_delayed;
+ constexpr int kDelayMs = 100;
+ const auto result_delay = DelayLineNetEqTest(kDelayMs, &output_delayed);
+
+ // Verify that the loss concealment remains unchanged. The point of the delay
+ // is to not affect the jitter buffering behavior.
+ // First verify that there are concealments in the test.
+ EXPECT_GT(result_no_delay.lifetime_stats.concealed_samples, 0u);
+ // And that not all of the output is concealment.
+ EXPECT_GT(result_no_delay.lifetime_stats.total_samples_received,
+ result_no_delay.lifetime_stats.concealed_samples);
+ // Now verify that they remain unchanged by the delay.
+ EXPECT_EQ(result_no_delay.lifetime_stats.concealed_samples,
+ result_delay.lifetime_stats.concealed_samples);
+ // Accelerate and pre-emptive expand should also be unchanged.
+ EXPECT_EQ(result_no_delay.lifetime_stats.inserted_samples_for_deceleration,
+ result_delay.lifetime_stats.inserted_samples_for_deceleration);
+ EXPECT_EQ(result_no_delay.lifetime_stats.removed_samples_for_acceleration,
+ result_delay.lifetime_stats.removed_samples_for_acceleration);
+ // Verify that delay stats are increased with the delay chain.
+ EXPECT_EQ(
+ result_no_delay.lifetime_stats.jitter_buffer_delay_ms +
+ kDelayMs * result_no_delay.lifetime_stats.jitter_buffer_emitted_count,
+ result_delay.lifetime_stats.jitter_buffer_delay_ms);
+ EXPECT_EQ(
+ result_no_delay.lifetime_stats.jitter_buffer_target_delay_ms +
+ kDelayMs * result_no_delay.lifetime_stats.jitter_buffer_emitted_count,
+ result_delay.lifetime_stats.jitter_buffer_target_delay_ms);
+ EXPECT_EQ(result_no_delay.network_stats.current_buffer_size_ms + kDelayMs,
+ result_delay.network_stats.current_buffer_size_ms);
+ EXPECT_EQ(result_no_delay.network_stats.preferred_buffer_size_ms + kDelayMs,
+ result_delay.network_stats.preferred_buffer_size_ms);
+ EXPECT_EQ(result_no_delay.network_stats.mean_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.mean_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.median_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.median_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.min_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.min_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.max_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.max_waiting_time_ms);
+
+ ASSERT_TRUE(result_no_delay.playout_timestamp);
+ ASSERT_TRUE(result_delay.playout_timestamp);
+ EXPECT_EQ(*result_no_delay.playout_timestamp -
+ static_cast<uint32_t>(
+ kDelayMs *
+ rtc::CheckedDivExact(result_no_delay.sample_rate_hz, 1000)),
+ *result_delay.playout_timestamp);
+ EXPECT_EQ(result_no_delay.target_delay_ms + kDelayMs,
+ result_delay.target_delay_ms);
+ EXPECT_EQ(result_no_delay.filtered_current_delay_ms + kDelayMs,
+ result_delay.filtered_current_delay_ms);
+
+ // Verify expected delay in decoded signal. The test vector uses 8 kHz sample
+ // rate, so the delay will be 8 times the delay in ms.
+ constexpr size_t kExpectedDelaySamples = kDelayMs * 8;
+ for (size_t i = 0;
+ i < output.size() && i + kExpectedDelaySamples < output_delayed.size();
+ ++i) {
+ EXPECT_EQ(output[i], output_delayed[i + kExpectedDelaySamples]);
+ }
+}
+
+// Tests the extra output delay functionality of NetEq when configured via
+// field trial.
+TEST(NetEqOutputDelayTest, RunTestWithFieldTrial) {
+ test::ScopedFieldTrials field_trial(
+ "WebRTC-Audio-NetEqExtraDelay/Enabled-50/");
+ constexpr int kExpectedDelayMs = 50;
+ std::vector<int16_t> output;
+ const auto result = DelayLineNetEqTest(0, &output);
+
+ // The base delay values are taken from the resuts of the non-delayed case in
+ // NetEqOutputDelayTest.RunTest above.
+ EXPECT_EQ(10 + kExpectedDelayMs, result.target_delay_ms);
+ EXPECT_EQ(24 + kExpectedDelayMs, result.filtered_current_delay_ms);
+}
+
+// Set a non-multiple-of-10 value in the field trial, and verify that we don't
+// crash, and that the result is rounded down.
+TEST(NetEqOutputDelayTest, RunTestWithFieldTrialOddValue) {
+ test::ScopedFieldTrials field_trial(
+ "WebRTC-Audio-NetEqExtraDelay/Enabled-103/");
+ constexpr int kRoundedDelayMs = 100;
+ std::vector<int16_t> output;
+ const auto result = DelayLineNetEqTest(0, &output);
+
+ // The base delay values are taken from the resuts of the non-delayed case in
+ // NetEqOutputDelayTest.RunTest above.
+ EXPECT_EQ(10 + kRoundedDelayMs, result.target_delay_ms);
+ EXPECT_EQ(24 + kRoundedDelayMs, result.filtered_current_delay_ms);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/modules/audio_coding/neteq/packet_buffer_unittest.cc b/modules/audio_coding/neteq/packet_buffer_unittest.cc
index 0aff955fd7..40e7d5371a 100644
--- a/modules/audio_coding/neteq/packet_buffer_unittest.cc
+++ b/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -31,13 +31,14 @@ using ::testing::StrictMock;
namespace {
class MockEncodedAudioFrame : public webrtc::AudioDecoder::EncodedAudioFrame {
public:
- MOCK_CONST_METHOD0(Duration, size_t());
+ MOCK_METHOD(size_t, Duration, (), (const, override));
- MOCK_CONST_METHOD0(IsDtxPacket, bool());
+ MOCK_METHOD(bool, IsDtxPacket, (), (const, override));
- MOCK_CONST_METHOD1(
- Decode,
- absl::optional<DecodeResult>(rtc::ArrayView<int16_t> decoded));
+ MOCK_METHOD(absl::optional<DecodeResult>,
+ Decode,
+ (rtc::ArrayView<int16_t> decoded),
+ (const, override));
};
// Helper class to generate packets. Packets must be deleted by the user.
diff --git a/modules/audio_coding/neteq/time_stretch.cc b/modules/audio_coding/neteq/time_stretch.cc
index 560d9be56d..ba24e0bfc3 100644
--- a/modules/audio_coding/neteq/time_stretch.cc
+++ b/modules/audio_coding/neteq/time_stretch.cc
@@ -43,7 +43,7 @@ TimeStretch::ReturnCodes TimeStretch::Process(const int16_t* input,
signal_len = input_len / num_channels_;
signal_array.reset(new int16_t[signal_len]);
signal = signal_array.get();
- size_t j = master_channel_;
+ size_t j = kRefChannel;
for (size_t i = 0; i < signal_len; ++i) {
signal_array[i] = input[j];
j += num_channels_;
@@ -187,7 +187,7 @@ bool TimeStretch::SpeechDetection(int32_t vec1_energy,
(static_cast<int64_t>(vec1_energy) + vec2_energy) / 16);
int32_t right_side;
if (background_noise_.initialized()) {
- right_side = background_noise_.Energy(master_channel_);
+ right_side = background_noise_.Energy(kRefChannel);
} else {
// If noise parameters have not been estimated, use a fixed threshold.
right_side = 75000;
diff --git a/modules/audio_coding/neteq/time_stretch.h b/modules/audio_coding/neteq/time_stretch.h
index 9f866493d9..aede9cadf3 100644
--- a/modules/audio_coding/neteq/time_stretch.h
+++ b/modules/audio_coding/neteq/time_stretch.h
@@ -40,13 +40,11 @@ class TimeStretch {
: sample_rate_hz_(sample_rate_hz),
fs_mult_(sample_rate_hz / 8000),
num_channels_(num_channels),
- master_channel_(0), // First channel is master.
background_noise_(background_noise),
max_input_value_(0) {
assert(sample_rate_hz_ == 8000 || sample_rate_hz_ == 16000 ||
sample_rate_hz_ == 32000 || sample_rate_hz_ == 48000);
assert(num_channels_ > 0);
- assert(master_channel_ < num_channels_);
memset(auto_correlation_, 0, sizeof(auto_correlation_));
}
@@ -86,11 +84,11 @@ class TimeStretch {
static const size_t kMaxLag = 60;
static const size_t kDownsampledLen = kCorrelationLen + kMaxLag;
static const int kCorrelationThreshold = 14746; // 0.9 in Q14.
+ static constexpr size_t kRefChannel = 0; // First channel is reference.
const int sample_rate_hz_;
const int fs_mult_; // Sample rate multiplier = sample_rate_hz_ / 8000.
const size_t num_channels_;
- const size_t master_channel_;
const BackgroundNoise& background_noise_;
int16_t max_input_value_;
int16_t downsampled_input_[kDownsampledLen];
diff --git a/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc b/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
index 3f06b1cfc4..337f54ed6e 100644
--- a/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
+++ b/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
@@ -33,7 +33,8 @@ NetEqStatsPlotter::NetEqStatsPlotter(bool make_matlab_plot,
stats_getter_.reset(new NetEqStatsGetter(std::move(delay_analyzer)));
}
-void NetEqStatsPlotter::SimulationEnded(int64_t simulation_time_ms) {
+void NetEqStatsPlotter::SimulationEnded(int64_t simulation_time_ms,
+ NetEq* /*neteq*/) {
if (make_matlab_plot_) {
auto matlab_script_name = base_file_name_;
std::replace(matlab_script_name.begin(), matlab_script_name.end(), '.',
diff --git a/modules/audio_coding/neteq/tools/neteq_stats_plotter.h b/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
index c4df24e073..d6918670fd 100644
--- a/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
+++ b/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
@@ -28,7 +28,7 @@ class NetEqStatsPlotter : public NetEqSimulationEndedCallback {
bool show_concealment_events,
std::string base_file_name);
- void SimulationEnded(int64_t simulation_time_ms) override;
+ void SimulationEnded(int64_t simulation_time_ms, NetEq* neteq) override;
NetEqStatsGetter* stats_getter() { return stats_getter_.get(); }
diff --git a/modules/audio_coding/neteq/tools/neteq_test.cc b/modules/audio_coding/neteq/tools/neteq_test.cc
index f8b6161a98..a263a73721 100644
--- a/modules/audio_coding/neteq/tools/neteq_test.cc
+++ b/modules/audio_coding/neteq/tools/neteq_test.cc
@@ -91,7 +91,8 @@ int64_t NetEqTest::Run() {
simulation_time += step_result.simulation_step_ms;
} while (!step_result.is_simulation_finished);
if (callbacks_.simulation_ended_callback) {
- callbacks_.simulation_ended_callback->SimulationEnded(simulation_time);
+ callbacks_.simulation_ended_callback->SimulationEnded(simulation_time,
+ neteq_.get());
}
return simulation_time;
}
diff --git a/modules/audio_coding/neteq/tools/neteq_test.h b/modules/audio_coding/neteq/tools/neteq_test.h
index 0a6c24f3d6..3b787a6cfb 100644
--- a/modules/audio_coding/neteq/tools/neteq_test.h
+++ b/modules/audio_coding/neteq/tools/neteq_test.h
@@ -61,7 +61,7 @@ class NetEqGetAudioCallback {
class NetEqSimulationEndedCallback {
public:
virtual ~NetEqSimulationEndedCallback() = default;
- virtual void SimulationEnded(int64_t simulation_time_ms) = 0;
+ virtual void SimulationEnded(int64_t simulation_time_ms, NetEq* neteq) = 0;
};
// Class that provides an input--output test for NetEq. The input (both packets
diff --git a/modules/audio_coding/neteq/tools/neteq_test_factory.h b/modules/audio_coding/neteq/tools/neteq_test_factory.h
index b8ba8cc537..fdfe650b94 100644
--- a/modules/audio_coding/neteq/tools/neteq_test_factory.h
+++ b/modules/audio_coding/neteq/tools/neteq_test_factory.h
@@ -121,7 +121,7 @@ class NetEqTestFactory {
// Prints concealment events.
bool concealment_events = false;
// Maximum allowed number of packets in the buffer.
- static constexpr int default_max_nr_packets_in_buffer() { return 50; }
+ static constexpr int default_max_nr_packets_in_buffer() { return 200; }
int max_nr_packets_in_buffer = default_max_nr_packets_in_buffer();
// Number of dummy packets to put in the packet buffer at the start of the
// simulation.
diff --git a/modules/audio_coding/test/Channel.cc b/modules/audio_coding/test/Channel.cc
index 3590891e42..9456145d8c 100644
--- a/modules/audio_coding/test/Channel.cc
+++ b/modules/audio_coding/test/Channel.cc
@@ -58,7 +58,7 @@ int32_t Channel::SendData(AudioFrameType frameType,
}
}
- _channelCritSect.Enter();
+ _channelCritSect.Lock();
if (_saveBitStream) {
// fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
}
@@ -69,7 +69,7 @@ int32_t Channel::SendData(AudioFrameType frameType,
_useLastFrameSize = false;
_lastInTimestamp = timeStamp;
_totalBytes += payloadDataSize;
- _channelCritSect.Leave();
+ _channelCritSect.Unlock();
if (_useFECTestWithPacketLoss) {
_packetLoss += 1;
@@ -238,7 +238,7 @@ void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
void Channel::ResetStats() {
int n;
int k;
- _channelCritSect.Enter();
+ _channelCritSect.Lock();
_lastPayloadType = -1;
for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
_payloadStats[n].payloadType = -1;
@@ -253,23 +253,23 @@ void Channel::ResetStats() {
}
_beginTime = rtc::TimeMillis();
_totalBytes = 0;
- _channelCritSect.Leave();
+ _channelCritSect.Unlock();
}
uint32_t Channel::LastInTimestamp() {
uint32_t timestamp;
- _channelCritSect.Enter();
+ _channelCritSect.Lock();
timestamp = _lastInTimestamp;
- _channelCritSect.Leave();
+ _channelCritSect.Unlock();
return timestamp;
}
double Channel::BitRate() {
double rate;
uint64_t currTime = rtc::TimeMillis();
- _channelCritSect.Enter();
+ _channelCritSect.Lock();
rate = ((double)_totalBytes * 8.0) / (double)(currTime - _beginTime);
- _channelCritSect.Leave();
+ _channelCritSect.Unlock();
return rate;
}
diff --git a/modules/audio_coding/test/Channel.h b/modules/audio_coding/test/Channel.h
index 78129e5a54..7a8829e1d2 100644
--- a/modules/audio_coding/test/Channel.h
+++ b/modules/audio_coding/test/Channel.h
@@ -15,7 +15,7 @@
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/include/module_common_types.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -88,7 +88,7 @@ class Channel : public AudioPacketizationCallback {
// 60msec * 32 sample(max)/msec * 2 description (maybe) * 2 bytes/sample
uint8_t _payloadData[60 * 32 * 2 * 2];
- rtc::CriticalSection _channelCritSect;
+ Mutex _channelCritSect;
FILE* _bitStreamFile;
bool _saveBitStream;
int16_t _lastPayloadType;
diff --git a/modules/audio_coding/test/TestVADDTX.cc b/modules/audio_coding/test/TestVADDTX.cc
index c493e64ee0..dce5433ed8 100644
--- a/modules/audio_coding/test/TestVADDTX.cc
+++ b/modules/audio_coding/test/TestVADDTX.cc
@@ -166,11 +166,13 @@ void TestVadDtx::Run(std::string in_filename,
int i = &st - stats; // Calculate the current position in stats.
switch (expects[i]) {
case 0: {
- EXPECT_EQ(0u, st) << "stats[" << i << "] error.";
+ EXPECT_EQ(0u, st) << "stats[" << i << "] error. Output file "
+ << out_filename;
break;
}
case 1: {
- EXPECT_GT(st, 0u) << "stats[" << i << "] error.";
+ EXPECT_GT(st, 0u) << "stats[" << i << "] error. Output file "
+ << out_filename;
break;
}
}
@@ -189,25 +191,29 @@ void TestWebRtcVadDtx::Perform() {
// Test various configurations on VAD/DTX.
void TestWebRtcVadDtx::RunTestCases(const SdpAudioFormat& codec_format) {
+ RegisterCodec(codec_format, absl::nullopt);
Test(/*new_outfile=*/true,
- /*expect_dtx_enabled=*/RegisterCodec(codec_format, absl::nullopt));
+ /*expect_vad_packets=*/codec_format.name == "opus");
+ RegisterCodec(codec_format, Vad::kVadAggressive);
Test(/*new_outfile=*/false,
- /*expect_dtx_enabled=*/RegisterCodec(codec_format, Vad::kVadAggressive));
+ /*expect_vad_packets=*/true);
+ RegisterCodec(codec_format, Vad::kVadLowBitrate);
Test(/*new_outfile=*/false,
- /*expect_dtx_enabled=*/RegisterCodec(codec_format, Vad::kVadLowBitrate));
+ /*expect_vad_packets=*/true);
- Test(/*new_outfile=*/false, /*expect_dtx_enabled=*/RegisterCodec(
- codec_format, Vad::kVadVeryAggressive));
+ RegisterCodec(codec_format, Vad::kVadVeryAggressive);
+ Test(/*new_outfile=*/false, /*expect_vad_packets=*/true);
+ RegisterCodec(codec_format, Vad::kVadNormal);
Test(/*new_outfile=*/false,
- /*expect_dtx_enabled=*/RegisterCodec(codec_format, Vad::kVadNormal));
+ /*expect_vad_packets=*/true);
}
// Set the expectation and run the test.
-void TestWebRtcVadDtx::Test(bool new_outfile, bool expect_dtx_enabled) {
- int expects[] = {-1, 1, expect_dtx_enabled, 0, 0};
+void TestWebRtcVadDtx::Test(bool new_outfile, bool expect_vad_packets) {
+ int expects[] = {-1, 1, expect_vad_packets ? 1 : -1, 0, 0};
if (new_outfile) {
output_file_num_++;
}
@@ -220,16 +226,20 @@ void TestWebRtcVadDtx::Test(bool new_outfile, bool expect_dtx_enabled) {
// Following is the implementation of TestOpusDtx.
void TestOpusDtx::Perform() {
- int expects[] = {0, 1, 0, 0, 0};
+ int expects[] = {0, 0, 0, 0, 0};
// Register Opus as send codec
std::string out_filename =
webrtc::test::OutputPath() + "testOpusDtx_outFile_mono.pcm";
RegisterCodec({"opus", 48000, 2}, absl::nullopt);
+
acm_send_->ModifyEncoder([](std::unique_ptr<AudioEncoder>* encoder_ptr) {
(*encoder_ptr)->SetDtx(false);
});
+ expects[static_cast<int>(AudioFrameType::kEmptyFrame)] = 0;
+ expects[static_cast<int>(AudioFrameType::kAudioFrameSpeech)] = 1;
+ expects[static_cast<int>(AudioFrameType::kAudioFrameCN)] = 1;
Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"), 32000, 1,
out_filename, false, expects);
@@ -237,6 +247,7 @@ void TestOpusDtx::Perform() {
(*encoder_ptr)->SetDtx(true);
});
expects[static_cast<int>(AudioFrameType::kEmptyFrame)] = 1;
+ expects[static_cast<int>(AudioFrameType::kAudioFrameSpeech)] = 1;
expects[static_cast<int>(AudioFrameType::kAudioFrameCN)] = 1;
Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"), 32000, 1,
out_filename, true, expects);
@@ -244,19 +255,26 @@ void TestOpusDtx::Perform() {
// Register stereo Opus as send codec
out_filename = webrtc::test::OutputPath() + "testOpusDtx_outFile_stereo.pcm";
RegisterCodec({"opus", 48000, 2, {{"stereo", "1"}}}, absl::nullopt);
+
acm_send_->ModifyEncoder([](std::unique_ptr<AudioEncoder>* encoder_ptr) {
(*encoder_ptr)->SetDtx(false);
});
expects[static_cast<int>(AudioFrameType::kEmptyFrame)] = 0;
+ expects[static_cast<int>(AudioFrameType::kAudioFrameSpeech)] = 1;
expects[static_cast<int>(AudioFrameType::kAudioFrameCN)] = 0;
Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"), 32000,
2, out_filename, false, expects);
acm_send_->ModifyEncoder([](std::unique_ptr<AudioEncoder>* encoder_ptr) {
(*encoder_ptr)->SetDtx(true);
+ // The default bitrate will not generate frames recognized as CN on desktop
+ // since the frames will be encoded as CELT. Set a low target bitrate to get
+ // consistent behaviour across platforms.
+ (*encoder_ptr)->OnReceivedTargetAudioBitrate(24000);
});
expects[static_cast<int>(AudioFrameType::kEmptyFrame)] = 1;
+ expects[static_cast<int>(AudioFrameType::kAudioFrameSpeech)] = 1;
expects[static_cast<int>(AudioFrameType::kAudioFrameCN)] = 1;
Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"), 32000,
2, out_filename, true, expects);
diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn
index 2ce0ae20e8..cee89a9d9b 100644
--- a/modules/audio_device/BUILD.gn
+++ b/modules/audio_device/BUILD.gn
@@ -73,6 +73,7 @@ rtc_library("audio_device_buffer") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:metrics",
]
@@ -144,8 +145,8 @@ rtc_source_set("audio_device_module_from_input_and_output") {
"../../api/task_queue",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -169,6 +170,7 @@ rtc_library("audio_device_impl") {
"../../rtc_base:deprecation",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/system:arch",
"../../rtc_base/system:file_wrapper",
"../../rtc_base/task_utils:repeating_task",
@@ -331,7 +333,7 @@ rtc_library("audio_device_impl") {
if (is_mac) {
rtc_source_set("audio_device_impl_frameworks") {
visibility = [ ":*" ]
- libs = [
+ frameworks = [
# Needed for CoreGraphics:
"ApplicationServices.framework",
@@ -381,12 +383,13 @@ if (rtc_include_tests) {
"../../rtc_base:checks",
"../../rtc_base:ignore_wundef",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../test:fileutils",
"../../test:test_support",
"../utility",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_linux || is_mac || is_win) {
sources += [ "audio_device_unittest.cc" ]
}
diff --git a/modules/audio_device/android/audio_device_unittest.cc b/modules/audio_device/android/audio_device_unittest.cc
index e2c6800f38..20c36c7c31 100644
--- a/modules/audio_device/android/audio_device_unittest.cc
+++ b/modules/audio_device/android/audio_device_unittest.cc
@@ -28,9 +28,9 @@
#include "modules/audio_device/audio_device_impl.h"
#include "modules/audio_device/include/mock_audio_transport.h"
#include "rtc_base/arraysize.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/format_macros.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -182,7 +182,7 @@ class FifoAudioStream : public AudioStreamInterface {
}
int16_t* memory = new int16_t[frames_per_buffer_];
memcpy(static_cast<int16_t*>(&memory[0]), source, bytes_per_buffer_);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
fifo_->push_back(memory);
const size_t size = fifo_->size();
if (size > largest_size_) {
@@ -198,7 +198,7 @@ class FifoAudioStream : public AudioStreamInterface {
void Read(void* destination, size_t num_frames) override {
ASSERT_EQ(num_frames, frames_per_buffer_);
PRINTD("-");
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (fifo_->empty()) {
memset(destination, 0, bytes_per_buffer_);
} else {
@@ -229,7 +229,7 @@ class FifoAudioStream : public AudioStreamInterface {
}
using AudioBufferList = std::list<int16_t*>;
- rtc::CriticalSection lock_;
+ Mutex lock_;
const size_t frames_per_buffer_;
const size_t bytes_per_buffer_;
std::unique_ptr<AudioBufferList> fifo_;
diff --git a/modules/audio_device/android/audio_track_jni.cc b/modules/audio_device/android/audio_track_jni.cc
index 776f0cfd70..daaeeca1ea 100644
--- a/modules/audio_device/android/audio_track_jni.cc
+++ b/modules/audio_device/android/audio_track_jni.cc
@@ -19,6 +19,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -27,13 +28,15 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_track)
: audio_track_(std::move(audio_track)),
- init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
+ init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
get_stream_max_volume_(
native_reg->GetMethodId("getStreamMaxVolume", "()I")),
- get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
+ get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
+ get_buffer_size_in_frames_(
+ native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
@@ -45,8 +48,29 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
- return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
- buffer_size_factor);
+ int requested_buffer_size_bytes = audio_track_->CallIntMethod(
+ init_playout_, sample_rate, channels, buffer_size_factor);
+ // Update UMA histograms for both the requested and actual buffer size.
+ if (requested_buffer_size_bytes >= 0) {
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ audio_track_->CallIntMethod(get_buffer_size_in_frames_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+ return true;
+ }
+ return false;
}
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
diff --git a/modules/audio_device/android/audio_track_jni.h b/modules/audio_device/android/audio_track_jni.h
index 6303d754c8..529a9013e8 100644
--- a/modules/audio_device/android/audio_track_jni.h
+++ b/modules/audio_device/android/audio_track_jni.h
@@ -62,6 +62,7 @@ class AudioTrackJni {
jmethodID set_stream_volume_;
jmethodID get_stream_max_volume_;
jmethodID get_stream_volume_;
+ jmethodID get_buffer_size_in_frames_;
};
explicit AudioTrackJni(AudioManager* audio_manager);
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 3023c99fa2..7e6ad5acf4 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -215,7 +215,7 @@ public class WebRtcAudioTrack {
}
}
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -244,14 +244,14 @@ public class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
// Ensure that prevision audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -273,7 +273,7 @@ public class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -282,11 +282,11 @@ public class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
private boolean startPlayout() {
@@ -433,6 +433,13 @@ public class WebRtcAudioTrack {
}
}
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= 23) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
private void logBufferCapacityInFrames() {
if (Build.VERSION.SDK_INT >= 24) {
Logging.d(TAG,
diff --git a/modules/audio_device/audio_device_buffer.cc b/modules/audio_device/audio_device_buffer.cc
index 336846ed11..8d3637308a 100644
--- a/modules/audio_device/audio_device_buffer.cc
+++ b/modules/audio_device/audio_device_buffer.cc
@@ -386,7 +386,7 @@ void AudioDeviceBuffer::LogStats(LogState state) {
Stats stats;
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
stats = stats_;
stats_.max_rec_level = 0;
stats_.max_play_level = 0;
@@ -468,20 +468,20 @@ void AudioDeviceBuffer::LogStats(LogState state) {
void AudioDeviceBuffer::ResetRecStats() {
RTC_DCHECK_RUN_ON(&task_queue_);
last_stats_.ResetRecStats();
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
stats_.ResetRecStats();
}
void AudioDeviceBuffer::ResetPlayStats() {
RTC_DCHECK_RUN_ON(&task_queue_);
last_stats_.ResetPlayStats();
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
stats_.ResetPlayStats();
}
void AudioDeviceBuffer::UpdateRecStats(int16_t max_abs,
size_t samples_per_channel) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
++stats_.rec_callbacks;
stats_.rec_samples += samples_per_channel;
if (max_abs > stats_.max_rec_level) {
@@ -491,7 +491,7 @@ void AudioDeviceBuffer::UpdateRecStats(int16_t max_abs,
void AudioDeviceBuffer::UpdatePlayStats(int16_t max_abs,
size_t samples_per_channel) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
++stats_.play_callbacks;
stats_.play_samples += samples_per_channel;
if (max_abs > stats_.max_play_level) {
diff --git a/modules/audio_device/audio_device_buffer.h b/modules/audio_device/audio_device_buffer.h
index 579cf53603..37b8a2ec5e 100644
--- a/modules/audio_device/audio_device_buffer.h
+++ b/modules/audio_device/audio_device_buffer.h
@@ -19,7 +19,7 @@
#include "api/task_queue/task_queue_factory.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -142,7 +142,7 @@ class AudioDeviceBuffer {
// Main thread on which this object is created.
rtc::ThreadChecker main_thread_checker_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
// Task queue used to invoke LogStats() periodically. Tasks are executed on a
// worker thread but it does not necessarily have to be the same thread for
diff --git a/modules/audio_device/audio_device_unittest.cc b/modules/audio_device/audio_device_unittest.cc
index e896453640..c8cb438158 100644
--- a/modules/audio_device/audio_device_unittest.cc
+++ b/modules/audio_device/audio_device_unittest.cc
@@ -25,11 +25,11 @@
#include "modules/audio_device/include/mock_audio_transport.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/time_utils.h"
@@ -39,6 +39,7 @@
#ifdef WEBRTC_WIN
#include "modules/audio_device/include/audio_device_factory.h"
#include "modules/audio_device/win/core_audio_utility_win.h"
+
#endif
using ::testing::_;
@@ -137,7 +138,7 @@ class FifoAudioStream : public AudioStream {
void Write(rtc::ArrayView<const int16_t> source) override {
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
const size_t size = [&] {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
fifo_.push_back(Buffer16(source.data(), source.size()));
return fifo_.size();
}();
@@ -152,7 +153,7 @@ class FifoAudioStream : public AudioStream {
}
void Read(rtc::ArrayView<int16_t> destination) override {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (fifo_.empty()) {
std::fill(destination.begin(), destination.end(), 0);
} else {
@@ -183,7 +184,7 @@ class FifoAudioStream : public AudioStream {
}
size_t size() const {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
return fifo_.size();
}
@@ -199,7 +200,7 @@ class FifoAudioStream : public AudioStream {
using Buffer16 = rtc::BufferT<int16_t>;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
rtc::RaceChecker race_checker_;
std::list<Buffer16> fifo_ RTC_GUARDED_BY(lock_);
@@ -230,7 +231,7 @@ class LatencyAudioStream : public AudioStream {
if (read_count_ % (kNumCallbacksPerSecond / kImpulseFrequencyInHz) == 0) {
PRINT(".");
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (!pulse_time_) {
pulse_time_ = rtc::TimeMillis();
}
@@ -245,7 +246,7 @@ class LatencyAudioStream : public AudioStream {
void Write(rtc::ArrayView<const int16_t> source) override {
RTC_DCHECK_RUN_ON(&write_thread_checker_);
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
write_count_++;
if (!pulse_time_) {
// Avoid detection of new impulse response until a new impulse has
@@ -315,7 +316,7 @@ class LatencyAudioStream : public AudioStream {
max_latency(), average_latency());
}
- rtc::CriticalSection lock_;
+ Mutex lock_;
rtc::RaceChecker race_checker_;
rtc::ThreadChecker read_thread_checker_;
rtc::ThreadChecker write_thread_checker_;
@@ -390,7 +391,7 @@ class MockAudioTransport : public test::MockAudioTransport {
record_parameters_.frames_per_10ms_buffer());
}
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
rec_count_++;
}
// Write audio data to audio stream object if one has been injected.
@@ -430,7 +431,7 @@ class MockAudioTransport : public test::MockAudioTransport {
playout_parameters_.frames_per_10ms_buffer());
}
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
play_count_++;
}
samples_out = samples_per_channel * channels;
@@ -453,14 +454,14 @@ class MockAudioTransport : public test::MockAudioTransport {
bool ReceivedEnoughCallbacks() {
bool recording_done = false;
if (rec_mode()) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
recording_done = rec_count_ >= num_callbacks_;
} else {
recording_done = true;
}
bool playout_done = false;
if (play_mode()) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
playout_done = play_count_ >= num_callbacks_;
} else {
playout_done = true;
@@ -479,7 +480,7 @@ class MockAudioTransport : public test::MockAudioTransport {
}
void ResetCallbackCounters() {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (play_mode()) {
play_count_ = 0;
}
@@ -489,7 +490,7 @@ class MockAudioTransport : public test::MockAudioTransport {
}
private:
- rtc::CriticalSection lock_;
+ Mutex lock_;
TransportType type_ = TransportType::kInvalid;
rtc::Event* event_ = nullptr;
AudioStream* audio_stream_ = nullptr;
diff --git a/modules/audio_device/dummy/file_audio_device.cc b/modules/audio_device/dummy/file_audio_device.cc
index 60ff9907bf..c68e7bba1a 100644
--- a/modules/audio_device/dummy/file_audio_device.cc
+++ b/modules/audio_device/dummy/file_audio_device.cc
@@ -139,7 +139,7 @@ int32_t FileAudioDevice::PlayoutIsAvailable(bool& available) {
}
int32_t FileAudioDevice::InitPlayout() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
@@ -169,7 +169,7 @@ int32_t FileAudioDevice::RecordingIsAvailable(bool& available) {
}
int32_t FileAudioDevice::InitRecording() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -228,7 +228,7 @@ int32_t FileAudioDevice::StartPlayout() {
int32_t FileAudioDevice::StopPlayout() {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_playing = false;
}
@@ -238,7 +238,7 @@ int32_t FileAudioDevice::StopPlayout() {
_ptrThreadPlay.reset();
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_playoutFramesLeft = 0;
delete[] _playoutBuffer;
@@ -289,7 +289,7 @@ int32_t FileAudioDevice::StartRecording() {
int32_t FileAudioDevice::StopRecording() {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_recording = false;
}
@@ -298,7 +298,7 @@ int32_t FileAudioDevice::StopRecording() {
_ptrThreadRec.reset();
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_recordingFramesLeft = 0;
if (_recordingBuffer) {
delete[] _recordingBuffer;
@@ -426,7 +426,7 @@ int32_t FileAudioDevice::PlayoutDelay(uint16_t& delayMS) const {
}
void FileAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_ptrAudioBuffer = audioBuffer;
@@ -456,13 +456,13 @@ bool FileAudioDevice::PlayThreadProcess() {
return false;
}
int64_t currentTime = rtc::TimeMillis();
- _critSect.Enter();
+ mutex_.Lock();
if (_lastCallPlayoutMillis == 0 ||
currentTime - _lastCallPlayoutMillis >= 10) {
- _critSect.Leave();
+ mutex_.Unlock();
_ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS);
- _critSect.Enter();
+ mutex_.Lock();
_playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer);
RTC_DCHECK_EQ(_playoutFramesIn10MS, _playoutFramesLeft);
@@ -472,7 +472,7 @@ bool FileAudioDevice::PlayThreadProcess() {
_lastCallPlayoutMillis = currentTime;
}
_playoutFramesLeft = 0;
- _critSect.Leave();
+ mutex_.Unlock();
int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime;
if (deltaTimeMillis < 10) {
@@ -488,7 +488,7 @@ bool FileAudioDevice::RecThreadProcess() {
}
int64_t currentTime = rtc::TimeMillis();
- _critSect.Enter();
+ mutex_.Lock();
if (_lastCallRecordMillis == 0 || currentTime - _lastCallRecordMillis >= 10) {
if (_inputFile.is_open()) {
@@ -499,13 +499,13 @@ bool FileAudioDevice::RecThreadProcess() {
_inputFile.Rewind();
}
_lastCallRecordMillis = currentTime;
- _critSect.Leave();
+ mutex_.Unlock();
_ptrAudioBuffer->DeliverRecordedData();
- _critSect.Enter();
+ mutex_.Lock();
}
}
- _critSect.Leave();
+ mutex_.Unlock();
int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime;
if (deltaTimeMillis < 10) {
diff --git a/modules/audio_device/dummy/file_audio_device.h b/modules/audio_device/dummy/file_audio_device.h
index 719d9a3782..ecb3f2f533 100644
--- a/modules/audio_device/dummy/file_audio_device.h
+++ b/modules/audio_device/dummy/file_audio_device.h
@@ -17,7 +17,7 @@
#include <string>
#include "modules/audio_device/audio_device_generic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/time_utils.h"
@@ -139,7 +139,7 @@ class FileAudioDevice : public AudioDeviceGeneric {
int8_t* _playoutBuffer; // In bytes.
uint32_t _recordingFramesLeft;
uint32_t _playoutFramesLeft;
- rtc::CriticalSection _critSect;
+ Mutex mutex_;
size_t _recordingBufferSizeIn10MS;
size_t _recordingFramesIn10MS;
diff --git a/modules/audio_device/include/mock_audio_device.h b/modules/audio_device/include/mock_audio_device.h
index a05e64e6c9..0ca19de156 100644
--- a/modules/audio_device/include/mock_audio_device.h
+++ b/modules/audio_device/include/mock_audio_device.h
@@ -32,76 +32,123 @@ class MockAudioDeviceModule : public AudioDeviceModule {
}
// AudioDeviceModule.
- MOCK_CONST_METHOD1(ActiveAudioLayer, int32_t(AudioLayer* audioLayer));
- MOCK_METHOD1(RegisterAudioCallback, int32_t(AudioTransport* audioCallback));
- MOCK_METHOD0(Init, int32_t());
- MOCK_METHOD0(Terminate, int32_t());
- MOCK_CONST_METHOD0(Initialized, bool());
- MOCK_METHOD0(PlayoutDevices, int16_t());
- MOCK_METHOD0(RecordingDevices, int16_t());
- MOCK_METHOD3(PlayoutDeviceName,
- int32_t(uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]));
- MOCK_METHOD3(RecordingDeviceName,
- int32_t(uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]));
- MOCK_METHOD1(SetPlayoutDevice, int32_t(uint16_t index));
- MOCK_METHOD1(SetPlayoutDevice, int32_t(WindowsDeviceType device));
- MOCK_METHOD1(SetRecordingDevice, int32_t(uint16_t index));
- MOCK_METHOD1(SetRecordingDevice, int32_t(WindowsDeviceType device));
- MOCK_METHOD1(PlayoutIsAvailable, int32_t(bool* available));
- MOCK_METHOD0(InitPlayout, int32_t());
- MOCK_CONST_METHOD0(PlayoutIsInitialized, bool());
- MOCK_METHOD1(RecordingIsAvailable, int32_t(bool* available));
- MOCK_METHOD0(InitRecording, int32_t());
- MOCK_CONST_METHOD0(RecordingIsInitialized, bool());
- MOCK_METHOD0(StartPlayout, int32_t());
- MOCK_METHOD0(StopPlayout, int32_t());
- MOCK_CONST_METHOD0(Playing, bool());
- MOCK_METHOD0(StartRecording, int32_t());
- MOCK_METHOD0(StopRecording, int32_t());
- MOCK_CONST_METHOD0(Recording, bool());
- MOCK_METHOD1(SetAGC, int32_t(bool enable));
- MOCK_CONST_METHOD0(AGC, bool());
- MOCK_METHOD0(InitSpeaker, int32_t());
- MOCK_CONST_METHOD0(SpeakerIsInitialized, bool());
- MOCK_METHOD0(InitMicrophone, int32_t());
- MOCK_CONST_METHOD0(MicrophoneIsInitialized, bool());
- MOCK_METHOD1(SpeakerVolumeIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetSpeakerVolume, int32_t(uint32_t volume));
- MOCK_CONST_METHOD1(SpeakerVolume, int32_t(uint32_t* volume));
- MOCK_CONST_METHOD1(MaxSpeakerVolume, int32_t(uint32_t* maxVolume));
- MOCK_CONST_METHOD1(MinSpeakerVolume, int32_t(uint32_t* minVolume));
- MOCK_METHOD1(MicrophoneVolumeIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetMicrophoneVolume, int32_t(uint32_t volume));
- MOCK_CONST_METHOD1(MicrophoneVolume, int32_t(uint32_t* volume));
- MOCK_CONST_METHOD1(MaxMicrophoneVolume, int32_t(uint32_t* maxVolume));
- MOCK_CONST_METHOD1(MinMicrophoneVolume, int32_t(uint32_t* minVolume));
- MOCK_METHOD1(SpeakerMuteIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetSpeakerMute, int32_t(bool enable));
- MOCK_CONST_METHOD1(SpeakerMute, int32_t(bool* enabled));
- MOCK_METHOD1(MicrophoneMuteIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetMicrophoneMute, int32_t(bool enable));
- MOCK_CONST_METHOD1(MicrophoneMute, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(StereoPlayoutIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetStereoPlayout, int32_t(bool enable));
- MOCK_CONST_METHOD1(StereoPlayout, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(StereoRecordingIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetStereoRecording, int32_t(bool enable));
- MOCK_CONST_METHOD1(StereoRecording, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(PlayoutDelay, int32_t(uint16_t* delayMS));
- MOCK_CONST_METHOD0(BuiltInAECIsAvailable, bool());
- MOCK_CONST_METHOD0(BuiltInAGCIsAvailable, bool());
- MOCK_CONST_METHOD0(BuiltInNSIsAvailable, bool());
- MOCK_METHOD1(EnableBuiltInAEC, int32_t(bool enable));
- MOCK_METHOD1(EnableBuiltInAGC, int32_t(bool enable));
- MOCK_METHOD1(EnableBuiltInNS, int32_t(bool enable));
- MOCK_CONST_METHOD0(GetPlayoutUnderrunCount, int32_t());
+ MOCK_METHOD(int32_t,
+ ActiveAudioLayer,
+ (AudioLayer * audioLayer),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ RegisterAudioCallback,
+ (AudioTransport * audioCallback),
+ (override));
+ MOCK_METHOD(int32_t, Init, (), (override));
+ MOCK_METHOD(int32_t, Terminate, (), (override));
+ MOCK_METHOD(bool, Initialized, (), (const, override));
+ MOCK_METHOD(int16_t, PlayoutDevices, (), (override));
+ MOCK_METHOD(int16_t, RecordingDevices, (), (override));
+ MOCK_METHOD(int32_t,
+ PlayoutDeviceName,
+ (uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]),
+ (override));
+ MOCK_METHOD(int32_t,
+ RecordingDeviceName,
+ (uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]),
+ (override));
+ MOCK_METHOD(int32_t, SetPlayoutDevice, (uint16_t index), (override));
+ MOCK_METHOD(int32_t,
+ SetPlayoutDevice,
+ (WindowsDeviceType device),
+ (override));
+ MOCK_METHOD(int32_t, SetRecordingDevice, (uint16_t index), (override));
+ MOCK_METHOD(int32_t,
+ SetRecordingDevice,
+ (WindowsDeviceType device),
+ (override));
+ MOCK_METHOD(int32_t, PlayoutIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, InitPlayout, (), (override));
+ MOCK_METHOD(bool, PlayoutIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, RecordingIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, InitRecording, (), (override));
+ MOCK_METHOD(bool, RecordingIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, StartPlayout, (), (override));
+ MOCK_METHOD(int32_t, StopPlayout, (), (override));
+ MOCK_METHOD(bool, Playing, (), (const, override));
+ MOCK_METHOD(int32_t, StartRecording, (), (override));
+ MOCK_METHOD(int32_t, StopRecording, (), (override));
+ MOCK_METHOD(bool, Recording, (), (const, override));
+ MOCK_METHOD(int32_t, InitSpeaker, (), (override));
+ MOCK_METHOD(bool, SpeakerIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, InitMicrophone, (), (override));
+ MOCK_METHOD(bool, MicrophoneIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, SpeakerVolumeIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, SetSpeakerVolume, (uint32_t volume), (override));
+ MOCK_METHOD(int32_t, SpeakerVolume, (uint32_t * volume), (const, override));
+ MOCK_METHOD(int32_t,
+ MaxSpeakerVolume,
+ (uint32_t * maxVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MinSpeakerVolume,
+ (uint32_t * minVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MicrophoneVolumeIsAvailable,
+ (bool* available),
+ (override));
+ MOCK_METHOD(int32_t, SetMicrophoneVolume, (uint32_t volume), (override));
+ MOCK_METHOD(int32_t,
+ MicrophoneVolume,
+ (uint32_t * volume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MaxMicrophoneVolume,
+ (uint32_t * maxVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MinMicrophoneVolume,
+ (uint32_t * minVolume),
+ (const, override));
+ MOCK_METHOD(int32_t, SpeakerMuteIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, SetSpeakerMute, (bool enable), (override));
+ MOCK_METHOD(int32_t, SpeakerMute, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ MicrophoneMuteIsAvailable,
+ (bool* available),
+ (override));
+ MOCK_METHOD(int32_t, SetMicrophoneMute, (bool enable), (override));
+ MOCK_METHOD(int32_t, MicrophoneMute, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ StereoPlayoutIsAvailable,
+ (bool* available),
+ (const, override));
+ MOCK_METHOD(int32_t, SetStereoPlayout, (bool enable), (override));
+ MOCK_METHOD(int32_t, StereoPlayout, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ StereoRecordingIsAvailable,
+ (bool* available),
+ (const, override));
+ MOCK_METHOD(int32_t, SetStereoRecording, (bool enable), (override));
+ MOCK_METHOD(int32_t, StereoRecording, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t, PlayoutDelay, (uint16_t * delayMS), (const, override));
+ MOCK_METHOD(bool, BuiltInAECIsAvailable, (), (const, override));
+ MOCK_METHOD(bool, BuiltInAGCIsAvailable, (), (const, override));
+ MOCK_METHOD(bool, BuiltInNSIsAvailable, (), (const, override));
+ MOCK_METHOD(int32_t, EnableBuiltInAEC, (bool enable), (override));
+ MOCK_METHOD(int32_t, EnableBuiltInAGC, (bool enable), (override));
+ MOCK_METHOD(int32_t, EnableBuiltInNS, (bool enable), (override));
+ MOCK_METHOD(int32_t, GetPlayoutUnderrunCount, (), (const, override));
#if defined(WEBRTC_IOS)
- MOCK_CONST_METHOD1(GetPlayoutAudioParameters, int(AudioParameters* params));
- MOCK_CONST_METHOD1(GetRecordAudioParameters, int(AudioParameters* params));
+ MOCK_METHOD(int,
+ GetPlayoutAudioParameters,
+ (AudioParameters * params),
+ (const, override));
+ MOCK_METHOD(int,
+ GetRecordAudioParameters,
+ (AudioParameters * params),
+ (const, override));
#endif // WEBRTC_IOS
};
} // namespace test
diff --git a/modules/audio_device/include/mock_audio_transport.h b/modules/audio_device/include/mock_audio_transport.h
index ebdfbc7015..8f71a2d71f 100644
--- a/modules/audio_device/include/mock_audio_transport.h
+++ b/modules/audio_device/include/mock_audio_transport.h
@@ -22,36 +22,42 @@ class MockAudioTransport : public AudioTransport {
MockAudioTransport() {}
~MockAudioTransport() {}
- MOCK_METHOD10(RecordedDataIsAvailable,
- int32_t(const void* audioSamples,
- const size_t nSamples,
- const size_t nBytesPerSample,
- const size_t nChannels,
- const uint32_t samplesPerSec,
- const uint32_t totalDelayMS,
- const int32_t clockDrift,
- const uint32_t currentMicLevel,
- const bool keyPressed,
- uint32_t& newMicLevel));
-
- MOCK_METHOD8(NeedMorePlayData,
- int32_t(const size_t nSamples,
- const size_t nBytesPerSample,
- const size_t nChannels,
- const uint32_t samplesPerSec,
- void* audioSamples,
- size_t& nSamplesOut,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms));
-
- MOCK_METHOD7(PullRenderData,
- void(int bits_per_sample,
- int sample_rate,
- size_t number_of_channels,
- size_t number_of_frames,
- void* audio_data,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms));
+ MOCK_METHOD(int32_t,
+ RecordedDataIsAvailable,
+ (const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel),
+ (override));
+
+ MOCK_METHOD(int32_t,
+ NeedMorePlayData,
+ (const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms),
+ (override));
+
+ MOCK_METHOD(void,
+ PullRenderData,
+ (int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms),
+ (override));
};
} // namespace test
diff --git a/modules/audio_device/include/test_audio_device.cc b/modules/audio_device/include/test_audio_device.cc
index 96d4958706..46bf216540 100644
--- a/modules/audio_device/include/test_audio_device.cc
+++ b/modules/audio_device/include/test_audio_device.cc
@@ -23,13 +23,13 @@
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/random.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
@@ -99,45 +99,45 @@ class TestAudioDeviceModuleImpl
}
int32_t RegisterAudioCallback(AudioTransport* callback) override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK(callback || audio_callback_);
audio_callback_ = callback;
return 0;
}
int32_t StartPlayout() override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_CHECK(renderer_);
rendering_ = true;
return 0;
}
int32_t StopPlayout() override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
rendering_ = false;
return 0;
}
int32_t StartRecording() override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_CHECK(capturer_);
capturing_ = true;
return 0;
}
int32_t StopRecording() override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
capturing_ = false;
return 0;
}
bool Playing() const override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return rendering_;
}
bool Recording() const override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return capturing_;
}
@@ -155,7 +155,7 @@ class TestAudioDeviceModuleImpl
private:
void ProcessAudio() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (capturing_) {
// Capture 10ms of audio. 2 bytes per sample.
const bool keep_capturing = capturer_->Capture(&recording_buffer_);
@@ -194,7 +194,7 @@ class TestAudioDeviceModuleImpl
const std::unique_ptr<Renderer> renderer_ RTC_GUARDED_BY(lock_);
const int64_t process_interval_us_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
AudioTransport* audio_callback_ RTC_GUARDED_BY(lock_);
bool rendering_ RTC_GUARDED_BY(lock_);
bool capturing_ RTC_GUARDED_BY(lock_);
@@ -231,7 +231,7 @@ class PulsedNoiseCapturerImpl final
fill_with_zero_ = !fill_with_zero_;
int16_t max_amplitude;
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
max_amplitude = max_amplitude_;
}
buffer->SetData(
@@ -251,7 +251,7 @@ class PulsedNoiseCapturerImpl final
}
void SetMaxAmplitude(int16_t amplitude) override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
max_amplitude_ = amplitude;
}
@@ -259,7 +259,7 @@ class PulsedNoiseCapturerImpl final
int sampling_frequency_in_hz_;
bool fill_with_zero_;
Random random_generator_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
int16_t max_amplitude_ RTC_GUARDED_BY(lock_);
const int num_channels_;
};
diff --git a/modules/audio_device/linux/audio_device_alsa_linux.cc b/modules/audio_device/linux/audio_device_alsa_linux.cc
index fe5c62f33a..5fac1bcacd 100644
--- a/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -122,7 +122,7 @@ AudioDeviceLinuxALSA::~AudioDeviceLinuxALSA() {
}
void AudioDeviceLinuxALSA::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_ptrAudioBuffer = audioBuffer;
@@ -142,7 +142,7 @@ int32_t AudioDeviceLinuxALSA::ActiveAudioLayer(
}
AudioDeviceGeneric::InitStatus AudioDeviceLinuxALSA::Init() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
// Load libasound
if (!GetAlsaSymbolTable()->Load()) {
@@ -173,30 +173,30 @@ int32_t AudioDeviceLinuxALSA::Terminate() {
return 0;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_mixerManager.Close();
// RECORDING
if (_ptrThreadRec) {
rtc::PlatformThread* tmpThread = _ptrThreadRec.release();
- _critSect.Leave();
+ mutex_.Unlock();
tmpThread->Stop();
delete tmpThread;
- _critSect.Enter();
+ mutex_.Lock();
}
// PLAYOUT
if (_ptrThreadPlay) {
rtc::PlatformThread* tmpThread = _ptrThreadPlay.release();
- _critSect.Leave();
+ mutex_.Unlock();
tmpThread->Stop();
delete tmpThread;
- _critSect.Enter();
+ mutex_.Lock();
}
#if defined(WEBRTC_USE_X11)
if (_XDisplay) {
@@ -216,7 +216,7 @@ bool AudioDeviceLinuxALSA::Initialized() const {
}
int32_t AudioDeviceLinuxALSA::InitSpeaker() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
@@ -228,7 +228,7 @@ int32_t AudioDeviceLinuxALSA::InitSpeaker() {
}
int32_t AudioDeviceLinuxALSA::InitMicrophone() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -404,7 +404,7 @@ int32_t AudioDeviceLinuxALSA::MicrophoneMute(bool& enabled) const {
}
int32_t AudioDeviceLinuxALSA::StereoRecordingIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
// If we already have initialized in stereo it's obviously available
if (_recIsInitialized && (2 == _recChannels)) {
@@ -464,7 +464,7 @@ int32_t AudioDeviceLinuxALSA::StereoRecording(bool& enabled) const {
}
int32_t AudioDeviceLinuxALSA::StereoPlayoutIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
// If we already have initialized in stereo it's obviously available
if (_playIsInitialized && (2 == _playChannels)) {
@@ -747,7 +747,7 @@ int32_t AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available) {
int32_t AudioDeviceLinuxALSA::InitPlayout() {
int errVal = 0;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
}
@@ -866,7 +866,7 @@ int32_t AudioDeviceLinuxALSA::InitPlayout() {
int32_t AudioDeviceLinuxALSA::InitRecording() {
int errVal = 0;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -1059,7 +1059,7 @@ int32_t AudioDeviceLinuxALSA::StartRecording() {
int32_t AudioDeviceLinuxALSA::StopRecording() {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_recIsInitialized) {
return 0;
@@ -1079,7 +1079,7 @@ int32_t AudioDeviceLinuxALSA::StopRecording() {
_ptrThreadRec.reset();
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_recordingFramesLeft = 0;
if (_recordingBuffer) {
delete[] _recordingBuffer;
@@ -1163,7 +1163,7 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() {
int32_t AudioDeviceLinuxALSA::StopPlayout() {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_playIsInitialized) {
return 0;
@@ -1182,7 +1182,7 @@ int32_t AudioDeviceLinuxALSA::StopPlayout() {
_ptrThreadPlay.reset();
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_playoutFramesLeft = 0;
delete[] _playoutBuffer;
diff --git a/modules/audio_device/linux/audio_device_alsa_linux.h b/modules/audio_device/linux/audio_device_alsa_linux.h
index 4eb97afecc..0e0b7919ba 100644
--- a/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -15,8 +15,8 @@
#include "modules/audio_device/audio_device_generic.h"
#include "modules/audio_device/linux/audio_mixer_manager_alsa_linux.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#if defined(WEBRTC_USE_X11)
#include <X11/Xlib.h>
@@ -131,8 +131,8 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric {
bool KeyPressed() const;
- void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); }
- void UnLock() RTC_UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); }
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(mutex_) { mutex_.Lock(); }
+ void UnLock() RTC_UNLOCK_FUNCTION(mutex_) { mutex_.Unlock(); }
inline int32_t InputSanityCheckAfterUnlockedPeriod() const;
inline int32_t OutputSanityCheckAfterUnlockedPeriod() const;
@@ -144,7 +144,7 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric {
AudioDeviceBuffer* _ptrAudioBuffer;
- rtc::CriticalSection _critSect;
+ Mutex mutex_;
// TODO(pbos): Make plain members and start/stop instead of resetting these
// pointers. A thread can be reused.
diff --git a/modules/audio_device/linux/audio_device_pulse_linux.cc b/modules/audio_device/linux/audio_device_pulse_linux.cc
index 9faff1d021..9a7d1d0ca3 100644
--- a/modules/audio_device/linux/audio_device_pulse_linux.cc
+++ b/modules/audio_device/linux/audio_device_pulse_linux.cc
@@ -181,7 +181,7 @@ int32_t AudioDeviceLinuxPulse::Terminate() {
return 0;
}
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
quit_ = true;
}
_mixerManager.Close();
@@ -872,7 +872,7 @@ int32_t AudioDeviceLinuxPulse::InitPlayout() {
// Create a new play stream
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_playStream =
LATE(pa_stream_new)(_paContext, "playStream", &playSampleSpec, NULL);
}
@@ -945,7 +945,7 @@ int32_t AudioDeviceLinuxPulse::InitPlayout() {
// Mark playout side as initialized
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_playIsInitialized = true;
_sndCardPlayDelay = 0;
}
@@ -1066,7 +1066,7 @@ int32_t AudioDeviceLinuxPulse::StartRecording() {
_timeEventRec.Set();
if (!_recStartEvent.Wait(10000)) {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_startRec = false;
}
StopRecording();
@@ -1075,7 +1075,7 @@ int32_t AudioDeviceLinuxPulse::StartRecording() {
}
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
// The recording state is set by the audio thread after recording
// has started.
@@ -1090,7 +1090,7 @@ int32_t AudioDeviceLinuxPulse::StartRecording() {
int32_t AudioDeviceLinuxPulse::StopRecording() {
RTC_DCHECK(thread_checker_.IsCurrent());
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_recIsInitialized) {
return 0;
@@ -1170,7 +1170,7 @@ int32_t AudioDeviceLinuxPulse::StartPlayout() {
// Set state to ensure that playout starts from the audio thread.
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_startPlay = true;
}
@@ -1181,7 +1181,7 @@ int32_t AudioDeviceLinuxPulse::StartPlayout() {
_timeEventPlay.Set();
if (!_playStartEvent.Wait(10000)) {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_startPlay = false;
}
StopPlayout();
@@ -1190,7 +1190,7 @@ int32_t AudioDeviceLinuxPulse::StartPlayout() {
}
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
// The playing state is set by the audio thread after playout
// has started.
@@ -1205,7 +1205,7 @@ int32_t AudioDeviceLinuxPulse::StartPlayout() {
int32_t AudioDeviceLinuxPulse::StopPlayout() {
RTC_DCHECK(thread_checker_.IsCurrent());
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_playIsInitialized) {
return 0;
@@ -1259,7 +1259,7 @@ int32_t AudioDeviceLinuxPulse::StopPlayout() {
}
int32_t AudioDeviceLinuxPulse::PlayoutDelay(uint16_t& delayMS) const {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
delayMS = (uint16_t)_sndCardPlayDelay;
return 0;
}
@@ -1885,7 +1885,7 @@ int32_t AudioDeviceLinuxPulse::LatencyUsecs(pa_stream* stream) {
int32_t AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
size_t bufferSize)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
size_t size = bufferSize;
uint32_t numRecSamples = _recordBufferSize / (2 * _recChannels);
@@ -1953,7 +1953,7 @@ int32_t AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
int32_t AudioDeviceLinuxPulse::ProcessRecordedData(int8_t* bufferData,
uint32_t bufferSizeInSamples,
uint32_t recDelay)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
_ptrAudioBuffer->SetRecordedBuffer(bufferData, bufferSizeInSamples);
// TODO(andrew): this is a temporary hack, to avoid non-causal far- and
@@ -1998,7 +1998,7 @@ bool AudioDeviceLinuxPulse::PlayThreadProcess() {
return true;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (quit_) {
return false;
@@ -2170,7 +2170,7 @@ bool AudioDeviceLinuxPulse::RecThreadProcess() {
return true;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (quit_) {
return false;
}
diff --git a/modules/audio_device/linux/audio_device_pulse_linux.h b/modules/audio_device/linux/audio_device_pulse_linux.h
index 830f15f706..f05ba1ebf1 100644
--- a/modules/audio_device/linux/audio_device_pulse_linux.h
+++ b/modules/audio_device/linux/audio_device_pulse_linux.h
@@ -19,9 +19,9 @@
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/audio_device/linux/audio_mixer_manager_pulse_linux.h"
#include "modules/audio_device/linux/pulseaudiosymboltable_linux.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -197,8 +197,8 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric {
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
private:
- void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); }
- void UnLock() RTC_UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); }
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(mutex_) { mutex_.Lock(); }
+ void UnLock() RTC_UNLOCK_FUNCTION(mutex_) { mutex_.Unlock(); }
void WaitForOperationCompletion(pa_operation* paOperation) const;
void WaitForSuccess(pa_operation* paOperation) const;
@@ -261,7 +261,7 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric {
AudioDeviceBuffer* _ptrAudioBuffer;
- rtc::CriticalSection _critSect;
+ mutable Mutex mutex_;
rtc::Event _timeEventRec;
rtc::Event _timeEventPlay;
rtc::Event _recStartEvent;
@@ -296,9 +296,9 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric {
bool _startRec;
bool _startPlay;
bool update_speaker_volume_at_startup_;
- bool quit_ RTC_GUARDED_BY(&_critSect);
+ bool quit_ RTC_GUARDED_BY(&mutex_);
- uint32_t _sndCardPlayDelay RTC_GUARDED_BY(&_critSect);
+ uint32_t _sndCardPlayDelay RTC_GUARDED_BY(&mutex_);
int32_t _writeErrors;
diff --git a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc
index 4368ec96cc..028be5db6b 100644
--- a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc
+++ b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc
@@ -45,7 +45,7 @@ AudioMixerManagerLinuxALSA::~AudioMixerManagerLinuxALSA() {
int32_t AudioMixerManagerLinuxALSA::Close() {
RTC_LOG(LS_VERBOSE) << __FUNCTION__;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
CloseSpeaker();
CloseMicrophone();
@@ -56,7 +56,7 @@ int32_t AudioMixerManagerLinuxALSA::Close() {
int32_t AudioMixerManagerLinuxALSA::CloseSpeaker() {
RTC_LOG(LS_VERBOSE) << __FUNCTION__;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int errVal = 0;
@@ -88,7 +88,7 @@ int32_t AudioMixerManagerLinuxALSA::CloseSpeaker() {
int32_t AudioMixerManagerLinuxALSA::CloseMicrophone() {
RTC_LOG(LS_VERBOSE) << __FUNCTION__;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int errVal = 0;
@@ -128,7 +128,7 @@ int32_t AudioMixerManagerLinuxALSA::OpenSpeaker(char* deviceName) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerLinuxALSA::OpenSpeaker(name="
<< deviceName << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int errVal = 0;
@@ -204,7 +204,7 @@ int32_t AudioMixerManagerLinuxALSA::OpenMicrophone(char* deviceName) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerLinuxALSA::OpenMicrophone(name="
<< deviceName << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int errVal = 0;
@@ -298,7 +298,7 @@ int32_t AudioMixerManagerLinuxALSA::SetSpeakerVolume(uint32_t volume) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerLinuxALSA::SetSpeakerVolume(volume="
<< volume << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_outputMixerElement == NULL) {
RTC_LOG(LS_WARNING) << "no avaliable output mixer element exists";
@@ -501,7 +501,7 @@ int32_t AudioMixerManagerLinuxALSA::SetSpeakerMute(bool enable) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerLinuxALSA::SetSpeakerMute(enable="
<< enable << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_outputMixerElement == NULL) {
RTC_LOG(LS_WARNING) << "no avaliable output mixer element exists";
@@ -574,7 +574,7 @@ int32_t AudioMixerManagerLinuxALSA::SetMicrophoneMute(bool enable) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerLinuxALSA::SetMicrophoneMute(enable="
<< enable << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_inputMixerElement == NULL) {
RTC_LOG(LS_WARNING) << "no avaliable input mixer element exists";
@@ -649,7 +649,7 @@ int32_t AudioMixerManagerLinuxALSA::SetMicrophoneVolume(uint32_t volume) {
<< "AudioMixerManagerLinuxALSA::SetMicrophoneVolume(volume=" << volume
<< ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_inputMixerElement == NULL) {
RTC_LOG(LS_WARNING) << "no avaliable input mixer element exists";
diff --git a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h
index ca26f6a8c5..61490b4a78 100644
--- a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h
+++ b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h
@@ -15,7 +15,7 @@
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_device/linux/alsasymboltable_linux.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -55,7 +55,7 @@ class AudioMixerManagerLinuxALSA {
void GetControlName(char* controlName, char* deviceName) const;
private:
- rtc::CriticalSection _critSect;
+ Mutex mutex_;
mutable snd_mixer_t* _outputMixerHandle;
char _outputMixerStr[kAdmMaxDeviceNameSize];
mutable snd_mixer_t* _inputMixerHandle;
diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc
index e894cf309a..7a35c8e774 100644
--- a/modules/audio_device/mac/audio_device_mac.cc
+++ b/modules/audio_device/mac/audio_device_mac.cc
@@ -206,7 +206,7 @@ AudioDeviceMac::~AudioDeviceMac() {
// ============================================================================
void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
_ptrAudioBuffer = audioBuffer;
@@ -224,7 +224,7 @@ int32_t AudioDeviceMac::ActiveAudioLayer(
}
AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_initialized) {
return InitStatus::OK;
@@ -350,8 +350,7 @@ int32_t AudioDeviceMac::Terminate() {
return -1;
}
- _critSect.Enter();
-
+ MutexLock lock(&mutex_);
_mixerManager.Close();
OSStatus err = noErr;
@@ -375,8 +374,6 @@ int32_t AudioDeviceMac::Terminate() {
_outputDeviceIsSpecified = false;
_inputDeviceIsSpecified = false;
- _critSect.Leave();
-
return retVal;
}
@@ -409,8 +406,11 @@ int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available) {
}
int32_t AudioDeviceMac::InitSpeaker() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
+ return InitSpeakerLocked();
+}
+int32_t AudioDeviceMac::InitSpeakerLocked() {
if (_playing) {
return -1;
}
@@ -457,8 +457,11 @@ int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available) {
}
int32_t AudioDeviceMac::InitMicrophone() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
+ return InitMicrophoneLocked();
+}
+int32_t AudioDeviceMac::InitMicrophoneLocked() {
if (_recording) {
return -1;
}
@@ -796,7 +799,7 @@ int16_t AudioDeviceMac::PlayoutDevices() {
}
int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playIsInitialized) {
return -1;
@@ -945,7 +948,7 @@ int32_t AudioDeviceMac::RecordingIsAvailable(bool& available) {
int32_t AudioDeviceMac::InitPlayout() {
RTC_LOG(LS_INFO) << "InitPlayout";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
@@ -960,7 +963,7 @@ int32_t AudioDeviceMac::InitPlayout() {
}
// Initialize the speaker (devices might have been added or removed)
- if (InitSpeaker() == -1) {
+ if (InitSpeakerLocked() == -1) {
RTC_LOG(LS_WARNING) << "InitSpeaker() failed";
}
@@ -1083,7 +1086,7 @@ int32_t AudioDeviceMac::InitPlayout() {
int32_t AudioDeviceMac::InitRecording() {
RTC_LOG(LS_INFO) << "InitRecording";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -1098,7 +1101,7 @@ int32_t AudioDeviceMac::InitRecording() {
}
// Initialize the microphone (devices might have been added or removed)
- if (InitMicrophone() == -1) {
+ if (InitMicrophoneLocked() == -1) {
RTC_LOG(LS_WARNING) << "InitMicrophone() failed";
}
@@ -1280,7 +1283,7 @@ int32_t AudioDeviceMac::InitRecording() {
int32_t AudioDeviceMac::StartRecording() {
RTC_LOG(LS_INFO) << "StartRecording";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_recIsInitialized) {
return -1;
@@ -1316,7 +1319,7 @@ int32_t AudioDeviceMac::StartRecording() {
int32_t AudioDeviceMac::StopRecording() {
RTC_LOG(LS_INFO) << "StopRecording";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_recIsInitialized) {
return 0;
@@ -1329,16 +1332,16 @@ int32_t AudioDeviceMac::StopRecording() {
if (_recording) {
_recording = false;
_doStopRec = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ mutex_.Unlock(); // Cannot be under lock, risk of deadlock
if (!_stopEventRec.Wait(2000)) {
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
RTC_LOG(LS_WARNING) << "Timed out stopping the capture IOProc."
"We may have failed to detect a device removal.";
WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
WEBRTC_CA_LOG_WARN(
AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
}
- _critSect.Enter();
+ mutex_.Lock();
_doStopRec = false;
RTC_LOG(LS_INFO) << "Recording stopped (input device)";
} else if (_recIsInitialized) {
@@ -1357,9 +1360,9 @@ int32_t AudioDeviceMac::StopRecording() {
if (_recording && captureDeviceIsAlive == 1) {
_recording = false;
_doStop = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ mutex_.Unlock(); // Cannot be under lock, risk of deadlock
if (!_stopEvent.Wait(2000)) {
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
RTC_LOG(LS_WARNING) << "Timed out stopping the shared IOProc."
"We may have failed to detect a device removal.";
// We assume rendering on a shared device has stopped as well if
@@ -1368,7 +1371,7 @@ int32_t AudioDeviceMac::StopRecording() {
WEBRTC_CA_LOG_WARN(
AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
}
- _critSect.Enter();
+ mutex_.Lock();
_doStop = false;
RTC_LOG(LS_INFO) << "Recording stopped (shared device)";
} else if (_recIsInitialized && !_playing && !_playIsInitialized) {
@@ -1382,10 +1385,10 @@ int32_t AudioDeviceMac::StopRecording() {
AtomicSet32(&_captureDeviceIsAlive, 0);
if (capture_worker_thread_.get()) {
- _critSect.Leave();
+ mutex_.Unlock();
capture_worker_thread_->Stop();
capture_worker_thread_.reset();
- _critSect.Enter();
+ mutex_.Lock();
}
WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
@@ -1420,7 +1423,7 @@ bool AudioDeviceMac::PlayoutIsInitialized() const {
int32_t AudioDeviceMac::StartPlayout() {
RTC_LOG(LS_INFO) << "StartPlayout";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_playIsInitialized) {
return -1;
@@ -1446,7 +1449,7 @@ int32_t AudioDeviceMac::StartPlayout() {
int32_t AudioDeviceMac::StopPlayout() {
RTC_LOG(LS_INFO) << "StopPlayout";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_playIsInitialized) {
return 0;
@@ -1464,9 +1467,9 @@ int32_t AudioDeviceMac::StopPlayout() {
// has ended before stopping itself.
_playing = false;
_doStop = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ mutex_.Unlock(); // Cannot be under lock, risk of deadlock
if (!_stopEvent.Wait(2000)) {
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
RTC_LOG(LS_WARNING) << "Timed out stopping the render IOProc."
"We may have failed to detect a device removal.";
@@ -1476,7 +1479,7 @@ int32_t AudioDeviceMac::StopPlayout() {
WEBRTC_CA_LOG_WARN(
AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
}
- _critSect.Enter();
+ mutex_.Lock();
_doStop = false;
RTC_LOG(LS_INFO) << "Playout stopped";
} else if (_twoDevices && _playIsInitialized) {
@@ -1492,10 +1495,10 @@ int32_t AudioDeviceMac::StopPlayout() {
// Setting this signal will allow the worker thread to be stopped.
AtomicSet32(&_renderDeviceIsAlive, 0);
if (render_worker_thread_.get()) {
- _critSect.Leave();
+ mutex_.Unlock();
render_worker_thread_->Stop();
render_worker_thread_.reset();
- _critSect.Enter();
+ mutex_.Lock();
}
WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
@@ -2175,7 +2178,7 @@ OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData,
// Check if we should close down audio device
// Double-checked locking optimization to remove locking overhead
if (_doStop) {
- _critSect.Enter();
+ MutexLock lock(&mutex_);
if (_doStop) {
if (_twoDevices || (!_recording && !_playing)) {
// In the case of a shared device, the single driving ioProc
@@ -2190,10 +2193,8 @@ OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData,
_doStop = false;
_stopEvent.Set();
- _critSect.Leave();
return 0;
}
- _critSect.Leave();
}
if (!_playing) {
@@ -2270,7 +2271,7 @@ OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList* inputData,
// Check if we should close down audio device
// Double-checked locking optimization to remove locking overhead
if (_doStopRec) {
- _critSect.Enter();
+ MutexLock lock(&mutex_);
if (_doStopRec) {
// This will be signalled only when a shared device is not in use.
WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
@@ -2282,10 +2283,8 @@ OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList* inputData,
_doStopRec = false;
_stopEventRec.Set();
- _critSect.Leave();
return 0;
}
- _critSect.Leave();
}
if (!_recording) {
diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h
index d7076a3c1c..4fcd3b0530 100644
--- a/modules/audio_device/mac/audio_device_mac.h
+++ b/modules/audio_device/mac/audio_device_mac.h
@@ -19,9 +19,9 @@
#include "modules/audio_device/audio_device_generic.h"
#include "modules/audio_device/mac/audio_mixer_manager_mac.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
struct PaUtilRingBuffer;
@@ -69,8 +69,8 @@ class AudioDeviceMac : public AudioDeviceGeneric {
AudioDeviceModule::AudioLayer& audioLayer) const;
// Main initializaton and termination
- virtual InitStatus Init();
- virtual int32_t Terminate();
+ virtual InitStatus Init() RTC_LOCKS_EXCLUDED(mutex_);
+ virtual int32_t Terminate() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool Initialized() const;
// Device enumeration
@@ -84,7 +84,7 @@ class AudioDeviceMac : public AudioDeviceGeneric {
char guid[kAdmMaxGuidSize]);
// Device selection
- virtual int32_t SetPlayoutDevice(uint16_t index);
+ virtual int32_t SetPlayoutDevice(uint16_t index) RTC_LOCKS_EXCLUDED(mutex_);
virtual int32_t SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
virtual int32_t SetRecordingDevice(uint16_t index);
virtual int32_t SetRecordingDevice(
@@ -92,24 +92,24 @@ class AudioDeviceMac : public AudioDeviceGeneric {
// Audio transport initialization
virtual int32_t PlayoutIsAvailable(bool& available);
- virtual int32_t InitPlayout();
+ virtual int32_t InitPlayout() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool PlayoutIsInitialized() const;
virtual int32_t RecordingIsAvailable(bool& available);
- virtual int32_t InitRecording();
+ virtual int32_t InitRecording() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool RecordingIsInitialized() const;
// Audio transport control
- virtual int32_t StartPlayout();
- virtual int32_t StopPlayout();
+ virtual int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_);
+ virtual int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool Playing() const;
- virtual int32_t StartRecording();
- virtual int32_t StopRecording();
+ virtual int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_);
+ virtual int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool Recording() const;
// Audio mixer initialization
- virtual int32_t InitSpeaker();
+ virtual int32_t InitSpeaker() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool SpeakerIsInitialized() const;
- virtual int32_t InitMicrophone();
+ virtual int32_t InitMicrophone() RTC_LOCKS_EXCLUDED(mutex_);
virtual bool MicrophoneIsInitialized() const;
// Speaker volume controls
@@ -147,9 +147,13 @@ class AudioDeviceMac : public AudioDeviceGeneric {
// Delay information and control
virtual int32_t PlayoutDelay(uint16_t& delayMS) const;
- virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+ RTC_LOCKS_EXCLUDED(mutex_);
private:
+ int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t SpeakerIsAvailable(bool& available);
@@ -229,13 +233,15 @@ class AudioDeviceMac : public AudioDeviceGeneric {
OSStatus implDeviceIOProc(const AudioBufferList* inputData,
const AudioTimeStamp* inputTime,
AudioBufferList* outputData,
- const AudioTimeStamp* outputTime);
+ const AudioTimeStamp* outputTime)
+ RTC_LOCKS_EXCLUDED(mutex_);
OSStatus implOutConverterProc(UInt32* numberDataPackets,
AudioBufferList* data);
OSStatus implInDeviceIOProc(const AudioBufferList* inputData,
- const AudioTimeStamp* inputTime);
+ const AudioTimeStamp* inputTime)
+ RTC_LOCKS_EXCLUDED(mutex_);
OSStatus implInConverterProc(UInt32* numberDataPackets,
AudioBufferList* data);
@@ -249,7 +255,7 @@ class AudioDeviceMac : public AudioDeviceGeneric {
AudioDeviceBuffer* _ptrAudioBuffer;
- rtc::CriticalSection _critSect;
+ Mutex mutex_;
rtc::Event _stopEventRec;
rtc::Event _stopEvent;
diff --git a/modules/audio_device/mac/audio_mixer_manager_mac.cc b/modules/audio_device/mac/audio_mixer_manager_mac.cc
index e2b7d6370d..162f3f255d 100644
--- a/modules/audio_device/mac/audio_mixer_manager_mac.cc
+++ b/modules/audio_device/mac/audio_mixer_manager_mac.cc
@@ -61,18 +61,21 @@ AudioMixerManagerMac::~AudioMixerManagerMac() {
int32_t AudioMixerManagerMac::Close() {
RTC_LOG(LS_VERBOSE) << __FUNCTION__;
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
- CloseSpeaker();
- CloseMicrophone();
+ CloseSpeakerLocked();
+ CloseMicrophoneLocked();
return 0;
}
int32_t AudioMixerManagerMac::CloseSpeaker() {
- RTC_LOG(LS_VERBOSE) << __FUNCTION__;
+ MutexLock lock(&mutex_);
+ return CloseSpeakerLocked();
+}
- rtc::CritScope lock(&_critSect);
+int32_t AudioMixerManagerMac::CloseSpeakerLocked() {
+ RTC_LOG(LS_VERBOSE) << __FUNCTION__;
_outputDeviceID = kAudioObjectUnknown;
_noOutputChannels = 0;
@@ -81,9 +84,12 @@ int32_t AudioMixerManagerMac::CloseSpeaker() {
}
int32_t AudioMixerManagerMac::CloseMicrophone() {
- RTC_LOG(LS_VERBOSE) << __FUNCTION__;
+ MutexLock lock(&mutex_);
+ return CloseMicrophoneLocked();
+}
- rtc::CritScope lock(&_critSect);
+int32_t AudioMixerManagerMac::CloseMicrophoneLocked() {
+ RTC_LOG(LS_VERBOSE) << __FUNCTION__;
_inputDeviceID = kAudioObjectUnknown;
_noInputChannels = 0;
@@ -95,7 +101,7 @@ int32_t AudioMixerManagerMac::OpenSpeaker(AudioDeviceID deviceID) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::OpenSpeaker(id=" << deviceID
<< ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
OSStatus err = noErr;
UInt32 size = 0;
@@ -147,7 +153,7 @@ int32_t AudioMixerManagerMac::OpenMicrophone(AudioDeviceID deviceID) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::OpenMicrophone(id=" << deviceID
<< ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
OSStatus err = noErr;
UInt32 size = 0;
@@ -205,7 +211,7 @@ int32_t AudioMixerManagerMac::SetSpeakerVolume(uint32_t volume) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::SetSpeakerVolume(volume="
<< volume << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_outputDeviceID == kAudioObjectUnknown) {
RTC_LOG(LS_WARNING) << "device ID has not been set";
@@ -421,7 +427,7 @@ int32_t AudioMixerManagerMac::SetSpeakerMute(bool enable) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::SetSpeakerMute(enable="
<< enable << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_outputDeviceID == kAudioObjectUnknown) {
RTC_LOG(LS_WARNING) << "device ID has not been set";
@@ -589,7 +595,7 @@ int32_t AudioMixerManagerMac::SetMicrophoneMute(bool enable) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::SetMicrophoneMute(enable="
<< enable << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_inputDeviceID == kAudioObjectUnknown) {
RTC_LOG(LS_WARNING) << "device ID has not been set";
@@ -737,7 +743,7 @@ int32_t AudioMixerManagerMac::SetMicrophoneVolume(uint32_t volume) {
RTC_LOG(LS_VERBOSE) << "AudioMixerManagerMac::SetMicrophoneVolume(volume="
<< volume << ")";
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_inputDeviceID == kAudioObjectUnknown) {
RTC_LOG(LS_WARNING) << "device ID has not been set";
diff --git a/modules/audio_device/mac/audio_mixer_manager_mac.h b/modules/audio_device/mac/audio_mixer_manager_mac.h
index 342e1c997c..17e34ff17d 100644
--- a/modules/audio_device/mac/audio_mixer_manager_mac.h
+++ b/modules/audio_device/mac/audio_mixer_manager_mac.h
@@ -14,36 +14,36 @@
#include <CoreAudio/CoreAudio.h>
#include "modules/audio_device/include/audio_device.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
class AudioMixerManagerMac {
public:
- int32_t OpenSpeaker(AudioDeviceID deviceID);
- int32_t OpenMicrophone(AudioDeviceID deviceID);
- int32_t SetSpeakerVolume(uint32_t volume);
+ int32_t OpenSpeaker(AudioDeviceID deviceID) RTC_LOCKS_EXCLUDED(mutex_);
+ int32_t OpenMicrophone(AudioDeviceID deviceID) RTC_LOCKS_EXCLUDED(mutex_);
+ int32_t SetSpeakerVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(mutex_);
int32_t SpeakerVolume(uint32_t& volume) const;
int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
int32_t MinSpeakerVolume(uint32_t& minVolume) const;
int32_t SpeakerVolumeIsAvailable(bool& available);
int32_t SpeakerMuteIsAvailable(bool& available);
- int32_t SetSpeakerMute(bool enable);
+ int32_t SetSpeakerMute(bool enable) RTC_LOCKS_EXCLUDED(mutex_);
int32_t SpeakerMute(bool& enabled) const;
int32_t StereoPlayoutIsAvailable(bool& available);
int32_t StereoRecordingIsAvailable(bool& available);
int32_t MicrophoneMuteIsAvailable(bool& available);
- int32_t SetMicrophoneMute(bool enable);
+ int32_t SetMicrophoneMute(bool enable) RTC_LOCKS_EXCLUDED(mutex_);
int32_t MicrophoneMute(bool& enabled) const;
int32_t MicrophoneVolumeIsAvailable(bool& available);
- int32_t SetMicrophoneVolume(uint32_t volume);
+ int32_t SetMicrophoneVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(mutex_);
int32_t MicrophoneVolume(uint32_t& volume) const;
int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
- int32_t Close();
- int32_t CloseSpeaker();
- int32_t CloseMicrophone();
+ int32_t Close() RTC_LOCKS_EXCLUDED(mutex_);
+ int32_t CloseSpeaker() RTC_LOCKS_EXCLUDED(mutex_);
+ int32_t CloseMicrophone() RTC_LOCKS_EXCLUDED(mutex_);
bool SpeakerIsInitialized() const;
bool MicrophoneIsInitialized() const;
@@ -52,12 +52,14 @@ class AudioMixerManagerMac {
~AudioMixerManagerMac();
private:
+ int32_t CloseSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ int32_t CloseMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
static void logCAMsg(const rtc::LoggingSeverity sev,
const char* msg,
const char* err);
private:
- rtc::CriticalSection _critSect;
+ Mutex mutex_;
AudioDeviceID _inputDeviceID;
AudioDeviceID _outputDeviceID;
diff --git a/modules/audio_device/mock_audio_device_buffer.h b/modules/audio_device/mock_audio_device_buffer.h
index 1f809cc5dc..b0f54c20ff 100644
--- a/modules/audio_device/mock_audio_device_buffer.h
+++ b/modules/audio_device/mock_audio_device_buffer.h
@@ -20,12 +20,14 @@ class MockAudioDeviceBuffer : public AudioDeviceBuffer {
public:
using AudioDeviceBuffer::AudioDeviceBuffer;
virtual ~MockAudioDeviceBuffer() {}
- MOCK_METHOD1(RequestPlayoutData, int32_t(size_t nSamples));
- MOCK_METHOD1(GetPlayoutData, int32_t(void* audioBuffer));
- MOCK_METHOD2(SetRecordedBuffer,
- int32_t(const void* audioBuffer, size_t nSamples));
- MOCK_METHOD2(SetVQEData, void(int playDelayMS, int recDelayMS));
- MOCK_METHOD0(DeliverRecordedData, int32_t());
+ MOCK_METHOD(int32_t, RequestPlayoutData, (size_t nSamples), (override));
+ MOCK_METHOD(int32_t, GetPlayoutData, (void* audioBuffer), (override));
+ MOCK_METHOD(int32_t,
+ SetRecordedBuffer,
+ (const void* audioBuffer, size_t nSamples),
+ (override));
+ MOCK_METHOD(void, SetVQEData, (int playDelayMS, int recDelayMS), (override));
+ MOCK_METHOD(int32_t, DeliverRecordedData, (), (override));
};
} // namespace webrtc
diff --git a/modules/audio_device/win/audio_device_core_win.cc b/modules/audio_device/win/audio_device_core_win.cc
index a7aecb0110..548158c191 100644
--- a/modules/audio_device/win/audio_device_core_win.cc
+++ b/modules/audio_device/win/audio_device_core_win.cc
@@ -579,7 +579,7 @@ int32_t AudioDeviceWindowsCore::ActiveAudioLayer(
// ----------------------------------------------------------------------------
AudioDeviceGeneric::InitStatus AudioDeviceWindowsCore::Init() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_initialized) {
return InitStatus::OK;
@@ -601,7 +601,7 @@ AudioDeviceGeneric::InitStatus AudioDeviceWindowsCore::Init() {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::Terminate() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_initialized) {
return 0;
@@ -640,7 +640,7 @@ bool AudioDeviceWindowsCore::Initialized() const {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::InitSpeaker() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
@@ -709,7 +709,7 @@ int32_t AudioDeviceWindowsCore::InitSpeaker() {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::InitMicrophone() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -784,7 +784,7 @@ bool AudioDeviceWindowsCore::MicrophoneIsInitialized() const {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::SpeakerVolumeIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_ptrDeviceOut == NULL) {
return -1;
@@ -826,7 +826,7 @@ Exit:
int32_t AudioDeviceWindowsCore::SetSpeakerVolume(uint32_t volume) {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_speakerIsInitialized) {
return -1;
@@ -846,9 +846,9 @@ int32_t AudioDeviceWindowsCore::SetSpeakerVolume(uint32_t volume) {
// scale input volume to valid range (0.0 to 1.0)
const float fLevel = (float)volume / MAX_CORE_SPEAKER_VOLUME;
- _volumeMutex.Enter();
+ volume_mutex_.Lock();
hr = _ptrRenderSimpleVolume->SetMasterVolume(fLevel, NULL);
- _volumeMutex.Leave();
+ volume_mutex_.Unlock();
EXIT_ON_ERROR(hr);
return 0;
@@ -864,7 +864,7 @@ Exit:
int32_t AudioDeviceWindowsCore::SpeakerVolume(uint32_t& volume) const {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_speakerIsInitialized) {
return -1;
@@ -878,9 +878,9 @@ int32_t AudioDeviceWindowsCore::SpeakerVolume(uint32_t& volume) const {
HRESULT hr = S_OK;
float fLevel(0.0f);
- _volumeMutex.Enter();
+ volume_mutex_.Lock();
hr = _ptrRenderSimpleVolume->GetMasterVolume(&fLevel);
- _volumeMutex.Leave();
+ volume_mutex_.Unlock();
EXIT_ON_ERROR(hr);
// scale input volume range [0.0,1.0] to valid output range
@@ -931,7 +931,7 @@ int32_t AudioDeviceWindowsCore::MinSpeakerVolume(uint32_t& minVolume) const {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::SpeakerMuteIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_ptrDeviceOut == NULL) {
return -1;
@@ -967,7 +967,7 @@ Exit:
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::SetSpeakerMute(bool enable) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_speakerIsInitialized) {
return -1;
@@ -1041,7 +1041,7 @@ Exit:
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::MicrophoneMuteIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_ptrDeviceIn == NULL) {
return -1;
@@ -1151,7 +1151,7 @@ int32_t AudioDeviceWindowsCore::StereoRecordingIsAvailable(bool& available) {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::SetStereoRecording(bool enable) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (enable) {
_recChannelsPrioList[0] = 2; // try stereo first
@@ -1193,7 +1193,7 @@ int32_t AudioDeviceWindowsCore::StereoPlayoutIsAvailable(bool& available) {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::SetStereoPlayout(bool enable) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (enable) {
_playChannelsPrioList[0] = 2; // try stereo first
@@ -1226,7 +1226,7 @@ int32_t AudioDeviceWindowsCore::StereoPlayout(bool& enabled) const {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::MicrophoneVolumeIsAvailable(bool& available) {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_ptrDeviceIn == NULL) {
return -1;
@@ -1264,7 +1264,7 @@ int32_t AudioDeviceWindowsCore::SetMicrophoneVolume(uint32_t volume) {
<< volume << ")";
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_microphoneIsInitialized) {
return -1;
@@ -1283,9 +1283,9 @@ int32_t AudioDeviceWindowsCore::SetMicrophoneVolume(uint32_t volume) {
HRESULT hr = S_OK;
// scale input volume to valid range (0.0 to 1.0)
const float fLevel = static_cast<float>(volume) / MAX_CORE_MICROPHONE_VOLUME;
- _volumeMutex.Enter();
+ volume_mutex_.Lock();
_ptrCaptureVolume->SetMasterVolumeLevelScalar(fLevel, NULL);
- _volumeMutex.Leave();
+ volume_mutex_.Unlock();
EXIT_ON_ERROR(hr);
return 0;
@@ -1301,7 +1301,7 @@ Exit:
int32_t AudioDeviceWindowsCore::MicrophoneVolume(uint32_t& volume) const {
{
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (!_microphoneIsInitialized) {
return -1;
@@ -1315,9 +1315,9 @@ int32_t AudioDeviceWindowsCore::MicrophoneVolume(uint32_t& volume) const {
HRESULT hr = S_OK;
float fLevel(0.0f);
volume = 0;
- _volumeMutex.Enter();
+ volume_mutex_.Lock();
hr = _ptrCaptureVolume->GetMasterVolumeLevelScalar(&fLevel);
- _volumeMutex.Leave();
+ volume_mutex_.Unlock();
EXIT_ON_ERROR(hr);
// scale input volume range [0.0,1.0] to valid output range
@@ -1370,7 +1370,7 @@ int32_t AudioDeviceWindowsCore::MinMicrophoneVolume(uint32_t& minVolume) const {
// ----------------------------------------------------------------------------
int16_t AudioDeviceWindowsCore::PlayoutDevices() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_RefreshDeviceList(eRender) != -1) {
return (_DeviceListCount(eRender));
@@ -1398,7 +1398,7 @@ int32_t AudioDeviceWindowsCore::SetPlayoutDevice(uint16_t index) {
return -1;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
HRESULT hr(S_OK);
@@ -1445,7 +1445,7 @@ int32_t AudioDeviceWindowsCore::SetPlayoutDevice(
role = eCommunications;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
// Refresh the list of rendering endpoint devices
_RefreshDeviceList(eRender);
@@ -1506,7 +1506,7 @@ int32_t AudioDeviceWindowsCore::PlayoutDeviceName(
memset(guid, 0, kAdmMaxGuidSize);
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int32_t ret(-1);
WCHAR szDeviceName[MAX_PATH];
@@ -1582,7 +1582,7 @@ int32_t AudioDeviceWindowsCore::RecordingDeviceName(
memset(guid, 0, kAdmMaxGuidSize);
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
int32_t ret(-1);
WCHAR szDeviceName[MAX_PATH];
@@ -1633,7 +1633,7 @@ int32_t AudioDeviceWindowsCore::RecordingDeviceName(
// ----------------------------------------------------------------------------
int16_t AudioDeviceWindowsCore::RecordingDevices() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_RefreshDeviceList(eCapture) != -1) {
return (_DeviceListCount(eCapture));
@@ -1661,7 +1661,7 @@ int32_t AudioDeviceWindowsCore::SetRecordingDevice(uint16_t index) {
return -1;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
HRESULT hr(S_OK);
@@ -1708,7 +1708,7 @@ int32_t AudioDeviceWindowsCore::SetRecordingDevice(
role = eCommunications;
}
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
// Refresh the list of capture endpoint devices
_RefreshDeviceList(eCapture);
@@ -1785,7 +1785,7 @@ int32_t AudioDeviceWindowsCore::RecordingIsAvailable(bool& available) {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::InitPlayout() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_playing) {
return -1;
@@ -2099,7 +2099,7 @@ int32_t AudioDeviceWindowsCore::InitRecordingDMO() {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::InitRecording() {
- rtc::CritScope lock(&_critSect);
+ MutexLock lock(&mutex_);
if (_recording) {
return -1;
@@ -2326,7 +2326,7 @@ int32_t AudioDeviceWindowsCore::StartRecording() {
}
{
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
// Create thread which will drive the capturing
LPTHREAD_START_ROUTINE lpStartAddress = WSAPICaptureThread;
@@ -2479,7 +2479,7 @@ int32_t AudioDeviceWindowsCore::StartPlayout() {
}
{
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
// Create thread which will drive the rendering.
assert(_hPlayThread == NULL);
@@ -2515,7 +2515,7 @@ int32_t AudioDeviceWindowsCore::StopPlayout() {
}
{
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
if (_hPlayThread == NULL) {
RTC_LOG(LS_VERBOSE)
@@ -2545,7 +2545,7 @@ int32_t AudioDeviceWindowsCore::StopPlayout() {
}
{
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
RTC_LOG(LS_VERBOSE) << "webrtc_core_audio_render_thread is now closed";
// to reset this event manually at each time we finish with it,
@@ -2587,7 +2587,7 @@ int32_t AudioDeviceWindowsCore::StopPlayout() {
// ----------------------------------------------------------------------------
int32_t AudioDeviceWindowsCore::PlayoutDelay(uint16_t& delayMS) const {
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
delayMS = static_cast<uint16_t>(_sndCardPlayDelay);
return 0;
}
@@ -2981,7 +2981,7 @@ DWORD AudioDeviceWindowsCore::DoCaptureThreadPollDMO() {
}
while (keepRecording) {
- rtc::CritScope critScoped(&_critSect);
+ MutexLock lockScoped(&mutex_);
DWORD dwStatus = 0;
{
@@ -3357,11 +3357,11 @@ int32_t AudioDeviceWindowsCore::EnableBuiltInAEC(bool enable) {
}
void AudioDeviceWindowsCore::_Lock() RTC_NO_THREAD_SAFETY_ANALYSIS {
- _critSect.Enter();
+ mutex_.Lock();
}
void AudioDeviceWindowsCore::_UnLock() RTC_NO_THREAD_SAFETY_ANALYSIS {
- _critSect.Leave();
+ mutex_.Unlock();
}
int AudioDeviceWindowsCore::SetDMOProperties() {
diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h
index 81db3d8bc4..4803362ac6 100644
--- a/modules/audio_device/win/audio_device_core_win.h
+++ b/modules/audio_device/win/audio_device_core_win.h
@@ -27,7 +27,7 @@
#include <mmdeviceapi.h> // MMDevice
#include "api/scoped_refptr.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
// Use Multimedia Class Scheduler Service (MMCSS) to boost the thread priority
#pragma comment(lib, "avrt.lib")
@@ -237,8 +237,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric {
ScopedCOMInitializer _comInit;
AudioDeviceBuffer* _ptrAudioBuffer;
- rtc::CriticalSection _critSect;
- rtc::CriticalSection _volumeMutex;
+ mutable Mutex mutex_;
+ mutable Mutex volume_mutex_;
IMMDeviceEnumerator* _ptrEnumerator;
IMMDeviceCollection* _ptrRenderCollection;
diff --git a/modules/audio_mixer/BUILD.gn b/modules/audio_mixer/BUILD.gn
index a4b71f6625..7ce35ffeb3 100644
--- a/modules/audio_mixer/BUILD.gn
+++ b/modules/audio_mixer/BUILD.gn
@@ -46,6 +46,7 @@ rtc_library("audio_mixer_impl") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:metrics",
"../audio_processing:api",
diff --git a/modules/audio_mixer/audio_mixer_impl.cc b/modules/audio_mixer/audio_mixer_impl.cc
index abfda25f41..6552953dec 100644
--- a/modules/audio_mixer/audio_mixer_impl.cc
+++ b/modules/audio_mixer/audio_mixer_impl.cc
@@ -126,7 +126,7 @@ void AudioMixerImpl::Mix(size_t number_of_channels,
CalculateOutputFrequency();
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
const size_t number_of_streams = audio_source_list_.size();
frame_combiner_.Combine(GetAudioFromSources(), number_of_channels,
OutputFrequency(), number_of_streams,
@@ -138,7 +138,7 @@ void AudioMixerImpl::Mix(size_t number_of_channels,
void AudioMixerImpl::CalculateOutputFrequency() {
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
std::vector<int> preferred_rates;
std::transform(audio_source_list_.begin(), audio_source_list_.end(),
@@ -159,7 +159,7 @@ int AudioMixerImpl::OutputFrequency() const {
bool AudioMixerImpl::AddSource(Source* audio_source) {
RTC_DCHECK(audio_source);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RTC_DCHECK(FindSourceInList(audio_source, &audio_source_list_) ==
audio_source_list_.end())
<< "Source already added to mixer";
@@ -169,7 +169,7 @@ bool AudioMixerImpl::AddSource(Source* audio_source) {
void AudioMixerImpl::RemoveSource(Source* audio_source) {
RTC_DCHECK(audio_source);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
const auto iter = FindSourceInList(audio_source, &audio_source_list_);
RTC_DCHECK(iter != audio_source_list_.end()) << "Source not present in mixer";
audio_source_list_.erase(iter);
@@ -227,7 +227,7 @@ AudioFrameList AudioMixerImpl::GetAudioFromSources() {
bool AudioMixerImpl::GetAudioSourceMixabilityStatusForTest(
AudioMixerImpl::Source* audio_source) const {
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
const auto iter = FindSourceInList(audio_source, &audio_source_list_);
if (iter != audio_source_list_.end()) {
diff --git a/modules/audio_mixer/audio_mixer_impl.h b/modules/audio_mixer/audio_mixer_impl.h
index c503932909..57b1f5e4a0 100644
--- a/modules/audio_mixer/audio_mixer_impl.h
+++ b/modules/audio_mixer/audio_mixer_impl.h
@@ -22,8 +22,8 @@
#include "modules/audio_mixer/frame_combiner.h"
#include "modules/audio_mixer/output_rate_calculator.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -63,7 +63,7 @@ class AudioMixerImpl : public AudioMixer {
void Mix(size_t number_of_channels,
AudioFrame* audio_frame_for_mixing) override
- RTC_LOCKS_EXCLUDED(crit_);
+ RTC_LOCKS_EXCLUDED(mutex_);
// Returns true if the source was mixed last round. Returns
// false and logs an error if the source was never added to the
@@ -83,12 +83,12 @@ class AudioMixerImpl : public AudioMixer {
// Compute what audio sources to mix from audio_source_list_. Ramp
// in and out. Update mixed status. Mixes up to
// kMaximumAmountOfMixedAudioSources audio sources.
- AudioFrameList GetAudioFromSources() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ AudioFrameList GetAudioFromSources() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// The critical section lock guards audio source insertion and
// removal, which can be done from any thread. The race checker
// checks that mixing is done sequentially.
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
rtc::RaceChecker race_checker_;
std::unique_ptr<OutputRateCalculator> output_rate_calculator_;
@@ -97,7 +97,7 @@ class AudioMixerImpl : public AudioMixer {
size_t sample_size_ RTC_GUARDED_BY(race_checker_);
// List of all audio sources. Note all lists are disjunct
- SourceStatusList audio_source_list_ RTC_GUARDED_BY(crit_); // May be mixed.
+ SourceStatusList audio_source_list_ RTC_GUARDED_BY(mutex_); // May be mixed.
// Component that handles actual adding of audio frames.
FrameCombiner frame_combiner_ RTC_GUARDED_BY(race_checker_);
diff --git a/modules/audio_mixer/audio_mixer_impl_unittest.cc b/modules/audio_mixer/audio_mixer_impl_unittest.cc
index f899dd618a..383771ce60 100644
--- a/modules/audio_mixer/audio_mixer_impl_unittest.cc
+++ b/modules/audio_mixer/audio_mixer_impl_unittest.cc
@@ -74,11 +74,13 @@ class MockMixerAudioSource : public ::testing::NiceMock<AudioMixer::Source> {
.WillByDefault(Return(kDefaultSampleRateHz));
}
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame));
+ MOCK_METHOD(AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame* audio_frame),
+ (override));
- MOCK_CONST_METHOD0(PreferredSampleRate, int());
- MOCK_CONST_METHOD0(Ssrc, int());
+ MOCK_METHOD(int, PreferredSampleRate, (), (const, override));
+ MOCK_METHOD(int, Ssrc, (), (const, override));
AudioFrame* fake_frame() { return &fake_frame_; }
AudioFrameInfo fake_info() { return fake_audio_frame_info_; }
@@ -604,7 +606,7 @@ class HighOutputRateCalculator : public OutputRateCalculator {
};
const int HighOutputRateCalculator::kDefaultFrequency;
-TEST(AudioMixer, MultipleChannelsAndHighRate) {
+TEST(AudioMixerDeathTest, MultipleChannelsAndHighRate) {
constexpr size_t kSamplesPerChannel =
HighOutputRateCalculator::kDefaultFrequency / 100;
// As many channels as an AudioFrame can fit:
diff --git a/modules/audio_mixer/frame_combiner_unittest.cc b/modules/audio_mixer/frame_combiner_unittest.cc
index 5f024a4a55..4b189a052e 100644
--- a/modules/audio_mixer/frame_combiner_unittest.cc
+++ b/modules/audio_mixer/frame_combiner_unittest.cc
@@ -89,7 +89,7 @@ TEST(FrameCombiner, BasicApiCallsLimiter) {
}
// There are DCHECKs in place to check for invalid parameters.
-TEST(FrameCombiner, DebugBuildCrashesWithManyChannels) {
+TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) {
FrameCombiner combiner(true);
for (const int rate : {8000, 18000, 34000, 48000}) {
for (const int number_of_channels : {10, 20, 21}) {
@@ -118,7 +118,7 @@ TEST(FrameCombiner, DebugBuildCrashesWithManyChannels) {
}
}
-TEST(FrameCombiner, DebugBuildCrashesWithHighRate) {
+TEST(FrameCombinerDeathTest, DebugBuildCrashesWithHighRate) {
FrameCombiner combiner(true);
for (const int rate : {50000, 96000, 128000, 196000}) {
for (const int number_of_channels : {1, 2, 3}) {
diff --git a/modules/audio_processing/BUILD.gn b/modules/audio_processing/BUILD.gn
index 7ca78e20b4..1510930640 100644
--- a/modules/audio_processing/BUILD.gn
+++ b/modules/audio_processing/BUILD.gn
@@ -53,8 +53,8 @@ rtc_library("api") {
"../../rtc_base/system:file_wrapper",
"../../rtc_base/system:rtc_export",
"agc:gain_control_interface",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_frame_proxies") {
@@ -187,6 +187,7 @@ rtc_library("audio_processing") {
"../../rtc_base:refcount",
"../../rtc_base:safe_minmax",
"../../rtc_base:sanitizer",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/system:rtc_export",
"../../system_wrappers:cpu_features_api",
"../../system_wrappers:field_trial",
@@ -203,8 +204,8 @@ rtc_library("audio_processing") {
"ns",
"transient:transient_suppressor_api",
"vad",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
deps += [
"../../common_audio",
@@ -255,8 +256,8 @@ rtc_source_set("rms_level") {
deps = [
"../../api:array_view",
"../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_processing_statistics") {
@@ -265,10 +266,8 @@ rtc_library("audio_processing_statistics") {
"include/audio_processing_statistics.cc",
"include/audio_processing_statistics.h",
]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("audio_frame_view") {
@@ -372,6 +371,7 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:safe_minmax",
"../../rtc_base:task_queue_for_test",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/system:arch",
"../../rtc_base/system:file_wrapper",
"../../system_wrappers",
@@ -395,8 +395,8 @@ if (rtc_include_tests) {
"utility:pffft_wrapper_unittest",
"vad:vad_unittests",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
@@ -422,8 +422,8 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_task_queue",
"aec_dump",
"aec_dump:aec_dump_unittests",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps += [ "//third_party/abseil-cpp/absl/flags:flag" ]
sources += [
"audio_processing_impl_locking_unittest.cc",
"audio_processing_impl_unittest.cc",
@@ -481,8 +481,8 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:safe_minmax",
"agc:gain_map",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_enable_protobuf) {
@@ -527,6 +527,8 @@ if (rtc_include_tests) {
"aec_dump",
"aec_dump:aec_dump_impl",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@@ -609,6 +611,6 @@ rtc_library("audioproc_test_utils") {
"../../test:test_support",
"../audio_coding:neteq_input_audio_tools",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn
index d07ffa6abe..507f2bc8bd 100644
--- a/modules/audio_processing/aec3/BUILD.gn
+++ b/modules/audio_processing/aec3/BUILD.gn
@@ -150,8 +150,8 @@ rtc_library("aec3") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -187,8 +187,8 @@ if (rtc_include_tests) {
"../../../test:field_trial",
"../../../test:test_support",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc b/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
index 8e4f5d9644..39f4e11192 100644
--- a/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
+++ b/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
@@ -285,13 +285,13 @@ TEST_P(AdaptiveFirFilterOneTwoFourEightRenderChannels,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null data dumper works.
-TEST(AdaptiveFirFilterTest, NullDataDumper) {
+TEST(AdaptiveFirFilterDeathTest, NullDataDumper) {
EXPECT_DEATH(AdaptiveFirFilter(9, 9, 250, 1, DetectOptimization(), nullptr),
"");
}
// Verifies that the check for non-null filter output works.
-TEST(AdaptiveFirFilterTest, NullFilterOutput) {
+TEST(AdaptiveFirFilterDeathTest, NullFilterOutput) {
ApmDataDumper data_dumper(42);
AdaptiveFirFilter filter(9, 9, 250, 1, DetectOptimization(), &data_dumper);
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
diff --git a/modules/audio_processing/aec3/aec3_fft.cc b/modules/audio_processing/aec3/aec3_fft.cc
index 1832101855..d1d4f7da06 100644
--- a/modules/audio_processing/aec3/aec3_fft.cc
+++ b/modules/audio_processing/aec3/aec3_fft.cc
@@ -15,6 +15,7 @@
#include <iterator>
#include "rtc_base/checks.h"
+#include "system_wrappers/include/cpu_features_wrapper.h"
namespace webrtc {
@@ -70,8 +71,18 @@ const float kSqrtHanning128[kFftLength] = {
0.19509032201613f, 0.17096188876030f, 0.14673047445536f, 0.12241067519922f,
0.09801714032956f, 0.07356456359967f, 0.04906767432742f, 0.02454122852291f};
+bool IsSse2Available() {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ return WebRtc_GetCPUInfo(kSSE2) != 0;
+#else
+ return false;
+#endif
+}
+
} // namespace
+Aec3Fft::Aec3Fft() : ooura_fft_(IsSse2Available()) {}
+
// TODO(peah): Change x to be std::array once the rest of the code allows this.
void Aec3Fft::ZeroPaddedFft(rtc::ArrayView<const float> x,
Window window,
diff --git a/modules/audio_processing/aec3/aec3_fft.h b/modules/audio_processing/aec3/aec3_fft.h
index 7a2e024d75..6f7fbe4d0e 100644
--- a/modules/audio_processing/aec3/aec3_fft.h
+++ b/modules/audio_processing/aec3/aec3_fft.h
@@ -28,7 +28,8 @@ class Aec3Fft {
public:
enum class Window { kRectangular, kHanning, kSqrtHanning };
- Aec3Fft() = default;
+ Aec3Fft();
+
// Computes the FFT. Note that both the input and output are modified.
void Fft(std::array<float, kFftLength>* x, FftData* X) const {
RTC_DCHECK(x);
diff --git a/modules/audio_processing/aec3/aec3_fft_unittest.cc b/modules/audio_processing/aec3/aec3_fft_unittest.cc
index 82d6e766cc..e60ef5b713 100644
--- a/modules/audio_processing/aec3/aec3_fft_unittest.cc
+++ b/modules/audio_processing/aec3/aec3_fft_unittest.cc
@@ -20,28 +20,28 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null input in Fft works.
-TEST(Aec3Fft, NullFftInput) {
+TEST(Aec3FftDeathTest, NullFftInput) {
Aec3Fft fft;
FftData X;
EXPECT_DEATH(fft.Fft(nullptr, &X), "");
}
// Verifies that the check for non-null input in Fft works.
-TEST(Aec3Fft, NullFftOutput) {
+TEST(Aec3FftDeathTest, NullFftOutput) {
Aec3Fft fft;
std::array<float, kFftLength> x;
EXPECT_DEATH(fft.Fft(&x, nullptr), "");
}
// Verifies that the check for non-null output in Ifft works.
-TEST(Aec3Fft, NullIfftOutput) {
+TEST(Aec3FftDeathTest, NullIfftOutput) {
Aec3Fft fft;
FftData X;
EXPECT_DEATH(fft.Ifft(X, nullptr), "");
}
// Verifies that the check for non-null output in ZeroPaddedFft works.
-TEST(Aec3Fft, NullZeroPaddedFftOutput) {
+TEST(Aec3FftDeathTest, NullZeroPaddedFftOutput) {
Aec3Fft fft;
std::array<float, kFftLengthBy2> x;
EXPECT_DEATH(fft.ZeroPaddedFft(x, Aec3Fft::Window::kRectangular, nullptr),
@@ -49,7 +49,7 @@ TEST(Aec3Fft, NullZeroPaddedFftOutput) {
}
// Verifies that the check for input length in ZeroPaddedFft works.
-TEST(Aec3Fft, ZeroPaddedFftWrongInputLength) {
+TEST(Aec3FftDeathTest, ZeroPaddedFftWrongInputLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2 - 1> x;
@@ -57,7 +57,7 @@ TEST(Aec3Fft, ZeroPaddedFftWrongInputLength) {
}
// Verifies that the check for non-null output in PaddedFft works.
-TEST(Aec3Fft, NullPaddedFftOutput) {
+TEST(Aec3FftDeathTest, NullPaddedFftOutput) {
Aec3Fft fft;
std::array<float, kFftLengthBy2> x;
std::array<float, kFftLengthBy2> x_old;
@@ -65,7 +65,7 @@ TEST(Aec3Fft, NullPaddedFftOutput) {
}
// Verifies that the check for input length in PaddedFft works.
-TEST(Aec3Fft, PaddedFftWrongInputLength) {
+TEST(Aec3FftDeathTest, PaddedFftWrongInputLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2 - 1> x;
@@ -74,7 +74,7 @@ TEST(Aec3Fft, PaddedFftWrongInputLength) {
}
// Verifies that the check for length in the old value in PaddedFft works.
-TEST(Aec3Fft, PaddedFftWrongOldValuesLength) {
+TEST(Aec3FftDeathTest, PaddedFftWrongOldValuesLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2> x;
diff --git a/modules/audio_processing/aec3/alignment_mixer_unittest.cc b/modules/audio_processing/aec3/alignment_mixer_unittest.cc
index 832e4ea884..03ef06614b 100644
--- a/modules/audio_processing/aec3/alignment_mixer_unittest.cc
+++ b/modules/audio_processing/aec3/alignment_mixer_unittest.cc
@@ -175,7 +175,7 @@ TEST(AlignmentMixer, FixedMode) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(AlignmentMixer, ZeroNumChannels) {
+TEST(AlignmentMixerDeathTest, ZeroNumChannels) {
EXPECT_DEATH(
AlignmentMixer(/*num_channels*/ 0, /*downmix*/ false,
/*adaptive_selection*/ false, /*excitation_limit*/ 1.f,
@@ -183,7 +183,7 @@ TEST(AlignmentMixer, ZeroNumChannels) {
, "");
}
-TEST(AlignmentMixer, IncorrectVariant) {
+TEST(AlignmentMixerDeathTest, IncorrectVariant) {
EXPECT_DEATH(
AlignmentMixer(/*num_channels*/ 1, /*downmix*/ true,
/*adaptive_selection*/ true, /*excitation_limit*/ 1.f,
diff --git a/modules/audio_processing/aec3/block_framer_unittest.cc b/modules/audio_processing/aec3/block_framer_unittest.cc
index e9a16d06d5..d67967bc02 100644
--- a/modules/audio_processing/aec3/block_framer_unittest.cc
+++ b/modules/audio_processing/aec3/block_framer_unittest.cc
@@ -214,7 +214,8 @@ std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) {
} // namespace
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
+TEST(BlockFramerDeathTest,
+ WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -227,7 +228,7 @@ TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfChannelsInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -241,7 +242,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfBandsInSubFrameForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -255,7 +256,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfChannelsInSubFrameForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -269,7 +270,8 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
+TEST(BlockFramerDeathTest,
+ WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -282,7 +284,7 @@ TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfSamplesInSubFrameForInsertBlockAndExtractSubFrame) {
const size_t correct_num_channels = 1;
for (auto rate : {16000, 32000, 48000}) {
@@ -295,7 +297,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfBandsInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -308,7 +310,7 @@ TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfChannelsInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfChannelsInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -321,7 +323,7 @@ TEST(BlockFramer, WrongNumberOfChannelsInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfSamplesInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -333,7 +335,7 @@ TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfPreceedingApiCallsForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfPreceedingApiCallsForInsertBlock) {
for (size_t num_channels : {1, 2, 8}) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_calls = 0; num_calls < 4; ++num_calls) {
@@ -351,17 +353,17 @@ TEST(BlockFramer, WrongNumberOfPreceedingApiCallsForInsertBlock) {
}
// Verifies that the verification for 0 number of channels works.
-TEST(BlockFramer, ZeroNumberOfChannelsParameter) {
+TEST(BlockFramerDeathTest, ZeroNumberOfChannelsParameter) {
EXPECT_DEATH(BlockFramer(16000, 0), "");
}
// Verifies that the verification for 0 number of bands works.
-TEST(BlockFramer, ZeroNumberOfBandsParameter) {
+TEST(BlockFramerDeathTest, ZeroNumberOfBandsParameter) {
EXPECT_DEATH(BlockFramer(0, 1), "");
}
// Verifies that the verification for null sub_frame pointer works.
-TEST(BlockFramer, NullSubFrameParameter) {
+TEST(BlockFramerDeathTest, NullSubFrameParameter) {
EXPECT_DEATH(BlockFramer(1, 1).InsertBlockAndExtractSubFrame(
std::vector<std::vector<std::vector<float>>>(
1, std::vector<std::vector<float>>(
diff --git a/modules/audio_processing/aec3/block_processor_unittest.cc b/modules/audio_processing/aec3/block_processor_unittest.cc
index 2b928e877b..911dad4c81 100644
--- a/modules/audio_processing/aec3/block_processor_unittest.cc
+++ b/modules/audio_processing/aec3/block_processor_unittest.cc
@@ -252,21 +252,21 @@ TEST(BlockProcessor, TestLongerCall) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// TODO(gustaf): Re-enable the test once the issue with memory leaks during
// DEATH tests on test bots has been fixed.
-TEST(BlockProcessor, DISABLED_VerifyRenderBlockSizeCheck) {
+TEST(BlockProcessorDeathTest, DISABLED_VerifyRenderBlockSizeCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunRenderBlockSizeVerificationTest(rate);
}
}
-TEST(BlockProcessor, VerifyCaptureBlockSizeCheck) {
+TEST(BlockProcessorDeathTest, VerifyCaptureBlockSizeCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunCaptureBlockSizeVerificationTest(rate);
}
}
-TEST(BlockProcessor, VerifyRenderNumBandsCheck) {
+TEST(BlockProcessorDeathTest, VerifyRenderNumBandsCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunRenderNumBandsVerificationTest(rate);
@@ -275,7 +275,7 @@ TEST(BlockProcessor, VerifyRenderNumBandsCheck) {
// TODO(peah): Verify the check for correct number of bands in the capture
// signal.
-TEST(BlockProcessor, VerifyCaptureNumBandsCheck) {
+TEST(BlockProcessorDeathTest, VerifyCaptureNumBandsCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunCaptureNumBandsVerificationTest(rate);
@@ -283,7 +283,7 @@ TEST(BlockProcessor, VerifyCaptureNumBandsCheck) {
}
// Verifiers that the verification for null ProcessCapture input works.
-TEST(BlockProcessor, NullProcessCaptureParameter) {
+TEST(BlockProcessorDeathTest, NullProcessCaptureParameter) {
EXPECT_DEATH(std::unique_ptr<BlockProcessor>(
BlockProcessor::Create(EchoCanceller3Config(), 16000, 1, 1))
->ProcessCapture(false, false, nullptr, nullptr),
diff --git a/modules/audio_processing/aec3/clockdrift_detector.h b/modules/audio_processing/aec3/clockdrift_detector.h
index 22528c9489..2ba90bb889 100644
--- a/modules/audio_processing/aec3/clockdrift_detector.h
+++ b/modules/audio_processing/aec3/clockdrift_detector.h
@@ -11,6 +11,8 @@
#ifndef MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_
#define MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_
+#include <stddef.h>
+
#include <array>
namespace webrtc {
diff --git a/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc b/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
index 4185c1adb8..92775cf702 100644
--- a/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
+++ b/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
@@ -138,7 +138,7 @@ std::string ProduceDebugText(size_t delay, int filter_length_blocks) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gain parameter works.
-TEST(CoarseFilterUpdateGain, NullDataOutputGain) {
+TEST(CoarseFilterUpdateGainDeathTest, NullDataOutputGain) {
ApmDataDumper data_dumper(42);
FftBuffer fft_buffer(1, 1);
RenderSignalAnalyzer analyzer(EchoCanceller3Config{});
diff --git a/modules/audio_processing/aec3/decimator_unittest.cc b/modules/audio_processing/aec3/decimator_unittest.cc
index 1e279cea3e..e6f5ea0403 100644
--- a/modules/audio_processing/aec3/decimator_unittest.cc
+++ b/modules/audio_processing/aec3/decimator_unittest.cc
@@ -103,7 +103,7 @@ TEST(Decimator, NoLeakageFromUpperFrequencies) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for the input size.
-TEST(Decimator, WrongInputSize) {
+TEST(DecimatorDeathTest, WrongInputSize) {
Decimator decimator(4);
std::vector<float> x(kBlockSize - 1, 0.f);
std::array<float, kBlockSize / 4> x_downsampled;
@@ -111,14 +111,14 @@ TEST(Decimator, WrongInputSize) {
}
// Verifies the check for non-null output parameter.
-TEST(Decimator, NullOutput) {
+TEST(DecimatorDeathTest, NullOutput) {
Decimator decimator(4);
std::vector<float> x(kBlockSize, 0.f);
EXPECT_DEATH(decimator.Decimate(x, nullptr), "");
}
// Verifies the check for the output size.
-TEST(Decimator, WrongOutputSize) {
+TEST(DecimatorDeathTest, WrongOutputSize) {
Decimator decimator(4);
std::vector<float> x(kBlockSize, 0.f);
std::array<float, kBlockSize / 4 - 1> x_downsampled;
@@ -126,7 +126,7 @@ TEST(Decimator, WrongOutputSize) {
}
// Verifies the check for the correct downsampling factor.
-TEST(Decimator, CorrectDownSamplingFactor) {
+TEST(DecimatorDeathTest, CorrectDownSamplingFactor) {
EXPECT_DEATH(Decimator(3), "");
}
diff --git a/modules/audio_processing/aec3/echo_canceller3_unittest.cc b/modules/audio_processing/aec3/echo_canceller3_unittest.cc
index 21255f192e..04d93e4db4 100644
--- a/modules/audio_processing/aec3/echo_canceller3_unittest.cc
+++ b/modules/audio_processing/aec3/echo_canceller3_unittest.cc
@@ -890,7 +890,7 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideOneParam) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(EchoCanceller3InputCheck, WrongCaptureNumBandsCheckVerification) {
+TEST(EchoCanceller3InputCheckDeathTest, WrongCaptureNumBandsCheckVerification) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
EchoCanceller3Tester(rate).RunProcessCaptureNumBandsCheckVerification();
@@ -899,7 +899,7 @@ TEST(EchoCanceller3InputCheck, WrongCaptureNumBandsCheckVerification) {
// Verifiers that the verification for null input to the capture processing api
// call works.
-TEST(EchoCanceller3InputCheck, NullCaptureProcessingParameter) {
+TEST(EchoCanceller3InputCheckDeathTest, NullCaptureProcessingParameter) {
EXPECT_DEATH(EchoCanceller3(EchoCanceller3Config(), 16000, 1, 1)
.ProcessCapture(nullptr, false),
"");
@@ -908,7 +908,7 @@ TEST(EchoCanceller3InputCheck, NullCaptureProcessingParameter) {
// Verifies the check for correct sample rate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoCanceller3InputCheck, DISABLED_WrongSampleRate) {
+TEST(EchoCanceller3InputCheckDeathTest, DISABLED_WrongSampleRate) {
ApmDataDumper data_dumper(0);
EXPECT_DEATH(EchoCanceller3(EchoCanceller3Config(), 8001, 1, 1), "");
}
diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
index 8003a11bbc..6ba4cdd0d7 100644
--- a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
+++ b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
@@ -165,7 +165,7 @@ TEST(EchoPathDelayEstimator, NoDelayEstimatesForLowLevelRenderSignals) {
// Verifies the check for the render blocksize.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoPathDelayEstimator, DISABLED_WrongRenderBlockSize) {
+TEST(EchoPathDelayEstimatorDeathTest, DISABLED_WrongRenderBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EchoPathDelayEstimator estimator(&data_dumper, config, 1);
@@ -180,7 +180,7 @@ TEST(EchoPathDelayEstimator, DISABLED_WrongRenderBlockSize) {
// Verifies the check for the capture blocksize.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoPathDelayEstimator, WrongCaptureBlockSize) {
+TEST(EchoPathDelayEstimatorDeathTest, WrongCaptureBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EchoPathDelayEstimator estimator(&data_dumper, config, 1);
@@ -194,7 +194,7 @@ TEST(EchoPathDelayEstimator, WrongCaptureBlockSize) {
}
// Verifies the check for non-null data dumper.
-TEST(EchoPathDelayEstimator, NullDataDumper) {
+TEST(EchoPathDelayEstimatorDeathTest, NullDataDumper) {
EXPECT_DEATH(EchoPathDelayEstimator(nullptr, EchoCanceller3Config(), 1), "");
}
diff --git a/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc b/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
index 30c6611869..45b30a9c74 100644
--- a/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
+++ b/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
@@ -23,7 +23,7 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null input.
-TEST(UpdateDbMetric, NullValue) {
+TEST(UpdateDbMetricDeathTest, NullValue) {
std::array<float, kFftLengthBy2Plus1> value;
value.fill(0.f);
EXPECT_DEATH(aec3::UpdateDbMetric(value, nullptr), "");
diff --git a/modules/audio_processing/aec3/echo_remover_unittest.cc b/modules/audio_processing/aec3/echo_remover_unittest.cc
index e050027c63..77a207659c 100644
--- a/modules/audio_processing/aec3/echo_remover_unittest.cc
+++ b/modules/audio_processing/aec3/echo_remover_unittest.cc
@@ -91,14 +91,14 @@ TEST_P(EchoRemoverMultiChannel, BasicApiCalls) {
// Verifies the check for the samplerate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoRemover, DISABLED_WrongSampleRate) {
+TEST(EchoRemoverDeathTest, DISABLED_WrongSampleRate) {
EXPECT_DEATH(std::unique_ptr<EchoRemover>(
EchoRemover::Create(EchoCanceller3Config(), 8001, 1, 1)),
"");
}
// Verifies the check for the capture block size.
-TEST(EchoRemover, WrongCaptureBlockSize) {
+TEST(EchoRemoverDeathTest, WrongCaptureBlockSize) {
absl::optional<DelayEstimate> delay_estimate;
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -121,7 +121,7 @@ TEST(EchoRemover, WrongCaptureBlockSize) {
// Verifies the check for the number of capture bands.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.c
-TEST(EchoRemover, DISABLED_WrongCaptureNumBands) {
+TEST(EchoRemoverDeathTest, DISABLED_WrongCaptureNumBands) {
absl::optional<DelayEstimate> delay_estimate;
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -143,7 +143,7 @@ TEST(EchoRemover, DISABLED_WrongCaptureNumBands) {
}
// Verifies the check for non-null capture block.
-TEST(EchoRemover, NullCapture) {
+TEST(EchoRemoverDeathTest, NullCapture) {
absl::optional<DelayEstimate> delay_estimate;
std::unique_ptr<EchoRemover> remover(
EchoRemover::Create(EchoCanceller3Config(), 16000, 1, 1));
diff --git a/modules/audio_processing/aec3/fft_data_unittest.cc b/modules/audio_processing/aec3/fft_data_unittest.cc
index 0812fd6420..9be2680453 100644
--- a/modules/audio_processing/aec3/fft_data_unittest.cc
+++ b/modules/audio_processing/aec3/fft_data_unittest.cc
@@ -44,12 +44,12 @@ TEST(FftData, TestOptimizations) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for null output in CopyToPackedArray.
-TEST(FftData, NonNullCopyToPackedArrayOutput) {
+TEST(FftDataDeathTest, NonNullCopyToPackedArrayOutput) {
EXPECT_DEATH(FftData().CopyToPackedArray(nullptr), "");
}
// Verifies the check for null output in Spectrum.
-TEST(FftData, NonNullSpectrumOutput) {
+TEST(FftDataDeathTest, NonNullSpectrumOutput) {
EXPECT_DEATH(FftData().Spectrum(Aec3Optimization::kNone, nullptr), "");
}
diff --git a/modules/audio_processing/aec3/frame_blocker_unittest.cc b/modules/audio_processing/aec3/frame_blocker_unittest.cc
index e907608d95..216f515037 100644
--- a/modules/audio_processing/aec3/frame_blocker_unittest.cc
+++ b/modules/audio_processing/aec3/frame_blocker_unittest.cc
@@ -287,7 +287,8 @@ std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) {
} // namespace
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(FrameBlocker, WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
+TEST(FrameBlockerDeathTest,
+ WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -300,7 +301,7 @@ TEST(FrameBlocker, WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfChannelsInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -314,7 +315,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfBandsInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -328,7 +329,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfChannelsInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -342,7 +343,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfSamplesInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -356,7 +357,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfSamplesInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -370,7 +371,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker, WrongNumberOfBandsInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfBandsInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -383,7 +384,7 @@ TEST(FrameBlocker, WrongNumberOfBandsInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfChannelsInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfChannelsInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -396,7 +397,7 @@ TEST(FrameBlocker, WrongNumberOfChannelsInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfSamplesInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfSamplesInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -408,7 +409,7 @@ TEST(FrameBlocker, WrongNumberOfSamplesInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfPreceedingApiCallsForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfPreceedingApiCallsForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 4, 8}) {
for (size_t num_calls = 0; num_calls < 4; ++num_calls) {
@@ -426,17 +427,17 @@ TEST(FrameBlocker, WrongNumberOfPreceedingApiCallsForExtractBlock) {
}
// Verifies that the verification for 0 number of channels works.
-TEST(FrameBlocker, ZeroNumberOfChannelsParameter) {
+TEST(FrameBlockerDeathTest, ZeroNumberOfChannelsParameter) {
EXPECT_DEATH(FrameBlocker(16000, 0), "");
}
// Verifies that the verification for 0 number of bands works.
-TEST(FrameBlocker, ZeroNumberOfBandsParameter) {
+TEST(FrameBlockerDeathTest, ZeroNumberOfBandsParameter) {
EXPECT_DEATH(FrameBlocker(0, 1), "");
}
// Verifiers that the verification for null sub_frame pointer works.
-TEST(FrameBlocker, NullBlockParameter) {
+TEST(FrameBlockerDeathTest, NullBlockParameter) {
std::vector<std::vector<std::vector<float>>> sub_frame(
1, std::vector<std::vector<float>>(
1, std::vector<float>(kSubFrameLength, 0.f)));
diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc b/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
index e136c89877..8e2a12e6c5 100644
--- a/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
+++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
@@ -144,7 +144,7 @@ TEST(MatchedFilterLagAggregator, DISABLED_PersistentAggregatedLag) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null data dumper.
-TEST(MatchedFilterLagAggregator, NullDataDumper) {
+TEST(MatchedFilterLagAggregatorDeathTest, NullDataDumper) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilterLagAggregator(
nullptr, 10, config.delay.delay_selection_thresholds),
diff --git a/modules/audio_processing/aec3/matched_filter_unittest.cc b/modules/audio_processing/aec3/matched_filter_unittest.cc
index 8a6e22eeca..7d9a7d4d0a 100644
--- a/modules/audio_processing/aec3/matched_filter_unittest.cc
+++ b/modules/audio_processing/aec3/matched_filter_unittest.cc
@@ -375,7 +375,7 @@ TEST(MatchedFilter, NumberOfLagEstimates) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-zero windows size.
-TEST(MatchedFilter, ZeroWindowSize) {
+TEST(MatchedFilterDeathTest, ZeroWindowSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 16, 0, 1, 1,
@@ -385,7 +385,7 @@ TEST(MatchedFilter, ZeroWindowSize) {
}
// Verifies the check for non-null data dumper.
-TEST(MatchedFilter, NullDataDumper) {
+TEST(MatchedFilterDeathTest, NullDataDumper) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(nullptr, DetectOptimization(), 16, 1, 1, 1, 150,
config.delay.delay_estimate_smoothing,
@@ -395,7 +395,7 @@ TEST(MatchedFilter, NullDataDumper) {
// Verifies the check for that the sub block size is a multiple of 4.
// TODO(peah): Activate the unittest once the required code has been landed.
-TEST(MatchedFilter, DISABLED_BlockSizeMultipleOf4) {
+TEST(MatchedFilterDeathTest, DISABLED_BlockSizeMultipleOf4) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 15, 1, 1, 1,
@@ -407,7 +407,7 @@ TEST(MatchedFilter, DISABLED_BlockSizeMultipleOf4) {
// Verifies the check for that there is an integer number of sub blocks that add
// up to a block size.
// TODO(peah): Activate the unittest once the required code has been landed.
-TEST(MatchedFilter, DISABLED_SubBlockSizeAddsUpToBlockSize) {
+TEST(MatchedFilterDeathTest, DISABLED_SubBlockSizeAddsUpToBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 12, 1, 1, 1,
diff --git a/modules/audio_processing/aec3/mock/mock_block_processor.h b/modules/audio_processing/aec3/mock/mock_block_processor.h
index e9a95c837d..e1eb26702f 100644
--- a/modules/audio_processing/aec3/mock/mock_block_processor.h
+++ b/modules/audio_processing/aec3/mock/mock_block_processor.h
@@ -24,17 +24,26 @@ class MockBlockProcessor : public BlockProcessor {
MockBlockProcessor();
virtual ~MockBlockProcessor();
- MOCK_METHOD4(
- ProcessCapture,
- void(bool level_change,
- bool saturated_microphone_signal,
- std::vector<std::vector<std::vector<float>>>* linear_output,
- std::vector<std::vector<std::vector<float>>>* capture_block));
- MOCK_METHOD1(BufferRender,
- void(const std::vector<std::vector<std::vector<float>>>& block));
- MOCK_METHOD1(UpdateEchoLeakageStatus, void(bool leakage_detected));
- MOCK_CONST_METHOD1(GetMetrics, void(EchoControl::Metrics* metrics));
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (bool level_change,
+ bool saturated_microphone_signal,
+ std::vector<std::vector<std::vector<float>>>* linear_output,
+ std::vector<std::vector<std::vector<float>>>* capture_block),
+ (override));
+ MOCK_METHOD(void,
+ BufferRender,
+ (const std::vector<std::vector<std::vector<float>>>& block),
+ (override));
+ MOCK_METHOD(void,
+ UpdateEchoLeakageStatus,
+ (bool leakage_detected),
+ (override));
+ MOCK_METHOD(void,
+ GetMetrics,
+ (EchoControl::Metrics * metrics),
+ (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
};
} // namespace test
diff --git a/modules/audio_processing/aec3/mock/mock_echo_remover.h b/modules/audio_processing/aec3/mock/mock_echo_remover.h
index 6c580f3a91..8a3044bcf1 100644
--- a/modules/audio_processing/aec3/mock/mock_echo_remover.h
+++ b/modules/audio_processing/aec3/mock/mock_echo_remover.h
@@ -27,16 +27,23 @@ class MockEchoRemover : public EchoRemover {
MockEchoRemover();
virtual ~MockEchoRemover();
- MOCK_METHOD6(ProcessCapture,
- void(EchoPathVariability echo_path_variability,
- bool capture_signal_saturation,
- const absl::optional<DelayEstimate>& delay_estimate,
- RenderBuffer* render_buffer,
- std::vector<std::vector<std::vector<float>>>* linear_output,
- std::vector<std::vector<std::vector<float>>>* capture));
- MOCK_CONST_METHOD0(Delay, absl::optional<int>());
- MOCK_METHOD1(UpdateEchoLeakageStatus, void(bool leakage_detected));
- MOCK_CONST_METHOD1(GetMetrics, void(EchoControl::Metrics* metrics));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (EchoPathVariability echo_path_variability,
+ bool capture_signal_saturation,
+ const absl::optional<DelayEstimate>& delay_estimate,
+ RenderBuffer* render_buffer,
+ std::vector<std::vector<std::vector<float>>>* linear_output,
+ std::vector<std::vector<std::vector<float>>>* capture),
+ (override));
+ MOCK_METHOD(void,
+ UpdateEchoLeakageStatus,
+ (bool leakage_detected),
+ (override));
+ MOCK_METHOD(void,
+ GetMetrics,
+ (EchoControl::Metrics * metrics),
+ (const, override));
};
} // namespace test
diff --git a/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h b/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
index f83c670632..26f58cfe1e 100644
--- a/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
+++ b/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
@@ -27,21 +27,26 @@ class MockRenderDelayBuffer : public RenderDelayBuffer {
MockRenderDelayBuffer(int sample_rate_hz, size_t num_channels);
virtual ~MockRenderDelayBuffer();
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(Insert,
- RenderDelayBuffer::BufferingEvent(
- const std::vector<std::vector<std::vector<float>>>& block));
- MOCK_METHOD0(PrepareCaptureProcessing, RenderDelayBuffer::BufferingEvent());
- MOCK_METHOD1(AlignFromDelay, bool(size_t delay));
- MOCK_METHOD0(AlignFromExternalDelay, void());
- MOCK_CONST_METHOD0(Delay, size_t());
- MOCK_CONST_METHOD0(MaxDelay, size_t());
- MOCK_METHOD0(GetRenderBuffer, RenderBuffer*());
- MOCK_CONST_METHOD0(GetDownsampledRenderBuffer,
- const DownsampledRenderBuffer&());
- MOCK_CONST_METHOD1(CausalDelay, bool(size_t delay));
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_METHOD0(HasReceivedBufferDelay, bool());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(RenderDelayBuffer::BufferingEvent,
+ Insert,
+ (const std::vector<std::vector<std::vector<float>>>& block),
+ (override));
+ MOCK_METHOD(RenderDelayBuffer::BufferingEvent,
+ PrepareCaptureProcessing,
+ (),
+ (override));
+ MOCK_METHOD(bool, AlignFromDelay, (size_t delay), (override));
+ MOCK_METHOD(void, AlignFromExternalDelay, (), (override));
+ MOCK_METHOD(size_t, Delay, (), (const, override));
+ MOCK_METHOD(size_t, MaxDelay, (), (const, override));
+ MOCK_METHOD(RenderBuffer*, GetRenderBuffer, (), (override));
+ MOCK_METHOD(const DownsampledRenderBuffer&,
+ GetDownsampledRenderBuffer,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, HasReceivedBufferDelay, (), (override));
private:
RenderBuffer* FakeGetRenderBuffer() { return &render_buffer_; }
diff --git a/modules/audio_processing/aec3/mock/mock_render_delay_controller.h b/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
index e72333eaeb..67d8baefe6 100644
--- a/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
+++ b/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
@@ -25,14 +25,15 @@ class MockRenderDelayController : public RenderDelayController {
MockRenderDelayController();
virtual ~MockRenderDelayController();
- MOCK_METHOD1(Reset, void(bool reset_delay_statistics));
- MOCK_METHOD0(LogRenderCall, void());
- MOCK_METHOD3(GetDelay,
- absl::optional<DelayEstimate>(
- const DownsampledRenderBuffer& render_buffer,
- size_t render_delay_buffer_delay,
- const std::vector<std::vector<float>>& capture));
- MOCK_CONST_METHOD0(HasClockdrift, bool());
+ MOCK_METHOD(void, Reset, (bool reset_delay_statistics), (override));
+ MOCK_METHOD(void, LogRenderCall, (), (override));
+ MOCK_METHOD(absl::optional<DelayEstimate>,
+ GetDelay,
+ (const DownsampledRenderBuffer& render_buffer,
+ size_t render_delay_buffer_delay,
+ const std::vector<std::vector<float>>& capture),
+ (override));
+ MOCK_METHOD(bool, HasClockdrift, (), (const, override));
};
} // namespace test
diff --git a/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc b/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
index 117f34508e..2393fddd6f 100644
--- a/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
+++ b/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
@@ -234,7 +234,7 @@ std::string ProduceDebugText(size_t delay, int filter_length_blocks) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gain parameter works.
-TEST(RefinedFilterUpdateGain, NullDataOutputGain) {
+TEST(RefinedFilterUpdateGainDeathTest, NullDataOutputGain) {
ApmDataDumper data_dumper(42);
EchoCanceller3Config config;
RenderSignalAnalyzer analyzer(config);
diff --git a/modules/audio_processing/aec3/render_buffer_unittest.cc b/modules/audio_processing/aec3/render_buffer_unittest.cc
index 6981f6d510..4559528600 100644
--- a/modules/audio_processing/aec3/render_buffer_unittest.cc
+++ b/modules/audio_processing/aec3/render_buffer_unittest.cc
@@ -21,21 +21,21 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null fft buffer.
-TEST(RenderBuffer, NullExternalFftBuffer) {
+TEST(RenderBufferDeathTest, NullExternalFftBuffer) {
BlockBuffer block_buffer(10, 3, 1, kBlockSize);
SpectrumBuffer spectrum_buffer(10, 1);
EXPECT_DEATH(RenderBuffer(&block_buffer, &spectrum_buffer, nullptr), "");
}
// Verifies the check for non-null spectrum buffer.
-TEST(RenderBuffer, NullExternalSpectrumBuffer) {
+TEST(RenderBufferDeathTest, NullExternalSpectrumBuffer) {
FftBuffer fft_buffer(10, 1);
BlockBuffer block_buffer(10, 3, 1, kBlockSize);
EXPECT_DEATH(RenderBuffer(&block_buffer, nullptr, &fft_buffer), "");
}
// Verifies the check for non-null block buffer.
-TEST(RenderBuffer, NullExternalBlockBuffer) {
+TEST(RenderBufferDeathTest, NullExternalBlockBuffer) {
FftBuffer fft_buffer(10, 1);
SpectrumBuffer spectrum_buffer(10, 1);
EXPECT_DEATH(RenderBuffer(nullptr, &spectrum_buffer, &fft_buffer), "");
diff --git a/modules/audio_processing/aec3/render_delay_buffer_unittest.cc b/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
index 35e81319cf..efd4a29920 100644
--- a/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
+++ b/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
@@ -97,14 +97,14 @@ TEST(RenderDelayBuffer, AlignFromDelay) {
// Verifies the check for feasible delay.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(RenderDelayBuffer, DISABLED_WrongDelay) {
+TEST(RenderDelayBufferDeathTest, DISABLED_WrongDelay) {
std::unique_ptr<RenderDelayBuffer> delay_buffer(
RenderDelayBuffer::Create(EchoCanceller3Config(), 48000, 1));
EXPECT_DEATH(delay_buffer->AlignFromDelay(21), "");
}
// Verifies the check for the number of bands in the inserted blocks.
-TEST(RenderDelayBuffer, WrongNumberOfBands) {
+TEST(RenderDelayBufferDeathTest, WrongNumberOfBands) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -120,7 +120,7 @@ TEST(RenderDelayBuffer, WrongNumberOfBands) {
}
// Verifies the check for the number of channels in the inserted blocks.
-TEST(RenderDelayBuffer, WrongNumberOfChannels) {
+TEST(RenderDelayBufferDeathTest, WrongNumberOfChannels) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -136,7 +136,7 @@ TEST(RenderDelayBuffer, WrongNumberOfChannels) {
}
// Verifies the check of the length of the inserted blocks.
-TEST(RenderDelayBuffer, WrongBlockLength) {
+TEST(RenderDelayBufferDeathTest, WrongBlockLength) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
diff --git a/modules/audio_processing/aec3/render_delay_controller_unittest.cc b/modules/audio_processing/aec3/render_delay_controller_unittest.cc
index fb7b86a75d..0d3c856466 100644
--- a/modules/audio_processing/aec3/render_delay_controller_unittest.cc
+++ b/modules/audio_processing/aec3/render_delay_controller_unittest.cc
@@ -325,7 +325,7 @@ TEST(RenderDelayController, DISABLED_AlignmentWithJitter) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for the capture signal block size.
-TEST(RenderDelayController, WrongCaptureSize) {
+TEST(RenderDelayControllerDeathTest, WrongCaptureSize) {
std::vector<std::vector<float>> block(
1, std::vector<float>(kBlockSize - 1, 0.f));
EchoCanceller3Config config;
@@ -345,7 +345,7 @@ TEST(RenderDelayController, WrongCaptureSize) {
// Verifies the check for correct sample rate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(RenderDelayController, DISABLED_WrongSampleRate) {
+TEST(RenderDelayControllerDeathTest, DISABLED_WrongSampleRate) {
for (auto rate : {-1, 0, 8001, 16001}) {
SCOPED_TRACE(ProduceDebugText(rate));
EchoCanceller3Config config;
diff --git a/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc b/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
index f40fade830..7a48cc4b69 100644
--- a/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
+++ b/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
@@ -117,7 +117,7 @@ std::string ProduceDebugText(size_t num_channels) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output parameter works.
-TEST(RenderSignalAnalyzer, NullMaskOutput) {
+TEST(RenderSignalAnalyzerDeathTest, NullMaskOutput) {
RenderSignalAnalyzer analyzer(EchoCanceller3Config{});
EXPECT_DEATH(analyzer.MaskRegionsAroundNarrowBands(nullptr), "");
}
diff --git a/modules/audio_processing/aec3/subtractor_unittest.cc b/modules/audio_processing/aec3/subtractor_unittest.cc
index 72e57879a0..bbc1e4ffc6 100644
--- a/modules/audio_processing/aec3/subtractor_unittest.cc
+++ b/modules/audio_processing/aec3/subtractor_unittest.cc
@@ -189,7 +189,7 @@ std::string ProduceDebugText(size_t num_render_channels,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non data dumper works.
-TEST(Subtractor, NullDataDumper) {
+TEST(SubtractorDeathTest, NullDataDumper) {
EXPECT_DEATH(
Subtractor(EchoCanceller3Config(), 1, 1, nullptr, DetectOptimization()),
"");
diff --git a/modules/audio_processing/aec3/suppression_filter.h b/modules/audio_processing/aec3/suppression_filter.h
index 281c2c30c4..dcf2292c7f 100644
--- a/modules/audio_processing/aec3/suppression_filter.h
+++ b/modules/audio_processing/aec3/suppression_filter.h
@@ -14,7 +14,6 @@
#include <array>
#include <vector>
-#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h"
#include "modules/audio_processing/aec3/aec3_common.h"
#include "modules/audio_processing/aec3/aec3_fft.h"
#include "modules/audio_processing/aec3/fft_data.h"
@@ -39,7 +38,6 @@ class SuppressionFilter {
const Aec3Optimization optimization_;
const int sample_rate_hz_;
const size_t num_capture_channels_;
- const OouraFft ooura_fft_;
const Aec3Fft fft_;
std::vector<std::vector<std::array<float, kFftLengthBy2>>> e_output_old_;
RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionFilter);
diff --git a/modules/audio_processing/aec3/suppression_filter_unittest.cc b/modules/audio_processing/aec3/suppression_filter_unittest.cc
index b55c719fa9..a160bec045 100644
--- a/modules/audio_processing/aec3/suppression_filter_unittest.cc
+++ b/modules/audio_processing/aec3/suppression_filter_unittest.cc
@@ -50,7 +50,7 @@ void ProduceSinusoid(int sample_rate_hz,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for null suppressor output.
-TEST(SuppressionFilter, NullOutput) {
+TEST(SuppressionFilterDeathTest, NullOutput) {
std::vector<FftData> cn(1);
std::vector<FftData> cn_high_bands(1);
std::vector<FftData> E(1);
@@ -62,7 +62,7 @@ TEST(SuppressionFilter, NullOutput) {
}
// Verifies the check for allowed sample rate.
-TEST(SuppressionFilter, ProperSampleRate) {
+TEST(SuppressionFilterDeathTest, ProperSampleRate) {
EXPECT_DEATH(SuppressionFilter(Aec3Optimization::kNone, 16001, 1), "");
}
diff --git a/modules/audio_processing/aec3/suppression_gain_unittest.cc b/modules/audio_processing/aec3/suppression_gain_unittest.cc
index 0452f2e1fb..4fb4cd7142 100644
--- a/modules/audio_processing/aec3/suppression_gain_unittest.cc
+++ b/modules/audio_processing/aec3/suppression_gain_unittest.cc
@@ -25,7 +25,7 @@ namespace aec3 {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gains works.
-TEST(SuppressionGain, NullOutputGains) {
+TEST(SuppressionGainDeathTest, NullOutputGains) {
std::vector<std::array<float, kFftLengthBy2Plus1>> E2(1, {0.f});
std::vector<std::array<float, kFftLengthBy2Plus1>> R2(1, {0.f});
std::vector<std::array<float, kFftLengthBy2Plus1>> S2(1);
diff --git a/modules/audio_processing/aec_dump/mock_aec_dump.h b/modules/audio_processing/aec_dump/mock_aec_dump.h
index 65306a7b28..b396739de4 100644
--- a/modules/audio_processing/aec_dump/mock_aec_dump.h
+++ b/modules/audio_processing/aec_dump/mock_aec_dump.h
@@ -25,36 +25,54 @@ class MockAecDump : public AecDump {
MockAecDump();
virtual ~MockAecDump();
- MOCK_METHOD2(WriteInitMessage,
- void(const ProcessingConfig& api_format, int64_t time_now_ms));
+ MOCK_METHOD(void,
+ WriteInitMessage,
+ (const ProcessingConfig& api_format, int64_t time_now_ms),
+ (override));
- MOCK_METHOD1(AddCaptureStreamInput,
- void(const AudioFrameView<const float>& src));
- MOCK_METHOD1(AddCaptureStreamOutput,
- void(const AudioFrameView<const float>& src));
- MOCK_METHOD3(AddCaptureStreamInput,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD3(AddCaptureStreamOutput,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD1(AddAudioProcessingState,
- void(const AudioProcessingState& state));
- MOCK_METHOD0(WriteCaptureStreamMessage, void());
+ MOCK_METHOD(void,
+ AddCaptureStreamInput,
+ (const AudioFrameView<const float>& src),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamOutput,
+ (const AudioFrameView<const float>& src),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamInput,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamOutput,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ AddAudioProcessingState,
+ (const AudioProcessingState& state),
+ (override));
+ MOCK_METHOD(void, WriteCaptureStreamMessage, (), (override));
- MOCK_METHOD3(WriteRenderStreamMessage,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD1(WriteRenderStreamMessage,
- void(const AudioFrameView<const float>& src));
+ MOCK_METHOD(void,
+ WriteRenderStreamMessage,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ WriteRenderStreamMessage,
+ (const AudioFrameView<const float>& src),
+ (override));
- MOCK_METHOD1(WriteConfig, void(const InternalAPMConfig& config));
+ MOCK_METHOD(void, WriteConfig, (const InternalAPMConfig& config), (override));
- MOCK_METHOD1(WriteRuntimeSetting,
- void(const AudioProcessing::RuntimeSetting& config));
+ MOCK_METHOD(void,
+ WriteRuntimeSetting,
+ (const AudioProcessing::RuntimeSetting& config),
+ (override));
};
} // namespace test
diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn
index 42830c918d..9ed6399cbf 100644
--- a/modules/audio_processing/agc/BUILD.gn
+++ b/modules/audio_processing/agc/BUILD.gn
@@ -36,8 +36,8 @@ rtc_library("agc") {
"../../../system_wrappers:metrics",
"../agc2:level_estimation_agc",
"../vad",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("level_estimation") {
diff --git a/modules/audio_processing/agc/agc_manager_direct_unittest.cc b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
index c5e65adec1..995801a8cb 100644
--- a/modules/audio_processing/agc/agc_manager_direct_unittest.cc
+++ b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
@@ -37,22 +37,23 @@ const int kMinMicLevel = 12;
class MockGainControl : public GainControl {
public:
virtual ~MockGainControl() {}
- MOCK_METHOD0(Initialize, void());
- MOCK_CONST_METHOD0(is_enabled, bool());
- MOCK_METHOD1(set_stream_analog_level, int(int level));
- MOCK_CONST_METHOD0(stream_analog_level, int());
- MOCK_METHOD1(set_mode, int(Mode mode));
- MOCK_CONST_METHOD0(mode, Mode());
- MOCK_METHOD1(set_target_level_dbfs, int(int level));
- MOCK_CONST_METHOD0(target_level_dbfs, int());
- MOCK_METHOD1(set_compression_gain_db, int(int gain));
- MOCK_CONST_METHOD0(compression_gain_db, int());
- MOCK_METHOD1(enable_limiter, int(bool enable));
- MOCK_CONST_METHOD0(is_limiter_enabled, bool());
- MOCK_METHOD2(set_analog_level_limits, int(int minimum, int maximum));
- MOCK_CONST_METHOD0(analog_level_minimum, int());
- MOCK_CONST_METHOD0(analog_level_maximum, int());
- MOCK_CONST_METHOD0(stream_is_saturated, bool());
+ MOCK_METHOD(int, set_stream_analog_level, (int level), (override));
+ MOCK_METHOD(int, stream_analog_level, (), (const, override));
+ MOCK_METHOD(int, set_mode, (Mode mode), (override));
+ MOCK_METHOD(Mode, mode, (), (const, override));
+ MOCK_METHOD(int, set_target_level_dbfs, (int level), (override));
+ MOCK_METHOD(int, target_level_dbfs, (), (const, override));
+ MOCK_METHOD(int, set_compression_gain_db, (int gain), (override));
+ MOCK_METHOD(int, compression_gain_db, (), (const, override));
+ MOCK_METHOD(int, enable_limiter, (bool enable), (override));
+ MOCK_METHOD(bool, is_limiter_enabled, (), (const, override));
+ MOCK_METHOD(int,
+ set_analog_level_limits,
+ (int minimum, int maximum),
+ (override));
+ MOCK_METHOD(int, analog_level_minimum, (), (const, override));
+ MOCK_METHOD(int, analog_level_maximum, (), (const, override));
+ MOCK_METHOD(bool, stream_is_saturated, (), (const, override));
};
} // namespace
diff --git a/modules/audio_processing/agc/mock_agc.h b/modules/audio_processing/agc/mock_agc.h
index 6542acc8d5..0ef41c6e52 100644
--- a/modules/audio_processing/agc/mock_agc.h
+++ b/modules/audio_processing/agc/mock_agc.h
@@ -19,14 +19,14 @@ namespace webrtc {
class MockAgc : public Agc {
public:
virtual ~MockAgc() {}
- MOCK_METHOD3(Process,
- void(const int16_t* audio, size_t length, int sample_rate_hz));
- MOCK_METHOD1(GetRmsErrorDb, bool(int* error));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(set_target_level_dbfs, int(int level));
- MOCK_CONST_METHOD0(target_level_dbfs, int());
- MOCK_METHOD1(EnableStandaloneVad, void(bool enable));
- MOCK_CONST_METHOD0(standalone_vad_enabled, bool());
+ MOCK_METHOD(void,
+ Process,
+ (const int16_t* audio, size_t length, int sample_rate_hz),
+ (override));
+ MOCK_METHOD(bool, GetRmsErrorDb, (int* error), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, set_target_level_dbfs, (int level), (override));
+ MOCK_METHOD(int, target_level_dbfs, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_processing/agc2/BUILD.gn b/modules/audio_processing/agc2/BUILD.gn
index 8d9bb14731..bfef2252c3 100644
--- a/modules/audio_processing/agc2/BUILD.gn
+++ b/modules/audio_processing/agc2/BUILD.gn
@@ -153,6 +153,7 @@ rtc_library("noise_level_estimator") {
"../../../common_audio/third_party/ooura:fft_size_128",
"../../../rtc_base:checks",
"../../../rtc_base:macromagic",
+ "../../../system_wrappers:cpu_features_api",
]
configs += [ "..:apm_debug_dump" ]
diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn
index 5d96fad583..99b4e82488 100644
--- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn
+++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn
@@ -117,9 +117,9 @@ if (rtc_include_tests) {
"../../../../rtc_base/system:arch",
"../../../../test:test_support",
"../../utility:pffft_wrapper",
- "//third_party/abseil-cpp/absl/memory",
"//third_party/rnnoise:rnn_vad",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
data = unittest_resources
if (is_ios) {
deps += [ ":unittests_bundle_data" ]
diff --git a/modules/audio_processing/agc2/signal_classifier.cc b/modules/audio_processing/agc2/signal_classifier.cc
index 8778c49426..38334f7ec5 100644
--- a/modules/audio_processing/agc2/signal_classifier.cc
+++ b/modules/audio_processing/agc2/signal_classifier.cc
@@ -19,10 +19,19 @@
#include "modules/audio_processing/agc2/noise_spectrum_estimator.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/checks.h"
+#include "system_wrappers/include/cpu_features_wrapper.h"
namespace webrtc {
namespace {
+bool IsSse2Available() {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ return WebRtc_GetCPUInfo(kSSE2) != 0;
+#else
+ return false;
+#endif
+}
+
void RemoveDcLevel(rtc::ArrayView<float> x) {
RTC_DCHECK_LT(0, x.size());
float mean = std::accumulate(x.data(), x.data() + x.size(), 0.f);
@@ -109,7 +118,8 @@ void SignalClassifier::FrameExtender::ExtendFrame(
SignalClassifier::SignalClassifier(ApmDataDumper* data_dumper)
: data_dumper_(data_dumper),
down_sampler_(data_dumper_),
- noise_spectrum_estimator_(data_dumper_) {
+ noise_spectrum_estimator_(data_dumper_),
+ ooura_fft_(IsSse2Available()) {
Initialize(48000);
}
SignalClassifier::~SignalClassifier() {}
diff --git a/modules/audio_processing/audio_buffer_unittest.cc b/modules/audio_processing/audio_buffer_unittest.cc
index 7cb51ca5f1..f3b2ddc689 100644
--- a/modules/audio_processing/audio_buffer_unittest.cc
+++ b/modules/audio_processing/audio_buffer_unittest.cc
@@ -40,7 +40,7 @@ TEST(AudioBufferTest, SetNumChannelsSetsChannelBuffersNumChannels) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(AudioBufferTest, SetNumChannelsDeathTest) {
+TEST(AudioBufferDeathTest, SetNumChannelsDeathTest) {
AudioBuffer ab(kSampleRateHz, kMono, kSampleRateHz, kMono, kSampleRateHz,
kMono);
RTC_EXPECT_DEATH(ab.set_num_channels(kStereo), "num_channels");
diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc
index 6abebd2612..b155bdbadb 100644
--- a/modules/audio_processing/audio_processing_impl.cc
+++ b/modules/audio_processing/audio_processing_impl.cc
@@ -313,8 +313,8 @@ AudioProcessingImpl::~AudioProcessingImpl() = default;
int AudioProcessingImpl::Initialize() {
// Run in a single-threaded manner during initialization.
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
return InitializeLocked();
}
@@ -340,8 +340,8 @@ int AudioProcessingImpl::Initialize(int capture_input_sample_rate_hz,
int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) {
// Run in a single-threaded manner during initialization.
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
return InitializeLocked(processing_config);
}
@@ -352,7 +352,7 @@ int AudioProcessingImpl::MaybeInitializeRender(
return kNoError;
}
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
return InitializeLocked(processing_config);
}
@@ -526,8 +526,8 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) {
RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " << config.ToString();
// Run in a single-threaded manner when applying the settings.
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
const bool pipeline_config_changed =
config_.pipeline.multi_channel_render !=
@@ -643,7 +643,7 @@ void AudioProcessingImpl::SetExtraOptions(const webrtc::Config& config) {}
void AudioProcessingImpl::OverrideSubmoduleCreationForTesting(
const ApmSubmoduleCreationOverrides& overrides) {
- rtc::CritScope cs(&crit_capture_);
+ MutexLock lock(&mutex_capture_);
submodule_creation_overrides_ = overrides;
}
@@ -689,7 +689,7 @@ size_t AudioProcessingImpl::num_output_channels() const {
}
void AudioProcessingImpl::set_output_will_be_muted(bool muted) {
- rtc::CritScope cs(&crit_capture_);
+ MutexLock lock(&mutex_capture_);
capture_.output_will_be_muted = muted;
if (submodules_.agc_manager.get()) {
submodules_.agc_manager->SetCaptureMuted(capture_.output_will_be_muted);
@@ -751,7 +751,7 @@ int AudioProcessingImpl::MaybeInitializeCapture(
// Acquire the capture lock in order to access api_format. The lock is
// released immediately, as we may need to acquire the render lock as part
// of the conditional reinitialization.
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
processing_config = formats_.api_format;
reinitialization_required = UpdateActiveSubmoduleStates();
}
@@ -767,8 +767,8 @@ int AudioProcessingImpl::MaybeInitializeCapture(
}
if (reinitialization_required) {
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
RETURN_ON_ERR(InitializeLocked(processing_config));
}
return kNoError;
@@ -785,7 +785,7 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config));
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
if (aec_dump_) {
RecordUnprocessedCaptureStream(src);
@@ -991,7 +991,11 @@ void AudioProcessingImpl::AllocateRenderQueue() {
}
void AudioProcessingImpl::EmptyQueuedRenderAudio() {
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
+ EmptyQueuedRenderAudioLocked();
+}
+
+void AudioProcessingImpl::EmptyQueuedRenderAudioLocked() {
if (submodules_.echo_control_mobile) {
RTC_DCHECK(aecm_render_signal_queue_);
while (aecm_render_signal_queue_->Remove(&aecm_capture_queue_buffer_)) {
@@ -1019,7 +1023,7 @@ int AudioProcessingImpl::ProcessStream(const int16_t* const src,
TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_AudioFrame");
RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config));
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
if (aec_dump_) {
RecordUnprocessedCaptureStream(src, input_config);
@@ -1047,7 +1051,7 @@ int AudioProcessingImpl::ProcessStream(const int16_t* const src,
}
int AudioProcessingImpl::ProcessCaptureStreamLocked() {
- EmptyQueuedRenderAudio();
+ EmptyQueuedRenderAudioLocked();
HandleCaptureRuntimeSettings();
// Ensure that not both the AEC and AECM are active at the same time.
@@ -1087,7 +1091,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
if (submodules_.echo_controller) {
// Detect and flag any change in the analog gain.
- int analog_mic_level = recommended_stream_analog_level();
+ int analog_mic_level = recommended_stream_analog_level_locked();
capture_.echo_path_gain_change =
capture_.prev_analog_mic_level != analog_mic_level &&
capture_.prev_analog_mic_level != -1;
@@ -1256,7 +1260,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
if (submodules_.gain_controller2) {
submodules_.gain_controller2->NotifyAnalogLevel(
- recommended_stream_analog_level());
+ recommended_stream_analog_level_locked());
submodules_.gain_controller2->Process(capture_buffer);
}
@@ -1284,7 +1288,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
}
if (submodules_.agc_manager) {
- int level = recommended_stream_analog_level();
+ int level = recommended_stream_analog_level_locked();
data_dumper_->DumpRaw("experimental_gain_control_stream_analog_level", 1,
&level);
}
@@ -1316,7 +1320,7 @@ int AudioProcessingImpl::AnalyzeReverseStream(
const float* const* data,
const StreamConfig& reverse_config) {
TRACE_EVENT0("webrtc", "AudioProcessing::AnalyzeReverseStream_StreamConfig");
- rtc::CritScope cs(&crit_render_);
+ MutexLock lock(&mutex_render_);
return AnalyzeReverseStreamLocked(data, reverse_config, reverse_config);
}
@@ -1325,7 +1329,7 @@ int AudioProcessingImpl::ProcessReverseStream(const float* const* src,
const StreamConfig& output_config,
float* const* dest) {
TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_StreamConfig");
- rtc::CritScope cs(&crit_render_);
+ MutexLock lock(&mutex_render_);
RETURN_ON_ERR(AnalyzeReverseStreamLocked(src, input_config, output_config));
if (submodule_states_.RenderMultiBandProcessingActive() ||
submodule_states_.RenderFullBandProcessingActive()) {
@@ -1386,7 +1390,7 @@ int AudioProcessingImpl::ProcessReverseStream(const int16_t* const src,
return AudioProcessing::Error::kBadNumberChannelsError;
}
- rtc::CritScope cs(&crit_render_);
+ MutexLock lock(&mutex_render_);
ProcessingConfig processing_config = formats_.api_format;
processing_config.reverse_input_stream().set_sample_rate_hz(
input_config.sample_rate_hz());
@@ -1453,7 +1457,7 @@ int AudioProcessingImpl::ProcessRenderStreamLocked() {
}
int AudioProcessingImpl::set_stream_delay_ms(int delay) {
- rtc::CritScope cs(&crit_capture_);
+ MutexLock lock(&mutex_capture_);
Error retval = kNoError;
capture_.was_stream_delay_set = true;
@@ -1474,7 +1478,7 @@ int AudioProcessingImpl::set_stream_delay_ms(int delay) {
bool AudioProcessingImpl::GetLinearAecOutput(
rtc::ArrayView<std::array<float, 160>> linear_output) const {
- rtc::CritScope cs(&crit_capture_);
+ MutexLock lock(&mutex_capture_);
AudioBuffer* linear_aec_buffer = capture_.linear_aec_output.get();
RTC_DCHECK(linear_aec_buffer);
@@ -1503,12 +1507,12 @@ int AudioProcessingImpl::stream_delay_ms() const {
}
void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) {
- rtc::CritScope cs(&crit_capture_);
+ MutexLock lock(&mutex_capture_);
capture_.key_pressed = key_pressed;
}
void AudioProcessingImpl::set_stream_analog_level(int level) {
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
if (submodules_.agc_manager) {
submodules_.agc_manager->set_stream_analog_level(level);
@@ -1523,7 +1527,11 @@ void AudioProcessingImpl::set_stream_analog_level(int level) {
}
int AudioProcessingImpl::recommended_stream_analog_level() const {
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_capture(&mutex_capture_);
+ return recommended_stream_analog_level_locked();
+}
+
+int AudioProcessingImpl::recommended_stream_analog_level_locked() const {
if (submodules_.agc_manager) {
return submodules_.agc_manager->stream_analog_level();
} else if (submodules_.gain_control) {
@@ -1561,8 +1569,8 @@ bool AudioProcessingImpl::CreateAndAttachAecDump(FILE* handle,
void AudioProcessingImpl::AttachAecDump(std::unique_ptr<AecDump> aec_dump) {
RTC_DCHECK(aec_dump);
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
// The previously attached AecDump will be destroyed with the
// 'aec_dump' parameter, which is after locks are released.
@@ -1577,23 +1585,23 @@ void AudioProcessingImpl::DetachAecDump() {
// the render and capture locks.
std::unique_ptr<AecDump> aec_dump = nullptr;
{
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
aec_dump = std::move(aec_dump_);
}
}
void AudioProcessingImpl::MutateConfig(
rtc::FunctionView<void(AudioProcessing::Config*)> mutator) {
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
mutator(&config_);
ApplyConfig(config_);
}
AudioProcessing::Config AudioProcessingImpl::GetConfig() const {
- rtc::CritScope cs_render(&crit_render_);
- rtc::CritScope cs_capture(&crit_capture_);
+ MutexLock lock_render(&mutex_render_);
+ MutexLock lock_capture(&mutex_capture_);
return config_;
}
@@ -2006,7 +2014,7 @@ void AudioProcessingImpl::RecordAudioProcessingState() {
AecDump::AudioProcessingState audio_proc_state;
audio_proc_state.delay = capture_nonlocked_.stream_delay_ms;
audio_proc_state.drift = 0;
- audio_proc_state.level = recommended_stream_analog_level();
+ audio_proc_state.level = recommended_stream_analog_level_locked();
audio_proc_state.keypress = capture_.key_pressed;
aec_dump_->AddAudioProcessingState(audio_proc_state);
}
@@ -2046,7 +2054,7 @@ AudioProcessingImpl::ApmStatsReporter::ApmStatsReporter()
AudioProcessingImpl::ApmStatsReporter::~ApmStatsReporter() = default;
AudioProcessingStats AudioProcessingImpl::ApmStatsReporter::GetStatistics() {
- rtc::CritScope cs_stats(&crit_stats_);
+ MutexLock lock_stats(&mutex_stats_);
bool new_stats_available = stats_message_queue_.Remove(&cached_stats_);
// If the message queue is full, return the cached stats.
static_cast<void>(new_stats_available);
diff --git a/modules/audio_processing/audio_processing_impl.h b/modules/audio_processing/audio_processing_impl.h
index 3aa86ac5a1..dfd5f63c51 100644
--- a/modules/audio_processing/audio_processing_impl.h
+++ b/modules/audio_processing/audio_processing_impl.h
@@ -39,10 +39,10 @@
#include "modules/audio_processing/rms_level.h"
#include "modules/audio_processing/transient/transient_suppressor.h"
#include "modules/audio_processing/voice_detection.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/gtest_prod_util.h"
#include "rtc_base/ignore_wundef.h"
#include "rtc_base/swap_queue.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -100,7 +100,8 @@ class AudioProcessingImpl : public AudioProcessing {
int set_stream_delay_ms(int delay) override;
void set_stream_key_pressed(bool key_pressed) override;
void set_stream_analog_level(int level) override;
- int recommended_stream_analog_level() const override;
+ int recommended_stream_analog_level() const
+ RTC_LOCKS_EXCLUDED(mutex_capture_) override;
// Render-side exclusive methods possibly running APM in a
// multi-threaded manner. Acquire the render lock.
@@ -140,7 +141,7 @@ class AudioProcessingImpl : public AudioProcessing {
protected:
// Overridden in a mock.
virtual int InitializeLocked()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_);
private:
// TODO(peah): These friend classes should be removed as soon as the new
@@ -155,6 +156,9 @@ class AudioProcessingImpl : public AudioProcessing {
FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest,
BitexactWithDisabledModules);
+ int recommended_stream_analog_level_locked() const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+
void OverrideSubmoduleCreationForTesting(
const ApmSubmoduleCreationOverrides& overrides);
@@ -234,7 +238,7 @@ class AudioProcessingImpl : public AudioProcessing {
// Called by render: Holds the render lock when reading the format struct and
// acquires both locks if reinitialization is required.
int MaybeInitializeRender(const ProcessingConfig& processing_config)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
// Called by capture: Holds the capture lock when reading the format struct
// and acquires both locks if reinitialization is needed.
int MaybeInitializeCapture(const StreamConfig& input_config,
@@ -243,55 +247,58 @@ class AudioProcessingImpl : public AudioProcessing {
// Method for updating the state keeping track of the active submodules.
// Returns a bool indicating whether the state has changed.
bool UpdateActiveSubmoduleStates()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Methods requiring APM running in a single-threaded manner, requiring both
// the render and capture lock to be acquired.
int InitializeLocked(const ProcessingConfig& config)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_);
void InitializeResidualEchoDetector()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_);
void InitializeEchoController()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_);
// Initializations of capture-only submodules, requiring the capture lock
// already acquired.
void InitializeHighPassFilter(bool forced_reset)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializeVoiceDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializeGainController1() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializeVoiceDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializeGainController1() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
void InitializeTransientSuppressor()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializeGainController2() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializeNoiseSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializePreAmplifier() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializePostProcessor() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void InitializeAnalyzer() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializeGainController2() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializeNoiseSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializePreAmplifier() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializePostProcessor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void InitializeAnalyzer() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Initializations of render-only submodules, requiring the render lock
// already acquired.
- void InitializePreProcessor() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ void InitializePreProcessor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
// Sample rate used for the fullband processing.
int proc_fullband_sample_rate_hz() const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Empties and handles the respective RuntimeSetting queues.
void HandleCaptureRuntimeSettings()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- void HandleRenderRuntimeSettings() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
+ void HandleRenderRuntimeSettings()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
- void EmptyQueuedRenderAudio();
+ void EmptyQueuedRenderAudio() RTC_LOCKS_EXCLUDED(mutex_capture_);
+ void EmptyQueuedRenderAudioLocked()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
void AllocateRenderQueue()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_);
void QueueBandedRenderAudio(AudioBuffer* audio)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
void QueueNonbandedRenderAudio(AudioBuffer* audio)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
// Capture-side exclusive methods possibly running APM in a multi-threaded
// manner that are called with the render lock already acquired.
- int ProcessCaptureStreamLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ int ProcessCaptureStreamLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Render-side exclusive methods possibly running APM in a multi-threaded
// manner that are called with the render lock already acquired.
@@ -299,8 +306,8 @@ class AudioProcessingImpl : public AudioProcessing {
int AnalyzeReverseStreamLocked(const float* const* src,
const StreamConfig& input_config,
const StreamConfig& output_config)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
- int ProcessRenderStreamLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
+ int ProcessRenderStreamLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_);
// Collects configuration settings from public and private
// submodules to be saved as an audioproc::Config message on the
@@ -308,29 +315,30 @@ class AudioProcessingImpl : public AudioProcessing {
// config if it is different from the last saved one; if |forced|,
// writes the config regardless of the last saved.
void WriteAecDumpConfigMessage(bool forced)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Notifies attached AecDump of current configuration and capture data.
void RecordUnprocessedCaptureStream(const float* const* capture_stream)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
void RecordUnprocessedCaptureStream(const int16_t* const data,
const StreamConfig& config)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Notifies attached AecDump of current configuration and
// processed capture data and issues a capture stream recording
// request.
void RecordProcessedCaptureStream(
const float* const* processed_capture_stream)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
void RecordProcessedCaptureStream(const int16_t* const data,
const StreamConfig& config)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// Notifies attached AecDump about current state (delay, drift, etc).
- void RecordAudioProcessingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ void RecordAudioProcessingState()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_);
// AecDump instance used for optionally logging APM config, input
// and output to file in the AEC-dump format defined in debug.proto.
@@ -338,18 +346,18 @@ class AudioProcessingImpl : public AudioProcessing {
// Hold the last config written with AecDump for avoiding writing
// the same config twice.
- InternalAPMConfig apm_config_for_aec_dump_ RTC_GUARDED_BY(crit_capture_);
+ InternalAPMConfig apm_config_for_aec_dump_ RTC_GUARDED_BY(mutex_capture_);
// Critical sections.
- rtc::CriticalSection crit_render_ RTC_ACQUIRED_BEFORE(crit_capture_);
- rtc::CriticalSection crit_capture_;
+ mutable Mutex mutex_render_ RTC_ACQUIRED_BEFORE(mutex_capture_);
+ mutable Mutex mutex_capture_;
// Struct containing the Config specifying the behavior of APM.
AudioProcessing::Config config_;
// Overrides for testing the exclusion of some submodules from the build.
ApmSubmoduleCreationOverrides submodule_creation_overrides_
- RTC_GUARDED_BY(crit_capture_);
+ RTC_GUARDED_BY(mutex_capture_);
// Class containing information about what submodules are active.
SubmoduleStates submodule_states_;
@@ -438,7 +446,7 @@ class AudioProcessingImpl : public AudioProcessing {
const float* keyboard_data = nullptr;
} keyboard_info;
int cached_stream_analog_level_ = 0;
- } capture_ RTC_GUARDED_BY(crit_capture_);
+ } capture_ RTC_GUARDED_BY(mutex_capture_);
struct ApmCaptureNonLockedState {
ApmCaptureNonLockedState()
@@ -459,7 +467,7 @@ class AudioProcessingImpl : public AudioProcessing {
~ApmRenderState();
std::unique_ptr<AudioConverter> render_converter;
std::unique_ptr<AudioBuffer> render_audio;
- } render_ RTC_GUARDED_BY(crit_render_);
+ } render_ RTC_GUARDED_BY(mutex_render_);
// Class for statistics reporting. The class is thread-safe and no lock is
// needed when accessing it.
@@ -475,27 +483,28 @@ class AudioProcessingImpl : public AudioProcessing {
void UpdateStatistics(const AudioProcessingStats& new_stats);
private:
- rtc::CriticalSection crit_stats_;
- AudioProcessingStats cached_stats_ RTC_GUARDED_BY(crit_stats_);
+ Mutex mutex_stats_;
+ AudioProcessingStats cached_stats_ RTC_GUARDED_BY(mutex_stats_);
SwapQueue<AudioProcessingStats> stats_message_queue_;
} stats_reporter_;
- std::vector<int16_t> aecm_render_queue_buffer_ RTC_GUARDED_BY(crit_render_);
- std::vector<int16_t> aecm_capture_queue_buffer_ RTC_GUARDED_BY(crit_capture_);
+ std::vector<int16_t> aecm_render_queue_buffer_ RTC_GUARDED_BY(mutex_render_);
+ std::vector<int16_t> aecm_capture_queue_buffer_
+ RTC_GUARDED_BY(mutex_capture_);
- size_t agc_render_queue_element_max_size_ RTC_GUARDED_BY(crit_render_)
- RTC_GUARDED_BY(crit_capture_) = 0;
- std::vector<int16_t> agc_render_queue_buffer_ RTC_GUARDED_BY(crit_render_);
- std::vector<int16_t> agc_capture_queue_buffer_ RTC_GUARDED_BY(crit_capture_);
+ size_t agc_render_queue_element_max_size_ RTC_GUARDED_BY(mutex_render_)
+ RTC_GUARDED_BY(mutex_capture_) = 0;
+ std::vector<int16_t> agc_render_queue_buffer_ RTC_GUARDED_BY(mutex_render_);
+ std::vector<int16_t> agc_capture_queue_buffer_ RTC_GUARDED_BY(mutex_capture_);
- size_t red_render_queue_element_max_size_ RTC_GUARDED_BY(crit_render_)
- RTC_GUARDED_BY(crit_capture_) = 0;
- std::vector<float> red_render_queue_buffer_ RTC_GUARDED_BY(crit_render_);
- std::vector<float> red_capture_queue_buffer_ RTC_GUARDED_BY(crit_capture_);
+ size_t red_render_queue_element_max_size_ RTC_GUARDED_BY(mutex_render_)
+ RTC_GUARDED_BY(mutex_capture_) = 0;
+ std::vector<float> red_render_queue_buffer_ RTC_GUARDED_BY(mutex_render_);
+ std::vector<float> red_capture_queue_buffer_ RTC_GUARDED_BY(mutex_capture_);
- RmsLevel capture_input_rms_ RTC_GUARDED_BY(crit_capture_);
- RmsLevel capture_output_rms_ RTC_GUARDED_BY(crit_capture_);
- int capture_rms_interval_counter_ RTC_GUARDED_BY(crit_capture_) = 0;
+ RmsLevel capture_input_rms_ RTC_GUARDED_BY(mutex_capture_);
+ RmsLevel capture_output_rms_ RTC_GUARDED_BY(mutex_capture_);
+ int capture_rms_interval_counter_ RTC_GUARDED_BY(mutex_capture_) = 0;
// Lock protection not needed.
std::unique_ptr<
diff --git a/modules/audio_processing/audio_processing_impl_locking_unittest.cc b/modules/audio_processing/audio_processing_impl_locking_unittest.cc
index 500539405b..ec165aa146 100644
--- a/modules/audio_processing/audio_processing_impl_locking_unittest.cc
+++ b/modules/audio_processing/audio_processing_impl_locking_unittest.cc
@@ -16,10 +16,10 @@
#include "modules/audio_processing/audio_processing_impl.h"
#include "modules/audio_processing/test/audio_processing_builder_for_testing.h"
#include "modules/audio_processing/test/test_utils.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
@@ -62,23 +62,23 @@ class RandomGenerator {
RandomGenerator() : rand_gen_(42U) {}
int RandInt(int min, int max) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return rand_gen_.Rand(min, max);
}
int RandInt(int max) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return rand_gen_.Rand(max);
}
float RandFloat() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return rand_gen_.Rand<float>();
}
private:
- rtc::CriticalSection crit_;
- Random rand_gen_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ Random rand_gen_ RTC_GUARDED_BY(mutex_);
};
// Variables related to the audio data and formats.
@@ -258,27 +258,27 @@ struct TestConfig {
class FrameCounters {
public:
void IncreaseRenderCounter() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
render_count++;
}
void IncreaseCaptureCounter() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
capture_count++;
}
int GetCaptureCounter() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return capture_count;
}
int GetRenderCounter() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return render_count;
}
int CaptureMinusRenderCounters() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return capture_count - render_count;
}
@@ -287,14 +287,14 @@ class FrameCounters {
}
bool BothCountersExceedeThreshold(int threshold) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return (render_count > threshold && capture_count > threshold);
}
private:
- rtc::CriticalSection crit_;
- int render_count RTC_GUARDED_BY(crit_) = 0;
- int capture_count RTC_GUARDED_BY(crit_) = 0;
+ mutable Mutex mutex_;
+ int render_count RTC_GUARDED_BY(mutex_) = 0;
+ int capture_count RTC_GUARDED_BY(mutex_) = 0;
};
// Class for handling the capture side processing.
diff --git a/modules/audio_processing/audio_processing_impl_unittest.cc b/modules/audio_processing/audio_processing_impl_unittest.cc
index 71352bc65a..8f28941cdf 100644
--- a/modules/audio_processing/audio_processing_impl_unittest.cc
+++ b/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -37,13 +37,13 @@ class MockInitialize : public AudioProcessingImpl {
explicit MockInitialize(const webrtc::Config& config)
: AudioProcessingImpl(config) {}
- MOCK_METHOD0(InitializeLocked, int());
+ MOCK_METHOD(int, InitializeLocked, (), (override));
int RealInitializeLocked() RTC_NO_THREAD_SAFETY_ANALYSIS {
return AudioProcessingImpl::InitializeLocked();
}
- MOCK_CONST_METHOD0(AddRef, void());
- MOCK_CONST_METHOD0(Release, rtc::RefCountReleaseStatus());
+ MOCK_METHOD(void, AddRef, (), (const, override));
+ MOCK_METHOD(rtc::RefCountReleaseStatus, Release, (), (const, override));
};
// Creates MockEchoControl instances and provides a raw pointer access to
diff --git a/modules/audio_processing/audio_processing_unittest.cc b/modules/audio_processing/audio_processing_unittest.cc
index 90413a84be..93ddc97366 100644
--- a/modules/audio_processing/audio_processing_unittest.cc
+++ b/modules/audio_processing/audio_processing_unittest.cc
@@ -962,49 +962,51 @@ TEST_F(ApmTest, GainControl) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(ApmTest, GainControlDiesOnTooLowTargetLevelDbfs) {
+using ApmDeathTest = ApmTest;
+
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowTargetLevelDbfs) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.target_level_dbfs = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighTargetLevelDbfs) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighTargetLevelDbfs) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.target_level_dbfs = 32;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooLowCompressionGainDb) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowCompressionGainDb) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.compression_gain_db = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighCompressionGainDb) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighCompressionGainDb) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.compression_gain_db = 91;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooLowAnalogLevelLowerLimit) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowAnalogLevelLowerLimit) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighAnalogLevelUpperLimit) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighAnalogLevelUpperLimit) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_maximum = 65536;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnInvertedAnalogLevelLimits) {
+TEST_F(ApmDeathTest, GainControlDiesOnInvertedAnalogLevelLimits) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 512;
@@ -1012,7 +1014,7 @@ TEST_F(ApmTest, GainControlDiesOnInvertedAnalogLevelLimits) {
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, ApmDiesOnTooLowAnalogLevel) {
+TEST_F(ApmDeathTest, ApmDiesOnTooLowAnalogLevel) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 255;
@@ -1021,7 +1023,7 @@ TEST_F(ApmTest, ApmDiesOnTooLowAnalogLevel) {
EXPECT_DEATH(apm_->set_stream_analog_level(254), "");
}
-TEST_F(ApmTest, ApmDiesOnTooHighAnalogLevel) {
+TEST_F(ApmDeathTest, ApmDiesOnTooHighAnalogLevel) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 255;
@@ -2414,7 +2416,7 @@ TEST(RuntimeSettingTest, TestDefaultCtor) {
EXPECT_EQ(AudioProcessing::RuntimeSetting::Type::kNotSpecified, s.type());
}
-TEST(RuntimeSettingTest, TestCapturePreGain) {
+TEST(RuntimeSettingDeathTest, TestCapturePreGain) {
using Type = AudioProcessing::RuntimeSetting::Type;
{
auto s = AudioProcessing::RuntimeSetting::CreateCapturePreGain(1.25f);
@@ -2429,7 +2431,7 @@ TEST(RuntimeSettingTest, TestCapturePreGain) {
#endif
}
-TEST(RuntimeSettingTest, TestCaptureFixedPostGain) {
+TEST(RuntimeSettingDeathTest, TestCaptureFixedPostGain) {
using Type = AudioProcessing::RuntimeSetting::Type;
{
auto s = AudioProcessing::RuntimeSetting::CreateCaptureFixedPostGain(1.25f);
diff --git a/modules/audio_processing/echo_control_mobile_unittest.cc b/modules/audio_processing/echo_control_mobile_unittest.cc
index 84e1c845ca..ed0393043c 100644
--- a/modules/audio_processing/echo_control_mobile_unittest.cc
+++ b/modules/audio_processing/echo_control_mobile_unittest.cc
@@ -13,7 +13,6 @@
#include "modules/audio_processing/echo_control_mobile_impl.h"
#include "modules/audio_processing/include/audio_processing.h"
-#include "rtc_base/critical_section.h"
#include "test/gtest.h"
namespace webrtc {
diff --git a/modules/audio_processing/include/mock_audio_processing.h b/modules/audio_processing/include/mock_audio_processing.h
index bdae99a91a..562b23f7d5 100644
--- a/modules/audio_processing/include/mock_audio_processing.h
+++ b/modules/audio_processing/include/mock_audio_processing.h
@@ -24,35 +24,47 @@ namespace test {
class MockCustomProcessing : public CustomProcessing {
public:
virtual ~MockCustomProcessing() {}
- MOCK_METHOD2(Initialize, void(int sample_rate_hz, int num_channels));
- MOCK_METHOD1(Process, void(AudioBuffer* audio));
- MOCK_METHOD1(SetRuntimeSetting,
- void(AudioProcessing::RuntimeSetting setting));
- MOCK_CONST_METHOD0(ToString, std::string());
+ MOCK_METHOD(void,
+ Initialize,
+ (int sample_rate_hz, int num_channels),
+ (override));
+ MOCK_METHOD(void, Process, (AudioBuffer * audio), (override));
+ MOCK_METHOD(void,
+ SetRuntimeSetting,
+ (AudioProcessing::RuntimeSetting setting),
+ (override));
+ MOCK_METHOD(std::string, ToString, (), (const, override));
};
class MockCustomAudioAnalyzer : public CustomAudioAnalyzer {
public:
virtual ~MockCustomAudioAnalyzer() {}
- MOCK_METHOD2(Initialize, void(int sample_rate_hz, int num_channels));
- MOCK_METHOD1(Analyze, void(const AudioBuffer* audio));
- MOCK_CONST_METHOD0(ToString, std::string());
+ MOCK_METHOD(void,
+ Initialize,
+ (int sample_rate_hz, int num_channels),
+ (override));
+ MOCK_METHOD(void, Analyze, (const AudioBuffer* audio), (override));
+ MOCK_METHOD(std::string, ToString, (), (const, override));
};
class MockEchoControl : public EchoControl {
public:
virtual ~MockEchoControl() {}
- MOCK_METHOD1(AnalyzeRender, void(AudioBuffer* render));
- MOCK_METHOD1(AnalyzeCapture, void(AudioBuffer* capture));
- MOCK_METHOD2(ProcessCapture,
- void(AudioBuffer* capture, bool echo_path_change));
- MOCK_METHOD3(ProcessCapture,
- void(AudioBuffer* capture,
- AudioBuffer* linear_output,
- bool echo_path_change));
- MOCK_CONST_METHOD0(GetMetrics, Metrics());
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_CONST_METHOD0(ActiveProcessing, bool());
+ MOCK_METHOD(void, AnalyzeRender, (AudioBuffer * render), (override));
+ MOCK_METHOD(void, AnalyzeCapture, (AudioBuffer * capture), (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture, bool echo_path_change),
+ (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture,
+ AudioBuffer* linear_output,
+ bool echo_path_change),
+ (override));
+ MOCK_METHOD(Metrics, GetMetrics, (), (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, ActiveProcessing, (), (const, override));
};
class MockAudioProcessing : public ::testing::NiceMock<AudioProcessing> {
@@ -61,87 +73,93 @@ class MockAudioProcessing : public ::testing::NiceMock<AudioProcessing> {
virtual ~MockAudioProcessing() {}
- MOCK_METHOD0(Initialize, int());
- MOCK_METHOD6(Initialize,
- int(int capture_input_sample_rate_hz,
- int capture_output_sample_rate_hz,
- int render_sample_rate_hz,
- ChannelLayout capture_input_layout,
- ChannelLayout capture_output_layout,
- ChannelLayout render_input_layout));
- MOCK_METHOD1(Initialize, int(const ProcessingConfig& processing_config));
- MOCK_METHOD1(ApplyConfig, void(const Config& config));
- MOCK_METHOD1(SetExtraOptions, void(const webrtc::Config& config));
- MOCK_CONST_METHOD0(proc_sample_rate_hz, int());
- MOCK_CONST_METHOD0(proc_split_sample_rate_hz, int());
- MOCK_CONST_METHOD0(num_input_channels, size_t());
- MOCK_CONST_METHOD0(num_proc_channels, size_t());
- MOCK_CONST_METHOD0(num_output_channels, size_t());
- MOCK_CONST_METHOD0(num_reverse_channels, size_t());
- MOCK_METHOD1(set_output_will_be_muted, void(bool muted));
- MOCK_METHOD1(SetRuntimeSetting, void(RuntimeSetting setting));
- MOCK_METHOD4(ProcessStream,
- int(const int16_t* const src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- int16_t* const dest));
- MOCK_METHOD7(ProcessStream,
- int(const float* const* src,
- size_t samples_per_channel,
- int input_sample_rate_hz,
- ChannelLayout input_layout,
- int output_sample_rate_hz,
- ChannelLayout output_layout,
- float* const* dest));
- MOCK_METHOD4(ProcessStream,
- int(const float* const* src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- float* const* dest));
- MOCK_METHOD4(ProcessReverseStream,
- int(const int16_t* const src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- int16_t* const dest));
- MOCK_METHOD4(AnalyzeReverseStream,
- int(const float* const* data,
- size_t samples_per_channel,
- int sample_rate_hz,
- ChannelLayout layout));
- MOCK_METHOD2(AnalyzeReverseStream,
- int(const float* const* data,
- const StreamConfig& reverse_config));
- MOCK_METHOD4(ProcessReverseStream,
- int(const float* const* src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- float* const* dest));
- MOCK_CONST_METHOD1(
- GetLinearAecOutput,
- bool(rtc::ArrayView<std::array<float, 160>> linear_output));
- MOCK_METHOD1(set_stream_delay_ms, int(int delay));
- MOCK_CONST_METHOD0(stream_delay_ms, int());
- MOCK_CONST_METHOD0(was_stream_delay_set, bool());
- MOCK_METHOD1(set_stream_key_pressed, void(bool key_pressed));
- MOCK_METHOD1(set_delay_offset_ms, void(int offset));
- MOCK_CONST_METHOD0(delay_offset_ms, int());
- MOCK_METHOD1(set_stream_analog_level, void(int));
- MOCK_CONST_METHOD0(recommended_stream_analog_level, int());
- MOCK_METHOD3(CreateAndAttachAecDump,
- bool(const std::string& file_name,
- int64_t max_log_size_bytes,
- rtc::TaskQueue* worker_queue));
- MOCK_METHOD3(CreateAndAttachAecDump,
- bool(FILE* handle,
- int64_t max_log_size_bytes,
- rtc::TaskQueue* worker_queue));
- MOCK_METHOD1(AttachAecDump, void(std::unique_ptr<AecDump>));
- MOCK_METHOD0(DetachAecDump, void());
+ MOCK_METHOD(int, Initialize, (), (override));
+ MOCK_METHOD(int,
+ Initialize,
+ (int capture_input_sample_rate_hz,
+ int capture_output_sample_rate_hz,
+ int render_sample_rate_hz,
+ ChannelLayout capture_input_layout,
+ ChannelLayout capture_output_layout,
+ ChannelLayout render_input_layout),
+ (override));
+ MOCK_METHOD(int,
+ Initialize,
+ (const ProcessingConfig& processing_config),
+ (override));
+ MOCK_METHOD(void, ApplyConfig, (const Config& config), (override));
+ MOCK_METHOD(void,
+ SetExtraOptions,
+ (const webrtc::Config& config),
+ (override));
+ MOCK_METHOD(int, proc_sample_rate_hz, (), (const, override));
+ MOCK_METHOD(int, proc_split_sample_rate_hz, (), (const, override));
+ MOCK_METHOD(size_t, num_input_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_proc_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_output_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_reverse_channels, (), (const, override));
+ MOCK_METHOD(void, set_output_will_be_muted, (bool muted), (override));
+ MOCK_METHOD(void, SetRuntimeSetting, (RuntimeSetting setting), (override));
+ MOCK_METHOD(int,
+ ProcessStream,
+ (const int16_t* const src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ int16_t* const dest),
+ (override));
+ MOCK_METHOD(int,
+ ProcessStream,
+ (const float* const* src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ float* const* dest),
+ (override));
+ MOCK_METHOD(int,
+ ProcessReverseStream,
+ (const int16_t* const src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ int16_t* const dest),
+ (override));
+ MOCK_METHOD(int,
+ AnalyzeReverseStream,
+ (const float* const* data, const StreamConfig& reverse_config),
+ (override));
+ MOCK_METHOD(int,
+ ProcessReverseStream,
+ (const float* const* src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ float* const* dest),
+ (override));
+ MOCK_METHOD(bool,
+ GetLinearAecOutput,
+ ((rtc::ArrayView<std::array<float, 160>> linear_output)),
+ (const, override));
+ MOCK_METHOD(int, set_stream_delay_ms, (int delay), (override));
+ MOCK_METHOD(int, stream_delay_ms, (), (const, override));
+ MOCK_METHOD(void, set_stream_key_pressed, (bool key_pressed), (override));
+ MOCK_METHOD(void, set_stream_analog_level, (int), (override));
+ MOCK_METHOD(int, recommended_stream_analog_level, (), (const, override));
+ MOCK_METHOD(bool,
+ CreateAndAttachAecDump,
+ (const std::string& file_name,
+ int64_t max_log_size_bytes,
+ rtc::TaskQueue* worker_queue),
+ (override));
+ MOCK_METHOD(bool,
+ CreateAndAttachAecDump,
+ (FILE * handle,
+ int64_t max_log_size_bytes,
+ rtc::TaskQueue* worker_queue),
+ (override));
+ MOCK_METHOD(void, AttachAecDump, (std::unique_ptr<AecDump>), (override));
+ MOCK_METHOD(void, DetachAecDump, (), (override));
- MOCK_METHOD0(GetStatistics, AudioProcessingStats());
- MOCK_METHOD1(GetStatistics, AudioProcessingStats(bool));
+ MOCK_METHOD(AudioProcessingStats, GetStatistics, (), (override));
+ MOCK_METHOD(AudioProcessingStats, GetStatistics, (bool), (override));
- MOCK_CONST_METHOD0(GetConfig, AudioProcessing::Config());
+ MOCK_METHOD(AudioProcessing::Config, GetConfig, (), (const, override));
};
} // namespace test
diff --git a/modules/audio_processing/ns/BUILD.gn b/modules/audio_processing/ns/BUILD.gn
index 442a313e64..7197705c3d 100644
--- a/modules/audio_processing/ns/BUILD.gn
+++ b/modules/audio_processing/ns/BUILD.gn
@@ -64,8 +64,8 @@ rtc_static_library("ns") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -90,8 +90,8 @@ if (rtc_include_tests) {
"../../../system_wrappers:cpu_features_api",
"../../../test:test_support",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/modules/audio_processing/test/aec_dump_based_simulator.cc b/modules/audio_processing/test/aec_dump_based_simulator.cc
index b3b113da44..f5bd6452e3 100644
--- a/modules/audio_processing/test/aec_dump_based_simulator.cc
+++ b/modules/audio_processing/test/aec_dump_based_simulator.cc
@@ -66,8 +66,11 @@ bool VerifyFloatBitExactness(const webrtc::audioproc::Stream& msg,
AecDumpBasedSimulator::AecDumpBasedSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
- : AudioProcessingSimulator(settings, std::move(ap_builder)) {
+ : AudioProcessingSimulator(settings,
+ std::move(audio_processing),
+ std::move(ap_builder)) {
MaybeOpenCallOrderFile();
}
@@ -206,7 +209,8 @@ void AecDumpBasedSimulator::PrepareReverseProcessStreamCall(
}
void AecDumpBasedSimulator::Process() {
- CreateAudioProcessor();
+ ConfigureAudioProcessor();
+
if (settings_.artificial_nearend_filename) {
std::unique_ptr<WavReader> artificial_nearend_file(
new WavReader(settings_.artificial_nearend_filename->c_str()));
@@ -237,7 +241,7 @@ void AecDumpBasedSimulator::Process() {
fclose(dump_input_file_);
}
- DestroyAudioProcessor();
+ DetachAecDump();
}
void AecDumpBasedSimulator::HandleEvent(
diff --git a/modules/audio_processing/test/aec_dump_based_simulator.h b/modules/audio_processing/test/aec_dump_based_simulator.h
index ef032d0316..092b82bdbc 100644
--- a/modules/audio_processing/test/aec_dump_based_simulator.h
+++ b/modules/audio_processing/test/aec_dump_based_simulator.h
@@ -33,6 +33,7 @@ namespace test {
class AecDumpBasedSimulator final : public AudioProcessingSimulator {
public:
AecDumpBasedSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
~AecDumpBasedSimulator() override;
diff --git a/modules/audio_processing/test/audio_processing_simulator.cc b/modules/audio_processing/test/audio_processing_simulator.cc
index a37a83f1e3..adbc298e9e 100644
--- a/modules/audio_processing/test/audio_processing_simulator.cc
+++ b/modules/audio_processing/test/audio_processing_simulator.cc
@@ -113,10 +113,10 @@ SimulationSettings::~SimulationSettings() = default;
AudioProcessingSimulator::AudioProcessingSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
: settings_(settings),
- ap_builder_(ap_builder ? std::move(ap_builder)
- : std::make_unique<AudioProcessingBuilder>()),
+ ap_(std::move(audio_processing)),
analog_mic_level_(settings.initial_mic_level),
fake_recording_device_(
settings.initial_mic_level,
@@ -139,6 +139,51 @@ AudioProcessingSimulator::AudioProcessingSimulator(
if (settings_.simulate_mic_gain)
RTC_LOG(LS_VERBOSE) << "Simulating analog mic gain";
+
+ // Create the audio processing object.
+ RTC_CHECK(!(ap_ && ap_builder))
+ << "The AudioProcessing and the AudioProcessingBuilder cannot both be "
+ "specified at the same time.";
+
+ if (ap_) {
+ RTC_CHECK(!settings_.aec_settings_filename);
+ RTC_CHECK(!settings_.print_aec_parameter_values);
+ } else {
+ // Use specied builder if such is provided, otherwise create a new builder.
+ std::unique_ptr<AudioProcessingBuilder> builder =
+ !!ap_builder ? std::move(ap_builder)
+ : std::make_unique<AudioProcessingBuilder>();
+
+ // Create and set an EchoCanceller3Factory if needed.
+ const bool use_aec = settings_.use_aec && *settings_.use_aec;
+ if (use_aec) {
+ EchoCanceller3Config cfg;
+ if (settings_.aec_settings_filename) {
+ if (settings_.use_verbose_logging) {
+ std::cout << "Reading AEC Parameters from JSON input." << std::endl;
+ }
+ cfg = ReadAec3ConfigFromJsonFile(*settings_.aec_settings_filename);
+ }
+
+ if (settings_.linear_aec_output_filename) {
+ cfg.filter.export_linear_aec_output = true;
+ }
+
+ if (settings_.print_aec_parameter_values) {
+ if (!settings_.use_quiet_output) {
+ std::cout << "AEC settings:" << std::endl;
+ }
+ std::cout << Aec3ConfigToJsonString(cfg) << std::endl;
+ }
+
+ auto echo_control_factory = std::make_unique<EchoCanceller3Factory>(cfg);
+ builder->SetEchoControlFactory(std::move(echo_control_factory));
+ }
+
+ // Create an audio processing object.
+ ap_ = builder->Create();
+ RTC_CHECK(ap_);
+ }
}
AudioProcessingSimulator::~AudioProcessingSimulator() {
@@ -369,16 +414,14 @@ void AudioProcessingSimulator::SetupOutput() {
++output_reset_counter_;
}
-void AudioProcessingSimulator::DestroyAudioProcessor() {
+void AudioProcessingSimulator::DetachAecDump() {
if (settings_.aec_dump_output_filename) {
ap_->DetachAecDump();
}
}
-void AudioProcessingSimulator::CreateAudioProcessor() {
- Config config;
+void AudioProcessingSimulator::ConfigureAudioProcessor() {
AudioProcessing::Config apm_config;
- std::unique_ptr<EchoControlFactory> echo_control_factory;
if (settings_.use_ts) {
apm_config.transient_suppression.enabled = *settings_.use_ts;
}
@@ -421,29 +464,6 @@ void AudioProcessingSimulator::CreateAudioProcessor() {
apm_config.echo_canceller.export_linear_aec_output =
!!settings_.linear_aec_output_filename;
- if (use_aec) {
- EchoCanceller3Config cfg;
- if (settings_.aec_settings_filename) {
- if (settings_.use_verbose_logging) {
- std::cout << "Reading AEC Parameters from JSON input." << std::endl;
- }
- cfg = ReadAec3ConfigFromJsonFile(*settings_.aec_settings_filename);
- }
-
- if (settings_.linear_aec_output_filename) {
- cfg.filter.export_linear_aec_output = true;
- }
-
- echo_control_factory.reset(new EchoCanceller3Factory(cfg));
-
- if (settings_.print_aec_parameter_values) {
- if (!settings_.use_quiet_output) {
- std::cout << "AEC settings:" << std::endl;
- }
- std::cout << Aec3ConfigToJsonString(cfg) << std::endl;
- }
- }
-
if (settings_.use_hpf) {
apm_config.high_pass_filter.enabled = *settings_.use_hpf;
}
@@ -512,14 +532,6 @@ void AudioProcessingSimulator::CreateAudioProcessor() {
*settings_.ns_analysis_on_linear_aec_output;
}
- RTC_CHECK(ap_builder_);
- if (echo_control_factory) {
- ap_builder_->SetEchoControlFactory(std::move(echo_control_factory));
- }
- ap_.reset((*ap_builder_).Create(config));
-
- RTC_CHECK(ap_);
-
ap_->ApplyConfig(apm_config);
if (settings_.use_ts) {
diff --git a/modules/audio_processing/test/audio_processing_simulator.h b/modules/audio_processing/test/audio_processing_simulator.h
index fa6efc2842..8579f4b4d0 100644
--- a/modules/audio_processing/test/audio_processing_simulator.h
+++ b/modules/audio_processing/test/audio_processing_simulator.h
@@ -150,8 +150,8 @@ struct SimulationSettings {
// Provides common functionality for performing audioprocessing simulations.
class AudioProcessingSimulator {
public:
-
AudioProcessingSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
virtual ~AudioProcessingSimulator();
@@ -174,8 +174,8 @@ class AudioProcessingSimulator {
protected:
void ProcessStream(bool fixed_interface);
void ProcessReverseStream(bool fixed_interface);
- void CreateAudioProcessor();
- void DestroyAudioProcessor();
+ void ConfigureAudioProcessor();
+ void DetachAecDump();
void SetupBuffersConfigsOutputs(int input_sample_rate_hz,
int output_sample_rate_hz,
int reverse_input_sample_rate_hz,
@@ -186,8 +186,7 @@ class AudioProcessingSimulator {
int reverse_output_num_channels);
const SimulationSettings settings_;
- std::unique_ptr<AudioProcessing> ap_;
- std::unique_ptr<AudioProcessingBuilder> ap_builder_;
+ rtc::scoped_refptr<AudioProcessing> ap_;
std::unique_ptr<ChannelBuffer<float>> in_buf_;
std::unique_ptr<ChannelBuffer<float>> out_buf_;
diff --git a/modules/audio_processing/test/audioproc_float_impl.cc b/modules/audio_processing/test/audioproc_float_impl.cc
index d9a4227eb7..ab395f1018 100644
--- a/modules/audio_processing/test/audioproc_float_impl.cc
+++ b/modules/audio_processing/test/audioproc_float_impl.cc
@@ -457,7 +457,10 @@ void ReportConditionalErrorAndExit(bool condition, const std::string& message) {
}
}
-void PerformBasicParameterSanityChecks(const SimulationSettings& settings) {
+void PerformBasicParameterSanityChecks(
+ const SimulationSettings& settings,
+ bool pre_constructed_ap_provided,
+ bool pre_constructed_ap_builder_provided) {
if (settings.input_filename || settings.reverse_input_filename) {
ReportConditionalErrorAndExit(
!!settings.aec_dump_input_filename,
@@ -624,21 +627,41 @@ void PerformBasicParameterSanityChecks(const SimulationSettings& settings) {
settings.pre_amplifier_gain_factor.has_value(),
"Error: --pre_amplifier_gain_factor needs --pre_amplifier to be "
"specified and set.\n");
-}
-} // namespace
+ ReportConditionalErrorAndExit(
+ pre_constructed_ap_provided && pre_constructed_ap_builder_provided,
+ "Error: The AudioProcessing and the AudioProcessingBuilder cannot both "
+ "be specified at the same time.\n");
-int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
- int argc,
- char* argv[],
- absl::string_view input_aecdump,
- std::vector<float>* processed_capture_samples) {
+ ReportConditionalErrorAndExit(
+ settings.aec_settings_filename && pre_constructed_ap_provided,
+ "Error: The aec_settings_filename cannot be specified when a "
+ "pre-constructed audio processing object is provided.\n");
+
+ ReportConditionalErrorAndExit(
+ settings.aec_settings_filename && pre_constructed_ap_provided,
+ "Error: The print_aec_parameter_values cannot be set when a "
+ "pre-constructed audio processing object is provided.\n");
+
+ if (settings.linear_aec_output_filename && pre_constructed_ap_provided) {
+ std::cout << "Warning: For the linear AEC output to be stored, this must "
+ "be configured in the AEC that is part of the provided "
+ "AudioProcessing object."
+ << std::endl;
+ }
+}
+
+int RunSimulation(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ std::unique_ptr<AudioProcessingBuilder> ap_builder,
+ int argc,
+ char* argv[],
+ absl::string_view input_aecdump,
+ std::vector<float>* processed_capture_samples) {
std::vector<char*> args = absl::ParseCommandLine(argc, argv);
if (args.size() != 1) {
printf("%s", kUsageDescription);
return 1;
}
-
// InitFieldTrialsFromString stores the char*, so the char array must
// outlive the application.
const std::string field_trials = absl::GetFlag(FLAGS_force_fieldtrials);
@@ -650,13 +673,15 @@ int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
settings.processed_capture_samples = processed_capture_samples;
RTC_CHECK(settings.processed_capture_samples);
}
- PerformBasicParameterSanityChecks(settings);
+ PerformBasicParameterSanityChecks(settings, !!audio_processing, !!ap_builder);
std::unique_ptr<AudioProcessingSimulator> processor;
if (settings.aec_dump_input_filename || settings.aec_dump_input_string) {
- processor.reset(new AecDumpBasedSimulator(settings, std::move(ap_builder)));
+ processor.reset(new AecDumpBasedSimulator(
+ settings, std::move(audio_processing), std::move(ap_builder)));
} else {
- processor.reset(new WavBasedSimulator(settings, std::move(ap_builder)));
+ processor.reset(new WavBasedSimulator(settings, std::move(audio_processing),
+ std::move(ap_builder)));
}
processor->Process();
@@ -680,5 +705,24 @@ int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
return 0;
}
+} // namespace
+
+int AudioprocFloatImpl(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]) {
+ return RunSimulation(
+ std::move(audio_processing), /*ap_builder=*/nullptr, argc, argv,
+ /*input_aecdump=*/"", /*processed_capture_samples=*/nullptr);
+}
+
+int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
+ int argc,
+ char* argv[],
+ absl::string_view input_aecdump,
+ std::vector<float>* processed_capture_samples) {
+ return RunSimulation(/*audio_processing=*/nullptr, std::move(ap_builder),
+ argc, argv, input_aecdump, processed_capture_samples);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/modules/audio_processing/test/audioproc_float_impl.h b/modules/audio_processing/test/audioproc_float_impl.h
index 9a9013c644..0687c43a5d 100644
--- a/modules/audio_processing/test/audioproc_float_impl.h
+++ b/modules/audio_processing/test/audioproc_float_impl.h
@@ -24,6 +24,21 @@ namespace test {
// via the |argv| argument. Pass |processed_capture_samples| to write in it the
// samples processed on the capture side; if |processed_capture_samples| is not
// passed, the output file can optionally be specified via the |argv| argument.
+// Any audio_processing object specified in the input is used for the
+// simulation. Note that when the audio_processing object is specified all
+// functionality that relies on using the internal builder is deactivated,
+// since the AudioProcessing object is already created and the builder is not
+// used in the simulation.
+int AudioprocFloatImpl(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]);
+
+// This function implements the audio processing simulation utility. Pass
+// |input_aecdump| to provide the content of an AEC dump file as a string; if
+// |input_aecdump| is not passed, a WAV or AEC input dump file must be specified
+// via the |argv| argument. Pass |processed_capture_samples| to write in it the
+// samples processed on the capture side; if |processed_capture_samples| is not
+// passed, the output file can optionally be specified via the |argv| argument.
int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[],
diff --git a/modules/audio_processing/test/conversational_speech/BUILD.gn b/modules/audio_processing/test/conversational_speech/BUILD.gn
index fb532befb9..b311abdbd1 100644
--- a/modules/audio_processing/test/conversational_speech/BUILD.gn
+++ b/modules/audio_processing/test/conversational_speech/BUILD.gn
@@ -68,6 +68,6 @@ rtc_library("unittest") {
"../../../../test:fileutils",
"../../../../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/modules/audio_processing/test/conversational_speech/mock_wavreader.h b/modules/audio_processing/test/conversational_speech/mock_wavreader.h
index 591299eefe..94e20b9ec6 100644
--- a/modules/audio_processing/test/conversational_speech/mock_wavreader.h
+++ b/modules/audio_processing/test/conversational_speech/mock_wavreader.h
@@ -28,12 +28,12 @@ class MockWavReader : public WavReaderInterface {
~MockWavReader();
// TODO(alessiob): use ON_CALL to return random samples if needed.
- MOCK_METHOD1(ReadFloatSamples, size_t(rtc::ArrayView<float>));
- MOCK_METHOD1(ReadInt16Samples, size_t(rtc::ArrayView<int16_t>));
+ MOCK_METHOD(size_t, ReadFloatSamples, (rtc::ArrayView<float>), (override));
+ MOCK_METHOD(size_t, ReadInt16Samples, (rtc::ArrayView<int16_t>), (override));
- MOCK_CONST_METHOD0(SampleRate, int());
- MOCK_CONST_METHOD0(NumChannels, size_t());
- MOCK_CONST_METHOD0(NumSamples, size_t());
+ MOCK_METHOD(int, SampleRate, (), (const, override));
+ MOCK_METHOD(size_t, NumChannels, (), (const, override));
+ MOCK_METHOD(size_t, NumSamples, (), (const, override));
private:
const int sample_rate_;
diff --git a/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h b/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
index e84f5f35cc..c2db85f6f6 100644
--- a/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
+++ b/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
@@ -36,8 +36,10 @@ class MockWavReaderFactory : public WavReaderAbstractFactory {
explicit MockWavReaderFactory(const Params& default_params);
~MockWavReaderFactory();
- MOCK_CONST_METHOD1(Create,
- std::unique_ptr<WavReaderInterface>(const std::string&));
+ MOCK_METHOD(std::unique_ptr<WavReaderInterface>,
+ Create,
+ (const std::string&),
+ (const, override));
private:
// Creates a MockWavReader instance using the parameters in
diff --git a/modules/audio_processing/test/echo_control_mock.h b/modules/audio_processing/test/echo_control_mock.h
index 95d3be5cdf..927de43ae0 100644
--- a/modules/audio_processing/test/echo_control_mock.h
+++ b/modules/audio_processing/test/echo_control_mock.h
@@ -20,17 +20,21 @@ class AudioBuffer;
class MockEchoControl : public EchoControl {
public:
- MOCK_METHOD1(AnalyzeRender, void(AudioBuffer* render));
- MOCK_METHOD1(AnalyzeCapture, void(AudioBuffer* capture));
- MOCK_METHOD2(ProcessCapture,
- void(AudioBuffer* capture, bool echo_path_change));
- MOCK_METHOD3(ProcessCapture,
- void(AudioBuffer* capture,
- AudioBuffer* linear_output,
- bool echo_path_change));
- MOCK_CONST_METHOD0(GetMetrics, EchoControl::Metrics());
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_CONST_METHOD0(ActiveProcessing, bool());
+ MOCK_METHOD(void, AnalyzeRender, (AudioBuffer * render), (override));
+ MOCK_METHOD(void, AnalyzeCapture, (AudioBuffer * capture), (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture, bool echo_path_change),
+ (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture,
+ AudioBuffer* linear_output,
+ bool echo_path_change),
+ (override));
+ MOCK_METHOD(EchoControl::Metrics, GetMetrics, (), (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, ActiveProcessing, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py
index f5240f8696..c0f3d0e2a7 100644
--- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py
+++ b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py
@@ -9,14 +9,9 @@
"""Unit tests for the apm_quality_assessment module.
"""
-import os
import sys
import unittest
-SRC = os.path.abspath(os.path.join(
- os.path.dirname((__file__)), os.pardir, os.pardir, os.pardir))
-sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
-
import mock
import apm_quality_assessment
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py
index b212614199..87257e2fc9 100644
--- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py
+++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py
@@ -12,14 +12,9 @@
import logging
import os
import shutil
-import sys
import tempfile
import unittest
-SRC = os.path.abspath(os.path.join(
- os.path.dirname((__file__)), os.pardir, os.pardir, os.pardir, os.pardir))
-sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
-
import mock
from . import exceptions
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py
index c39b12dd00..7d9ad6ca79 100644
--- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py
+++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py
@@ -12,14 +12,9 @@
import logging
import os
import shutil
-import sys
import tempfile
import unittest
-SRC = os.path.abspath(os.path.join(
- os.path.dirname((__file__)), os.pardir, os.pardir, os.pardir, os.pardir))
-sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
-
import mock
import pydub
diff --git a/modules/audio_processing/test/wav_based_simulator.cc b/modules/audio_processing/test/wav_based_simulator.cc
index 7179fc3431..75946fb3fa 100644
--- a/modules/audio_processing/test/wav_based_simulator.cc
+++ b/modules/audio_processing/test/wav_based_simulator.cc
@@ -56,8 +56,18 @@ WavBasedSimulator::GetCustomEventChain(const std::string& filename) {
WavBasedSimulator::WavBasedSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
- : AudioProcessingSimulator(settings, std::move(ap_builder)) {}
+ : AudioProcessingSimulator(settings,
+ std::move(audio_processing),
+ std::move(ap_builder)) {
+ if (settings_.call_order_input_filename) {
+ call_chain_ = WavBasedSimulator::GetCustomEventChain(
+ *settings_.call_order_input_filename);
+ } else {
+ call_chain_ = WavBasedSimulator::GetDefaultEventChain();
+ }
+}
WavBasedSimulator::~WavBasedSimulator() = default;
@@ -89,13 +99,7 @@ void WavBasedSimulator::PrepareReverseProcessStreamCall() {
}
void WavBasedSimulator::Process() {
- if (settings_.call_order_input_filename) {
- call_chain_ = WavBasedSimulator::GetCustomEventChain(
- *settings_.call_order_input_filename);
- } else {
- call_chain_ = WavBasedSimulator::GetDefaultEventChain();
- }
- CreateAudioProcessor();
+ ConfigureAudioProcessor();
Initialize();
@@ -120,7 +124,7 @@ void WavBasedSimulator::Process() {
call_chain_index = (call_chain_index + 1) % call_chain_.size();
}
- DestroyAudioProcessor();
+ DetachAecDump();
}
bool WavBasedSimulator::HandleProcessStreamCall() {
diff --git a/modules/audio_processing/test/wav_based_simulator.h b/modules/audio_processing/test/wav_based_simulator.h
index 991f1dbaad..3adbe7022c 100644
--- a/modules/audio_processing/test/wav_based_simulator.h
+++ b/modules/audio_processing/test/wav_based_simulator.h
@@ -23,6 +23,7 @@ namespace test {
class WavBasedSimulator final : public AudioProcessingSimulator {
public:
WavBasedSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
~WavBasedSimulator() override;
diff --git a/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
index 989e362a49..ff7022dba4 100644
--- a/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
+++ b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
@@ -103,7 +103,7 @@ TEST(CascadedBiquadFilter, TransparentConfiguration) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check of the lengths for the input and output works for the
// non-in-place call.
-TEST(CascadedBiquadFilter, InputSizeCheckVerification) {
+TEST(CascadedBiquadFilterDeathTest, InputSizeCheckVerification) {
const std::vector<float> input = CreateInputWithIncreasingValues(10);
std::vector<float> output(input.size() - 1);
diff --git a/modules/audio_processing/utility/pffft_wrapper_unittest.cc b/modules/audio_processing/utility/pffft_wrapper_unittest.cc
index 9aed548934..2ad6849cd4 100644
--- a/modules/audio_processing/utility/pffft_wrapper_unittest.cc
+++ b/modules/audio_processing/utility/pffft_wrapper_unittest.cc
@@ -125,23 +125,24 @@ TEST(PffftTest, CreateWrapperWithValidSize) {
#if !defined(NDEBUG) && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-class PffftInvalidSizeTest : public ::testing::Test,
- public ::testing::WithParamInterface<size_t> {};
+class PffftInvalidSizeDeathTest : public ::testing::Test,
+ public ::testing::WithParamInterface<size_t> {
+};
-TEST_P(PffftInvalidSizeTest, DoNotCreateRealWrapper) {
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateRealWrapper) {
size_t fft_size = GetParam();
ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kReal));
EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kReal), "");
}
-TEST_P(PffftInvalidSizeTest, DoNotCreateComplexWrapper) {
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateComplexWrapper) {
size_t fft_size = GetParam();
ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kComplex));
EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kComplex), "");
}
INSTANTIATE_TEST_SUITE_P(PffftTest,
- PffftInvalidSizeTest,
+ PffftInvalidSizeDeathTest,
::testing::Values(17,
33,
65,
diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn
index 6f2b853f8f..231ff5e0dd 100644
--- a/modules/congestion_controller/BUILD.gn
+++ b/modules/congestion_controller/BUILD.gn
@@ -28,6 +28,7 @@ rtc_library("congestion_controller") {
"..:module_api",
"../../api/transport:field_trial_based_config",
"../../api/transport:network_control",
+ "../../rtc_base/synchronization:mutex",
"../pacing",
"../remote_bitrate_estimator",
"../rtp_rtcp:rtp_rtcp_format",
diff --git a/modules/congestion_controller/goog_cc/BUILD.gn b/modules/congestion_controller/goog_cc/BUILD.gn
index fa95bc186c..52daad2bce 100644
--- a/modules/congestion_controller/goog_cc/BUILD.gn
+++ b/modules/congestion_controller/goog_cc/BUILD.gn
@@ -51,6 +51,8 @@ rtc_library("goog_cc") {
"../../../rtc_base/experiments:rate_control_settings",
"../../../system_wrappers",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -64,8 +66,8 @@ rtc_library("link_capacity_estimator") {
deps = [
"../../../api/units:data_rate",
"../../../rtc_base:safe_minmax",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("pushback_controller") {
@@ -79,6 +81,8 @@ rtc_library("pushback_controller") {
"../../../api/units:data_size",
"../../../rtc_base:checks",
"../../../rtc_base/experiments:rate_control_settings",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -100,8 +104,8 @@ rtc_library("alr_detector") {
"../../../rtc_base/experiments:alr_experiment",
"../../../rtc_base/experiments:field_trial_parser",
"../../pacing:interval_budget",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("estimators") {
configs += [ ":bwe_test_logging" ]
@@ -137,6 +141,8 @@ rtc_library("estimators") {
"../../../rtc_base:safe_minmax",
"../../../rtc_base/experiments:field_trial_parser",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -163,6 +169,8 @@ rtc_library("loss_based_controller") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -189,6 +197,8 @@ rtc_library("delay_based_bwe") {
"../../../system_wrappers:metrics",
"../../pacing",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -216,6 +226,8 @@ rtc_library("probe_controller") {
"../../../rtc_base/experiments:field_trial_parser",
"../../../rtc_base/system:unused",
"../../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -240,8 +252,8 @@ if (rtc_include_tests) {
"../../../rtc_base:checks",
"../../../test/logging:log_writer",
"../../remote_bitrate_estimator",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc_unittests") {
testonly = true
diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
index 9031c5d272..e5b733b119 100644
--- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
+++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
@@ -35,10 +35,12 @@ constexpr size_t kPayloadSize = 10;
class MockBitrateEstimator : public BitrateEstimator {
public:
using BitrateEstimator::BitrateEstimator;
- MOCK_METHOD3(Update,
- void(Timestamp at_time, DataSize data_size, bool in_alr));
- MOCK_CONST_METHOD0(bitrate, absl::optional<DataRate>());
- MOCK_METHOD0(ExpectFastRateChange, void());
+ MOCK_METHOD(void,
+ Update,
+ (Timestamp at_time, DataSize data_size, bool in_alr),
+ (override));
+ MOCK_METHOD(absl::optional<DataRate>, bitrate, (), (const, override));
+ MOCK_METHOD(void, ExpectFastRateChange, (), (override));
};
struct AcknowledgedBitrateEstimatorTestStates {
diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h
index 4f13b4d549..6cd8be39a9 100644
--- a/modules/congestion_controller/include/receive_side_congestion_controller.h
+++ b/modules/congestion_controller/include/receive_side_congestion_controller.h
@@ -19,7 +19,7 @@
#include "modules/include/module.h"
#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
class RemoteBitrateEstimator;
@@ -87,11 +87,11 @@ class ReceiveSideCongestionController : public CallStatsObserver,
private:
void PickEstimatorFromHeader(const RTPHeader& header)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
RemoteBitrateObserver* observer_;
Clock* const clock_;
- rtc::CriticalSection crit_sect_;
+ mutable Mutex mutex_;
std::unique_ptr<RemoteBitrateEstimator> rbe_;
bool using_absolute_send_time_;
uint32_t packets_since_absolute_send_time_;
diff --git a/modules/congestion_controller/pcc/BUILD.gn b/modules/congestion_controller/pcc/BUILD.gn
index d0111725d2..2f378769e7 100644
--- a/modules/congestion_controller/pcc/BUILD.gn
+++ b/modules/congestion_controller/pcc/BUILD.gn
@@ -37,8 +37,8 @@ rtc_library("pcc_controller") {
"../../../api/units:timestamp",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("monitor_interval") {
@@ -94,8 +94,8 @@ rtc_library("bitrate_controller") {
"../../../api/transport:network_control",
"../../../api/units:data_rate",
"../../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
diff --git a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
index 6693b7a833..957d99b1de 100644
--- a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
+++ b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
@@ -67,8 +67,10 @@ std::vector<PacketResult> CreatePacketResults(
class MockUtilityFunction : public PccUtilityFunctionInterface {
public:
- MOCK_CONST_METHOD1(Compute,
- double(const PccMonitorInterval& monitor_interval));
+ MOCK_METHOD(double,
+ Compute,
+ (const PccMonitorInterval& monitor_interval),
+ (const, override));
};
} // namespace
diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc
index 7448ec28b2..638cb2d295 100644
--- a/modules/congestion_controller/receive_side_congestion_controller.cc
+++ b/modules/congestion_controller/receive_side_congestion_controller.cc
@@ -38,45 +38,45 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator::IncomingPacket(
int64_t arrival_time_ms,
size_t payload_size,
const RTPHeader& header) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
PickEstimatorFromHeader(header);
rbe_->IncomingPacket(arrival_time_ms, payload_size, header);
}
void ReceiveSideCongestionController::WrappingBitrateEstimator::Process() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
rbe_->Process();
}
int64_t ReceiveSideCongestionController::WrappingBitrateEstimator::
TimeUntilNextProcess() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return rbe_->TimeUntilNextProcess();
}
void ReceiveSideCongestionController::WrappingBitrateEstimator::OnRttUpdate(
int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
rbe_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
}
void ReceiveSideCongestionController::WrappingBitrateEstimator::RemoveStream(
unsigned int ssrc) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
rbe_->RemoveStream(ssrc);
}
bool ReceiveSideCongestionController::WrappingBitrateEstimator::LatestEstimate(
std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return rbe_->LatestEstimate(ssrcs, bitrate_bps);
}
void ReceiveSideCongestionController::WrappingBitrateEstimator::SetMinBitrate(
int min_bitrate_bps) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
rbe_->SetMinBitrate(min_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
}
diff --git a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
index 95143f7175..b5846237ee 100644
--- a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
+++ b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
@@ -37,8 +37,10 @@ uint32_t AbsSendTime(int64_t t, int64_t denom) {
class MockPacketRouter : public PacketRouter {
public:
- MOCK_METHOD2(OnReceiveBitrateChanged,
- void(const std::vector<uint32_t>& ssrcs, uint32_t bitrate));
+ MOCK_METHOD(void,
+ OnReceiveBitrateChanged,
+ (const std::vector<uint32_t>& ssrcs, uint32_t bitrate),
+ (override));
};
const uint32_t kInitialBitrateBps = 60000;
diff --git a/modules/congestion_controller/rtp/BUILD.gn b/modules/congestion_controller/rtp/BUILD.gn
index b444f5495b..2f97b67263 100644
--- a/modules/congestion_controller/rtp/BUILD.gn
+++ b/modules/congestion_controller/rtp/BUILD.gn
@@ -33,8 +33,8 @@ rtc_library("control_handler") {
"../../../rtc_base/synchronization:sequence_checker",
"../../../system_wrappers:field_trial",
"../../pacing",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!build_with_mozilla) {
deps += [ "../../../rtc_base" ]
@@ -58,9 +58,12 @@ rtc_library("transport_feedback") {
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/network:sent_packet",
+ "../../../rtc_base/synchronization:mutex",
"../../../system_wrappers",
"../../../system_wrappers:field_trial",
"../../rtp_rtcp:rtp_rtcp_format",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.h b/modules/congestion_controller/rtp/transport_feedback_adapter.h
index b8148a252f..c41a7c67f8 100644
--- a/modules/congestion_controller/rtp/transport_feedback_adapter.h
+++ b/modules/congestion_controller/rtp/transport_feedback_adapter.h
@@ -19,7 +19,6 @@
#include "api/transport/network_types.h"
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/network/sent_packet.h"
#include "rtc_base/network_route.h"
#include "rtc_base/thread_annotations.h"
diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
index 1c74b196d8..3849cb3707 100644
--- a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
@@ -83,8 +83,10 @@ namespace test {
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
public:
- MOCK_METHOD1(OnPacketFeedbackVector,
- void(std::vector<StreamPacketInfo> packet_feedback_vector));
+ MOCK_METHOD(void,
+ OnPacketFeedbackVector,
+ (std::vector<StreamPacketInfo> packet_feedback_vector),
+ (override));
};
class TransportFeedbackAdapterTest : public ::testing::Test {
diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
index 045ba38cd3..c958a1c3cb 100644
--- a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
@@ -18,7 +18,7 @@ static const size_t kMaxPacketsInHistory = 5000;
void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver(
std::vector<uint32_t> ssrcs,
StreamFeedbackObserver* observer) {
- rtc::CritScope cs(&observers_lock_);
+ MutexLock lock(&observers_lock_);
RTC_DCHECK(observer);
RTC_DCHECK(absl::c_find_if(observers_, [=](const auto& pair) {
return pair.second == observer;
@@ -28,7 +28,7 @@ void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver(
void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver(
StreamFeedbackObserver* observer) {
- rtc::CritScope cs(&observers_lock_);
+ MutexLock lock(&observers_lock_);
RTC_DCHECK(observer);
const auto it = absl::c_find_if(
observers_, [=](const auto& pair) { return pair.second == observer; });
@@ -37,7 +37,7 @@ void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver(
}
void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (packet_info.ssrc != 0) {
StreamFeedbackObserver::StreamPacketInfo info;
info.ssrc = packet_info.ssrc;
@@ -56,7 +56,7 @@ void TransportFeedbackDemuxer::OnTransportFeedback(
const rtcp::TransportFeedback& feedback) {
std::vector<StreamFeedbackObserver::StreamPacketInfo> stream_feedbacks;
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
for (const auto& packet : feedback.GetAllPackets()) {
int64_t seq_num =
seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number());
@@ -71,7 +71,7 @@ void TransportFeedbackDemuxer::OnTransportFeedback(
}
}
- rtc::CritScope cs(&observers_lock_);
+ MutexLock lock(&observers_lock_);
for (auto& observer : observers_) {
std::vector<StreamFeedbackObserver::StreamPacketInfo> selected_feedback;
for (const auto& packet_info : stream_feedbacks) {
diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/modules/congestion_controller/rtp/transport_feedback_demuxer.h
index bcd25d5835..634a37ea1a 100644
--- a/modules/congestion_controller/rtp/transport_feedback_demuxer.h
+++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.h
@@ -16,7 +16,7 @@
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -32,7 +32,7 @@ class TransportFeedbackDemuxer : public StreamFeedbackProvider {
void OnTransportFeedback(const rtcp::TransportFeedback& feedback);
private:
- rtc::CriticalSection lock_;
+ Mutex lock_;
SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&lock_);
std::map<int64_t, StreamFeedbackObserver::StreamPacketInfo> history_
RTC_GUARDED_BY(&lock_);
@@ -40,7 +40,7 @@ class TransportFeedbackDemuxer : public StreamFeedbackProvider {
// Maps a set of ssrcs to corresponding observer. Vectors are used rather than
// set/map to ensure that the processing order is consistent independently of
// the randomized ssrcs.
- rtc::CriticalSection observers_lock_;
+ Mutex observers_lock_;
std::vector<std::pair<std::vector<uint32_t>, StreamFeedbackObserver*>>
observers_ RTC_GUARDED_BY(&observers_lock_);
};
diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
index dce52de557..6514a4eda7 100644
--- a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
@@ -21,8 +21,10 @@ static constexpr uint32_t kSsrc = 8492;
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
public:
- MOCK_METHOD1(OnPacketFeedbackVector,
- void(std::vector<StreamPacketInfo> packet_feedback_vector));
+ MOCK_METHOD(void,
+ OnPacketFeedbackVector,
+ (std::vector<StreamPacketInfo> packet_feedback_vector),
+ (override));
};
RtpPacketSendInfo CreatePacket(uint32_t ssrc,
diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn
index 4f93c246fe..eb26f5c7bb 100644
--- a/modules/desktop_capture/BUILD.gn
+++ b/modules/desktop_capture/BUILD.gn
@@ -42,7 +42,8 @@ rtc_library("primitives") {
]
if (!build_with_mozilla) {
- deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in bugs.webrtc.org/3806.
+ deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in
+ # bugs.webrtc.org/3806.
}
}
@@ -224,7 +225,8 @@ if (is_linux) {
rtc_source_set("desktop_capture") {
visibility = [ "*" ]
- public_deps = [ ":desktop_capture_generic" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = # no-presubmit-check TODO(webrtc:8603)
+ [ ":desktop_capture_generic" ]
if (is_mac) {
public_deps += [ ":desktop_capture_objc" ]
}
@@ -260,7 +262,7 @@ if (is_mac) {
"../../rtc_base/system:rtc_export",
"../../sdk:helpers_objc",
]
- libs = [
+ frameworks = [
"AppKit.framework",
"IOKit.framework",
"IOSurface.framework",
@@ -457,6 +459,10 @@ rtc_library("desktop_capture_generic") {
"win/selected_window_context.h",
"win/window_capture_utils.cc",
"win/window_capture_utils.h",
+ "win/window_capturer_win_gdi.cc",
+ "win/window_capturer_win_gdi.h",
+ "win/window_capturer_win_wgc.cc",
+ "win/window_capturer_win_wgc.h",
"window_capturer_win.cc",
"window_finder_win.cc",
"window_finder_win.h",
@@ -474,12 +480,15 @@ rtc_library("desktop_capture_generic") {
"../../api:scoped_refptr",
"../../rtc_base", # TODO(kjellander): Cleanup in bugs.webrtc.org/3806.
"../../rtc_base:checks",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:rw_lock_wrapper",
"../../rtc_base/system:arch",
"../../rtc_base/system:rtc_export",
"../../system_wrappers",
"../../system_wrappers:cpu_features_api",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
diff --git a/modules/desktop_capture/linux/screen_capturer_x11.cc b/modules/desktop_capture/linux/screen_capturer_x11.cc
index 63c5abfff8..1b17071411 100644
--- a/modules/desktop_capture/linux/screen_capturer_x11.cc
+++ b/modules/desktop_capture/linux/screen_capturer_x11.cc
@@ -243,7 +243,7 @@ void ScreenCapturerX11::CaptureFrame() {
return;
}
- // If the current frame is from an older generation then allocate a new one.
+ // Allocate the current frame buffer only if it is not already allocated.
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
if (!queue_.current_frame()) {
@@ -293,6 +293,12 @@ bool ScreenCapturerX11::GetSourceList(SourceList* sources) {
}
bool ScreenCapturerX11::SelectSource(SourceId id) {
+ // Prevent the reuse of any frame buffers allocated for a previously selected
+ // source. This is required to stop crashes, or old data from appearing in
+ // a captured frame, when the new source is sized differently then the source
+ // that was selected at the time a reused frame buffer was created.
+ queue_.Reset();
+
if (!use_randr_ || id == kFullDesktopScreenId) {
selected_monitor_name_ = kFullDesktopScreenId;
selected_monitor_rect_ =
diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.cc b/modules/desktop_capture/mac/desktop_configuration_monitor.cc
index e2225cd4a9..048a679ecc 100644
--- a/modules/desktop_capture/mac/desktop_configuration_monitor.cc
+++ b/modules/desktop_capture/mac/desktop_configuration_monitor.cc
@@ -21,7 +21,7 @@ DesktopConfigurationMonitor::DesktopConfigurationMonitor() {
DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
if (err != kCGErrorSuccess)
RTC_LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
- rtc::CritScope cs(&desktop_configuration_lock_);
+ MutexLock lock(&desktop_configuration_lock_);
desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
MacDesktopConfiguration::TopLeftOrigin);
}
@@ -34,7 +34,7 @@ DesktopConfigurationMonitor::~DesktopConfigurationMonitor() {
}
MacDesktopConfiguration DesktopConfigurationMonitor::desktop_configuration() {
- rtc::CritScope crit(&desktop_configuration_lock_);
+ MutexLock lock(&desktop_configuration_lock_);
return desktop_configuration_;
}
@@ -64,7 +64,7 @@ void DesktopConfigurationMonitor::DisplaysReconfigured(
reconfiguring_displays_.erase(display);
if (reconfiguring_displays_.empty()) {
- rtc::CritScope cs(&desktop_configuration_lock_);
+ MutexLock lock(&desktop_configuration_lock_);
desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
MacDesktopConfiguration::TopLeftOrigin);
}
diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.h b/modules/desktop_capture/mac/desktop_configuration_monitor.h
index 1ed4c6bbcf..46a66d1d4c 100644
--- a/modules/desktop_capture/mac/desktop_configuration_monitor.h
+++ b/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -19,7 +19,7 @@
#include "api/ref_counted_base.h"
#include "modules/desktop_capture/mac/desktop_configuration.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -41,7 +41,7 @@ class DesktopConfigurationMonitor : public rtc::RefCountedBase {
void DisplaysReconfigured(CGDirectDisplayID display,
CGDisplayChangeSummaryFlags flags);
- rtc::CriticalSection desktop_configuration_lock_;
+ Mutex desktop_configuration_lock_;
MacDesktopConfiguration desktop_configuration_
RTC_GUARDED_BY(&desktop_configuration_lock_);
std::set<CGDirectDisplayID> reconfiguring_displays_;
diff --git a/modules/desktop_capture/mock_desktop_capturer_callback.h b/modules/desktop_capture/mock_desktop_capturer_callback.h
index 659239ab9d..6530dc5542 100644
--- a/modules/desktop_capture/mock_desktop_capturer_callback.h
+++ b/modules/desktop_capture/mock_desktop_capturer_callback.h
@@ -22,9 +22,10 @@ class MockDesktopCapturerCallback : public DesktopCapturer::Callback {
MockDesktopCapturerCallback();
~MockDesktopCapturerCallback() override;
- MOCK_METHOD2(OnCaptureResultPtr,
- void(DesktopCapturer::Result result,
- std::unique_ptr<DesktopFrame>* frame));
+ MOCK_METHOD(void,
+ OnCaptureResultPtr,
+ (DesktopCapturer::Result result,
+ std::unique_ptr<DesktopFrame>* frame));
void OnCaptureResult(DesktopCapturer::Result result,
std::unique_ptr<DesktopFrame> frame) final;
diff --git a/modules/desktop_capture/win/dxgi_duplicator_controller.h b/modules/desktop_capture/win/dxgi_duplicator_controller.h
index a24e9781b3..b6f8e78649 100644
--- a/modules/desktop_capture/win/dxgi_duplicator_controller.h
+++ b/modules/desktop_capture/win/dxgi_duplicator_controller.h
@@ -25,7 +25,7 @@
#include "modules/desktop_capture/win/dxgi_adapter_duplicator.h"
#include "modules/desktop_capture/win/dxgi_context.h"
#include "modules/desktop_capture/win/dxgi_frame.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
namespace webrtc {
@@ -219,7 +219,7 @@ class DxgiDuplicatorController {
std::atomic_int refcount_;
// This lock must be locked whenever accessing any of the following objects.
- rtc::CriticalSection lock_;
+ rtc::RecursiveCriticalSection lock_;
// A self-incremented integer to compare with the one in Context. It ensures
// a Context instance is always initialized after DxgiDuplicatorController.
diff --git a/modules/desktop_capture/win/dxgi_output_duplicator.h b/modules/desktop_capture/win/dxgi_output_duplicator.h
index 5395146042..3079d3967a 100644
--- a/modules/desktop_capture/win/dxgi_output_duplicator.h
+++ b/modules/desktop_capture/win/dxgi_output_duplicator.h
@@ -27,7 +27,6 @@
#include "modules/desktop_capture/win/d3d_device.h"
#include "modules/desktop_capture/win/dxgi_context.h"
#include "modules/desktop_capture/win/dxgi_texture.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc
index 226b564b64..006870f3c5 100644
--- a/modules/desktop_capture/win/window_capture_utils.cc
+++ b/modules/desktop_capture/win/window_capture_utils.cc
@@ -13,9 +13,13 @@
// Just for the DWMWINDOWATTRIBUTE enums (DWMWA_CLOAKED).
#include <dwmapi.h>
+#include <algorithm>
+
#include "modules/desktop_capture/win/scoped_gdi_object.h"
+#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_base/string_utils.h"
#include "rtc_base/win32.h"
namespace webrtc {
@@ -157,6 +161,63 @@ bool IsWindowMaximized(HWND window, bool* result) {
return true;
}
+bool IsWindowValidAndVisible(HWND window) {
+ return IsWindow(window) && IsWindowVisible(window) && !IsIconic(window);
+}
+
+BOOL CALLBACK FilterUncapturableWindows(HWND hwnd, LPARAM param) {
+ DesktopCapturer::SourceList* list =
+ reinterpret_cast<DesktopCapturer::SourceList*>(param);
+
+ // Skip windows that are invisible, minimized, have no title, or are owned,
+ // unless they have the app window style set.
+ int len = GetWindowTextLength(hwnd);
+ HWND owner = GetWindow(hwnd, GW_OWNER);
+ LONG exstyle = GetWindowLong(hwnd, GWL_EXSTYLE);
+ if (len == 0 || !IsWindowValidAndVisible(hwnd) ||
+ (owner && !(exstyle & WS_EX_APPWINDOW))) {
+ return TRUE;
+ }
+
+ // Skip unresponsive windows. Set timout with 50ms, in case system is under
+ // heavy load. We could wait longer and have a lower false negative, but that
+ // would delay the the enumeration.
+ const UINT timeout = 50; // ms
+ if (!SendMessageTimeout(hwnd, WM_NULL, 0, 0, SMTO_ABORTIFHUNG, timeout,
+ nullptr)) {
+ return TRUE;
+ }
+
+ // Skip the Program Manager window and the Start button.
+ WCHAR class_name[256];
+ const int class_name_length =
+ GetClassNameW(hwnd, class_name, arraysize(class_name));
+ if (class_name_length < 1)
+ return TRUE;
+
+ // Skip Program Manager window and the Start button. This is the same logic
+ // that's used in Win32WindowPicker in libjingle. Consider filtering other
+ // windows as well (e.g. toolbars).
+ if (wcscmp(class_name, L"Progman") == 0 || wcscmp(class_name, L"Button") == 0)
+ return TRUE;
+
+ DesktopCapturer::Source window;
+ window.id = reinterpret_cast<WindowId>(hwnd);
+
+ // Truncate the title if it's longer than 500 characters.
+ WCHAR window_title[500];
+ GetWindowTextW(hwnd, window_title, arraysize(window_title));
+ window.title = rtc::ToUtf8(window_title);
+
+ // Skip windows when we failed to convert the title or it is empty.
+ if (window.title.empty())
+ return TRUE;
+
+ list->push_back(window);
+
+ return TRUE;
+}
+
// WindowCaptureHelperWin implementation.
WindowCaptureHelperWin::WindowCaptureHelperWin() {
// Try to load dwmapi.dll dynamically since it is not available on XP.
@@ -223,12 +284,13 @@ bool WindowCaptureHelperWin::IsWindowChromeNotification(HWND hwnd) {
}
// |content_rect| is preferred because,
-// 1. WindowCapturerWin is using GDI capturer, which cannot capture DX output.
+// 1. WindowCapturerWinGdi is using GDI capturer, which cannot capture DX
+// output.
// So ScreenCapturer should be used as much as possible to avoid
// uncapturable cases. Note: lots of new applications are using DX output
// (hardware acceleration) to improve the performance which cannot be
-// captured by WindowCapturerWin. See bug http://crbug.com/741770.
-// 2. WindowCapturerWin is still useful because we do not want to expose the
+// captured by WindowCapturerWinGdi. See bug http://crbug.com/741770.
+// 2. WindowCapturerWinGdi is still useful because we do not want to expose the
// content on other windows if the target window is covered by them.
// 3. Shadow and borders should not be considered as "content" on other
// windows because they do not expose any useful information.
@@ -288,8 +350,8 @@ bool WindowCaptureHelperWin::IsWindowOnCurrentDesktop(HWND hwnd) {
}
bool WindowCaptureHelperWin::IsWindowVisibleOnCurrentDesktop(HWND hwnd) {
- return !::IsIconic(hwnd) && ::IsWindowVisible(hwnd) &&
- IsWindowOnCurrentDesktop(hwnd) && !IsWindowCloaked(hwnd);
+ return IsWindowValidAndVisible(hwnd) && IsWindowOnCurrentDesktop(hwnd) &&
+ !IsWindowCloaked(hwnd);
}
// A cloaked window is composited but not visible to the user.
@@ -303,11 +365,28 @@ bool WindowCaptureHelperWin::IsWindowCloaked(HWND hwnd) {
int res = 0;
if (dwm_get_window_attribute_func_(hwnd, DWMWA_CLOAKED, &res, sizeof(res)) !=
S_OK) {
- // Cannot tell so assume not cloacked for backward compatibility.
+ // Cannot tell so assume not cloaked for backward compatibility.
return false;
}
return res != 0;
}
+bool WindowCaptureHelperWin::EnumerateCapturableWindows(
+ DesktopCapturer::SourceList* results) {
+ LPARAM param = reinterpret_cast<LPARAM>(results);
+ if (!EnumWindows(&FilterUncapturableWindows, param))
+ return false;
+
+ for (auto it = results->begin(); it != results->end();) {
+ if (!IsWindowVisibleOnCurrentDesktop(reinterpret_cast<HWND>(it->id))) {
+ it = results->erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ return true;
+}
+
} // namespace webrtc
diff --git a/modules/desktop_capture/win/window_capture_utils.h b/modules/desktop_capture/win/window_capture_utils.h
index 20a475510b..af55ceb534 100644
--- a/modules/desktop_capture/win/window_capture_utils.h
+++ b/modules/desktop_capture/win/window_capture_utils.h
@@ -15,6 +15,7 @@
#include <windows.h>
#include <wrl/client.h>
+#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/desktop_geometry.h"
#include "rtc_base/constructor_magic.h"
@@ -40,7 +41,7 @@ bool GetWindowRect(HWND window, DesktopRect* result);
// This function should only be used by CroppingWindowCapturerWin. Instead a
// DesktopRect CropWindowRect(const DesktopRect& rect)
// should be added as a utility function to help CroppingWindowCapturerWin and
-// WindowCapturerWin to crop out the borders or shadow according to their
+// WindowCapturerWinGdi to crop out the borders or shadow according to their
// scenarios. But this function is too generic and easy to be misused.
bool GetCroppedWindowRect(HWND window,
bool avoid_cropping_border,
@@ -66,6 +67,15 @@ bool GetDcSize(HDC hdc, DesktopSize* size);
// function returns false if native APIs fail.
bool IsWindowMaximized(HWND window, bool* result);
+// Checks that the HWND is for a valid window, that window's visibility state is
+// visible, and that it is not minimized.
+bool IsWindowValidAndVisible(HWND window);
+
+// This function is passed into the EnumWindows API and filters out windows that
+// we don't want to capture, e.g. minimized or unresponsive windows and the
+// Start menu.
+BOOL CALLBACK FilterUncapturableWindows(HWND hwnd, LPARAM param);
+
typedef HRESULT(WINAPI* DwmIsCompositionEnabledFunc)(BOOL* enabled);
typedef HRESULT(WINAPI* DwmGetWindowAttributeFunc)(HWND hwnd,
DWORD flag,
@@ -84,6 +94,7 @@ class WindowCaptureHelperWin {
bool IsWindowOnCurrentDesktop(HWND hwnd);
bool IsWindowVisibleOnCurrentDesktop(HWND hwnd);
bool IsWindowCloaked(HWND hwnd);
+ bool EnumerateCapturableWindows(DesktopCapturer::SourceList* results);
private:
HMODULE dwmapi_library_ = nullptr;
diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.cc b/modules/desktop_capture/win/window_capturer_win_gdi.cc
new file mode 100644
index 0000000000..82a8551831
--- /dev/null
+++ b/modules/desktop_capture/win/window_capturer_win_gdi.cc
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/desktop_capture/win/window_capturer_win_gdi.h"
+
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "modules/desktop_capture/cropped_desktop_frame.h"
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/desktop_frame_win.h"
+#include "modules/desktop_capture/win/screen_capture_utils.h"
+#include "modules/desktop_capture/win/selected_window_context.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/trace_event.h"
+#include "rtc_base/win32.h"
+
+namespace webrtc {
+
+// Used to pass input/output data during the EnumWindows call to collect
+// owned/pop-up windows that should be captured.
+struct OwnedWindowCollectorContext : public SelectedWindowContext {
+ OwnedWindowCollectorContext(HWND selected_window,
+ DesktopRect selected_window_rect,
+ WindowCaptureHelperWin* window_capture_helper,
+ std::vector<HWND>* owned_windows)
+ : SelectedWindowContext(selected_window,
+ selected_window_rect,
+ window_capture_helper),
+ owned_windows(owned_windows) {}
+
+ std::vector<HWND>* owned_windows;
+};
+
+// Called via EnumWindows for each root window; adds owned/pop-up windows that
+// should be captured to a vector it's passed.
+BOOL CALLBACK OwnedWindowCollector(HWND hwnd, LPARAM param) {
+ OwnedWindowCollectorContext* context =
+ reinterpret_cast<OwnedWindowCollectorContext*>(param);
+ if (hwnd == context->selected_window()) {
+ // Windows are enumerated in top-down z-order, so we can stop enumerating
+ // upon reaching the selected window.
+ return FALSE;
+ }
+
+ // Skip windows that aren't visible pop-up windows.
+ if (!(GetWindowLong(hwnd, GWL_STYLE) & WS_POPUP) ||
+ !context->window_capture_helper()->IsWindowVisibleOnCurrentDesktop(
+ hwnd)) {
+ return TRUE;
+ }
+
+ // Owned windows that intersect the selected window should be captured.
+ if (context->IsWindowOwnedBySelectedWindow(hwnd) &&
+ context->IsWindowOverlappingSelectedWindow(hwnd)) {
+ // Skip windows that draw shadows around menus. These "SysShadow" windows
+ // would otherwise be captured as solid black bars with no transparency
+ // gradient (since this capturer doesn't detect / respect variations in the
+ // window alpha channel). Any other semi-transparent owned windows will be
+ // captured fully-opaque. This seems preferable to excluding them (at least
+ // when they have content aside from a solid fill color / visual adornment;
+ // e.g. some tooltips have the transparent style set).
+ if (GetWindowLong(hwnd, GWL_EXSTYLE) & WS_EX_TRANSPARENT) {
+ const WCHAR kSysShadow[] = L"SysShadow";
+ const size_t kClassLength = arraysize(kSysShadow);
+ WCHAR class_name[kClassLength];
+ const int class_name_length =
+ GetClassNameW(hwnd, class_name, kClassLength);
+ if (class_name_length == kClassLength - 1 &&
+ wcscmp(class_name, kSysShadow) == 0) {
+ return TRUE;
+ }
+ }
+
+ context->owned_windows->push_back(hwnd);
+ }
+
+ return TRUE;
+}
+
+WindowCapturerWinGdi::WindowCapturerWinGdi() {}
+WindowCapturerWinGdi::~WindowCapturerWinGdi() {}
+
+bool WindowCapturerWinGdi::GetSourceList(SourceList* sources) {
+ if (!window_capture_helper_.EnumerateCapturableWindows(sources))
+ return false;
+
+ std::map<HWND, DesktopSize> new_map;
+ for (const auto& item : *sources) {
+ HWND hwnd = reinterpret_cast<HWND>(item.id);
+ new_map[hwnd] = window_size_map_[hwnd];
+ }
+ window_size_map_.swap(new_map);
+
+ return true;
+}
+
+bool WindowCapturerWinGdi::SelectSource(SourceId id) {
+ HWND window = reinterpret_cast<HWND>(id);
+ if (!IsWindowValidAndVisible(window))
+ return false;
+
+ window_ = window;
+ // When a window is not in the map, window_size_map_[window] will create an
+ // item with DesktopSize (0, 0).
+ previous_size_ = window_size_map_[window];
+ return true;
+}
+
+bool WindowCapturerWinGdi::FocusOnSelectedSource() {
+ if (!window_)
+ return false;
+
+ if (!IsWindowValidAndVisible(window_))
+ return false;
+
+ return BringWindowToTop(window_) && SetForegroundWindow(window_);
+}
+
+bool WindowCapturerWinGdi::IsOccluded(const DesktopVector& pos) {
+ DesktopVector sys_pos = pos.add(GetFullscreenRect().top_left());
+ HWND hwnd =
+ reinterpret_cast<HWND>(window_finder_.GetWindowUnderPoint(sys_pos));
+
+ return hwnd != window_ &&
+ std::find(owned_windows_.begin(), owned_windows_.end(), hwnd) ==
+ owned_windows_.end();
+}
+
+void WindowCapturerWinGdi::Start(Callback* callback) {
+ RTC_DCHECK(!callback_);
+ RTC_DCHECK(callback);
+
+ callback_ = callback;
+}
+
+void WindowCapturerWinGdi::CaptureFrame() {
+ RTC_DCHECK(callback_);
+
+ CaptureResults results = CaptureFrame(/*capture_owned_windows*/ true);
+ callback_->OnCaptureResult(results.result, std::move(results.frame));
+}
+
+WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame(
+ bool capture_owned_windows) {
+ TRACE_EVENT0("webrtc", "WindowCapturerWinGdi::CaptureFrame");
+
+ if (!window_) {
+ RTC_LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError();
+ return {Result::ERROR_PERMANENT, nullptr};
+ }
+
+ // Stop capturing if the window has been closed.
+ if (!IsWindow(window_)) {
+ RTC_LOG(LS_ERROR) << "Target window has been closed.";
+ return {Result::ERROR_PERMANENT, nullptr};
+ }
+
+ // Determine the window region excluding any resize border, and including
+ // any visible border if capturing an owned window / dialog. (Don't include
+ // any visible border for the selected window for consistency with
+ // CroppingWindowCapturerWin, which would expose a bit of the background
+ // through the partially-transparent border.)
+ const bool avoid_cropping_border = !capture_owned_windows;
+ DesktopRect cropped_rect;
+ DesktopRect original_rect;
+
+ if (!GetCroppedWindowRect(window_, avoid_cropping_border, &cropped_rect,
+ &original_rect)) {
+ RTC_LOG(LS_WARNING) << "Failed to get drawable window area: "
+ << GetLastError();
+ return {Result::ERROR_TEMPORARY, nullptr};
+ }
+
+ // Return a 1x1 black frame if the window is minimized or invisible on current
+ // desktop, to match behavior on mace. Window can be temporarily invisible
+ // during the transition of full screen mode on/off.
+ if (original_rect.is_empty() ||
+ !window_capture_helper_.IsWindowVisibleOnCurrentDesktop(window_)) {
+ std::unique_ptr<DesktopFrame> frame(
+ new BasicDesktopFrame(DesktopSize(1, 1)));
+
+ previous_size_ = frame->size();
+ window_size_map_[window_] = previous_size_;
+ return {Result::SUCCESS, std::move(frame)};
+ }
+
+ HDC window_dc = GetWindowDC(window_);
+ if (!window_dc) {
+ RTC_LOG(LS_WARNING) << "Failed to get window DC: " << GetLastError();
+ return {Result::ERROR_TEMPORARY, nullptr};
+ }
+
+ DesktopRect unscaled_cropped_rect = cropped_rect;
+ double horizontal_scale = 1.0;
+ double vertical_scale = 1.0;
+
+ DesktopSize window_dc_size;
+ if (GetDcSize(window_dc, &window_dc_size)) {
+ // The |window_dc_size| is used to detect the scaling of the original
+ // window. If the application does not support high-DPI settings, it will
+ // be scaled by Windows according to the scaling setting.
+ // https://www.google.com/search?q=windows+scaling+settings&ie=UTF-8
+ // So the size of the |window_dc|, i.e. the bitmap we can retrieve from
+ // PrintWindow() or BitBlt() function, will be smaller than
+ // |original_rect| and |cropped_rect|. Part of the captured desktop frame
+ // will be black. See
+ // bug https://bugs.chromium.org/p/webrtc/issues/detail?id=8112 for
+ // details.
+
+ // If |window_dc_size| is smaller than |window_rect|, let's resize both
+ // |original_rect| and |cropped_rect| according to the scaling factor.
+ horizontal_scale =
+ static_cast<double>(window_dc_size.width()) / original_rect.width();
+ vertical_scale =
+ static_cast<double>(window_dc_size.height()) / original_rect.height();
+ original_rect.Scale(horizontal_scale, vertical_scale);
+ cropped_rect.Scale(horizontal_scale, vertical_scale);
+ }
+
+ std::unique_ptr<DesktopFrameWin> frame(
+ DesktopFrameWin::Create(original_rect.size(), nullptr, window_dc));
+ if (!frame.get()) {
+ RTC_LOG(LS_WARNING) << "Failed to create frame.";
+ ReleaseDC(window_, window_dc);
+ return {Result::ERROR_TEMPORARY, nullptr};
+ }
+
+ HDC mem_dc = CreateCompatibleDC(window_dc);
+ HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap());
+ BOOL result = FALSE;
+
+ // When desktop composition (Aero) is enabled each window is rendered to a
+ // private buffer allowing BitBlt() to get the window content even if the
+ // window is occluded. PrintWindow() is slower but lets rendering the window
+ // contents to an off-screen device context when Aero is not available.
+ // PrintWindow() is not supported by some applications.
+ //
+ // If Aero is enabled, we prefer BitBlt() because it's faster and avoids
+ // window flickering. Otherwise, we prefer PrintWindow() because BitBlt() may
+ // render occluding windows on top of the desired window.
+ //
+ // When composition is enabled the DC returned by GetWindowDC() doesn't always
+ // have window frame rendered correctly. Windows renders it only once and then
+ // caches the result between captures. We hack it around by calling
+ // PrintWindow() whenever window size changes, including the first time of
+ // capturing - it somehow affects what we get from BitBlt() on the subsequent
+ // captures.
+ //
+ // For Windows 8.1 and later, we want to always use PrintWindow when the
+ // cropping screen capturer falls back to the window capturer. I.e.
+ // on Windows 8.1 and later, PrintWindow is only used when the window is
+ // occluded. When the window is not occluded, it is much faster to capture
+ // the screen and to crop it to the window position and size.
+ if (rtc::IsWindows8OrLater()) {
+ // Special flag that makes PrintWindow to work on Windows 8.1 and later.
+ // Indeed certain apps (e.g. those using DirectComposition rendering) can't
+ // be captured using BitBlt or PrintWindow without this flag. Note that on
+ // Windows 8.0 this flag is not supported so the block below will fallback
+ // to the other call to PrintWindow. It seems to be very tricky to detect
+ // Windows 8.0 vs 8.1 so a try/fallback is more approriate here.
+ const UINT flags = PW_RENDERFULLCONTENT;
+ result = PrintWindow(window_, mem_dc, flags);
+ }
+
+ if (!result && (!window_capture_helper_.IsAeroEnabled() ||
+ !previous_size_.equals(frame->size()))) {
+ result = PrintWindow(window_, mem_dc, 0);
+ }
+
+ // Aero is enabled or PrintWindow() failed, use BitBlt.
+ if (!result) {
+ result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(),
+ window_dc, 0, 0, SRCCOPY);
+ }
+
+ SelectObject(mem_dc, previous_object);
+ DeleteDC(mem_dc);
+ ReleaseDC(window_, window_dc);
+
+ previous_size_ = frame->size();
+ window_size_map_[window_] = previous_size_;
+
+ frame->mutable_updated_region()->SetRect(
+ DesktopRect::MakeSize(frame->size()));
+ frame->set_top_left(
+ original_rect.top_left().subtract(GetFullscreenRect().top_left()));
+
+ if (!result) {
+ RTC_LOG(LS_ERROR) << "Both PrintWindow() and BitBlt() failed.";
+ return {Result::ERROR_TEMPORARY, nullptr};
+ }
+
+ // Rect for the data is relative to the first pixel of the frame.
+ cropped_rect.Translate(-original_rect.left(), -original_rect.top());
+ std::unique_ptr<DesktopFrame> cropped_frame =
+ CreateCroppedDesktopFrame(std::move(frame), cropped_rect);
+ RTC_DCHECK(cropped_frame);
+
+ if (capture_owned_windows) {
+ // If any owned/pop-up windows overlap the selected window, capture them
+ // and copy/composite their contents into the frame.
+ owned_windows_.clear();
+ OwnedWindowCollectorContext context(window_, unscaled_cropped_rect,
+ &window_capture_helper_,
+ &owned_windows_);
+
+ if (context.IsSelectedWindowValid()) {
+ EnumWindows(OwnedWindowCollector, reinterpret_cast<LPARAM>(&context));
+
+ if (!owned_windows_.empty()) {
+ if (!owned_window_capturer_) {
+ owned_window_capturer_ = std::make_unique<WindowCapturerWinGdi>();
+ }
+
+ // Owned windows are stored in top-down z-order, so this iterates in
+ // reverse to capture / draw them in bottom-up z-order
+ for (auto it = owned_windows_.rbegin(); it != owned_windows_.rend();
+ it++) {
+ HWND hwnd = *it;
+ if (owned_window_capturer_->SelectSource(
+ reinterpret_cast<SourceId>(hwnd))) {
+ CaptureResults results = owned_window_capturer_->CaptureFrame(
+ /*capture_owned_windows*/ false);
+
+ if (results.result != DesktopCapturer::Result::SUCCESS) {
+ // Simply log any error capturing an owned/pop-up window without
+ // bubbling it up to the caller (an expected error here is that
+ // the owned/pop-up window was closed; any unexpected errors won't
+ // fail the outer capture).
+ RTC_LOG(LS_INFO) << "Capturing owned window failed (previous "
+ "error/warning pertained to that)";
+ } else {
+ // Copy / composite the captured frame into the outer frame. This
+ // may no-op if they no longer intersect (if the owned window was
+ // moved outside the owner bounds since scheduled for capture.)
+ cropped_frame->CopyIntersectingPixelsFrom(
+ *results.frame, horizontal_scale, vertical_scale);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return {Result::SUCCESS, std::move(cropped_frame)};
+}
+
+// static
+std::unique_ptr<DesktopCapturer> WindowCapturerWinGdi::CreateRawWindowCapturer(
+ const DesktopCaptureOptions& options) {
+ return std::unique_ptr<DesktopCapturer>(new WindowCapturerWinGdi());
+}
+
+} // namespace webrtc
diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.h b/modules/desktop_capture/win/window_capturer_win_gdi.h
new file mode 100644
index 0000000000..c954c230c9
--- /dev/null
+++ b/modules/desktop_capture/win/window_capturer_win_gdi.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_
+#define MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "modules/desktop_capture/desktop_capture_options.h"
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/win/window_capture_utils.h"
+#include "modules/desktop_capture/window_finder_win.h"
+
+namespace webrtc {
+
+class WindowCapturerWinGdi : public DesktopCapturer {
+ public:
+ WindowCapturerWinGdi();
+
+ // Disallow copy and assign
+ WindowCapturerWinGdi(const WindowCapturerWinGdi&) = delete;
+ WindowCapturerWinGdi& operator=(const WindowCapturerWinGdi&) = delete;
+
+ ~WindowCapturerWinGdi() override;
+
+ static std::unique_ptr<DesktopCapturer> CreateRawWindowCapturer(
+ const DesktopCaptureOptions& options);
+
+ // DesktopCapturer interface.
+ void Start(Callback* callback) override;
+ void CaptureFrame() override;
+ bool GetSourceList(SourceList* sources) override;
+ bool SelectSource(SourceId id) override;
+ bool FocusOnSelectedSource() override;
+ bool IsOccluded(const DesktopVector& pos) override;
+
+ private:
+ struct CaptureResults {
+ Result result;
+ std::unique_ptr<DesktopFrame> frame;
+ };
+
+ CaptureResults CaptureFrame(bool capture_owned_windows);
+
+ Callback* callback_ = nullptr;
+
+ // HWND and HDC for the currently selected window or nullptr if window is not
+ // selected.
+ HWND window_ = nullptr;
+
+ DesktopSize previous_size_;
+
+ WindowCaptureHelperWin window_capture_helper_;
+
+ // This map is used to avoid flickering for the case when SelectWindow() calls
+ // are interleaved with Capture() calls.
+ std::map<HWND, DesktopSize> window_size_map_;
+
+ WindowFinderWin window_finder_;
+
+ std::vector<HWND> owned_windows_;
+ std::unique_ptr<WindowCapturerWinGdi> owned_window_capturer_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_
diff --git a/modules/desktop_capture/win/window_capturer_win_wgc.cc b/modules/desktop_capture/win/window_capturer_win_wgc.cc
new file mode 100644
index 0000000000..3f64983e0d
--- /dev/null
+++ b/modules/desktop_capture/win/window_capturer_win_wgc.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/desktop_capture/win/window_capturer_win_wgc.h"
+
+#include <memory>
+
+#include "modules/desktop_capture/desktop_capturer.h"
+
+namespace webrtc {
+
+WindowCapturerWinWgc::WindowCapturerWinWgc() = default;
+WindowCapturerWinWgc::~WindowCapturerWinWgc() = default;
+
+bool WindowCapturerWinWgc::GetSourceList(SourceList* sources) {
+ return window_capture_helper_.EnumerateCapturableWindows(sources);
+}
+
+bool WindowCapturerWinWgc::SelectSource(SourceId id) {
+ HWND window = reinterpret_cast<HWND>(id);
+ if (!IsWindowValidAndVisible(window))
+ return false;
+
+ window_ = window;
+ return true;
+}
+
+void WindowCapturerWinWgc::Start(Callback* callback) {
+ RTC_DCHECK(!callback_);
+ RTC_DCHECK(callback);
+
+ callback_ = callback;
+}
+
+void WindowCapturerWinWgc::CaptureFrame() {
+ RTC_DCHECK(callback_);
+
+ callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
+}
+
+// static
+std::unique_ptr<DesktopCapturer> WindowCapturerWinWgc::CreateRawWindowCapturer(
+ const DesktopCaptureOptions& options) {
+ return std::unique_ptr<DesktopCapturer>(new WindowCapturerWinWgc());
+}
+
+} // namespace webrtc
diff --git a/modules/desktop_capture/win/window_capturer_win_wgc.h b/modules/desktop_capture/win/window_capturer_win_wgc.h
new file mode 100644
index 0000000000..6617a2d4d9
--- /dev/null
+++ b/modules/desktop_capture/win/window_capturer_win_wgc.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_
+#define MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_
+
+#include <memory>
+
+#include "modules/desktop_capture/desktop_capture_options.h"
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/win/window_capture_utils.h"
+
+namespace webrtc {
+
+class WindowCapturerWinWgc : public DesktopCapturer {
+ public:
+ WindowCapturerWinWgc();
+
+ // Disallow copy and assign
+ WindowCapturerWinWgc(const WindowCapturerWinWgc&) = delete;
+ WindowCapturerWinWgc& operator=(const WindowCapturerWinWgc&) = delete;
+
+ ~WindowCapturerWinWgc() override;
+
+ static std::unique_ptr<DesktopCapturer> CreateRawWindowCapturer(
+ const DesktopCaptureOptions& options);
+
+ // DesktopCapturer interface.
+ void Start(Callback* callback) override;
+ void CaptureFrame() override;
+ bool GetSourceList(SourceList* sources) override;
+ bool SelectSource(SourceId id) override;
+
+ private:
+ Callback* callback_ = nullptr;
+
+ // HWND for the currently selected window or nullptr if window is not
+ // selected.
+ HWND window_ = nullptr;
+ WindowCaptureHelperWin window_capture_helper_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_
diff --git a/modules/desktop_capture/window_capturer_win.cc b/modules/desktop_capture/window_capturer_win.cc
index 4e16c44ced..a63a24df58 100644
--- a/modules/desktop_capture/window_capturer_win.cc
+++ b/modules/desktop_capture/window_capturer_win.cc
@@ -8,472 +8,24 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <assert.h>
-
-#include <memory>
-
-#include "modules/desktop_capture/cropped_desktop_frame.h"
+#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
-#include "modules/desktop_capture/desktop_frame_win.h"
-#include "modules/desktop_capture/win/screen_capture_utils.h"
-#include "modules/desktop_capture/win/selected_window_context.h"
-#include "modules/desktop_capture/win/window_capture_utils.h"
-#include "modules/desktop_capture/window_finder_win.h"
-#include "rtc_base/arraysize.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/string_utils.h"
-#include "rtc_base/trace_event.h"
-#include "rtc_base/win32.h"
+#include "modules/desktop_capture/win/window_capturer_win_gdi.h"
+#include "modules/desktop_capture/win/window_capturer_win_wgc.h"
namespace webrtc {
-namespace {
-
-BOOL CALLBACK WindowsEnumerationHandler(HWND hwnd, LPARAM param) {
- DesktopCapturer::SourceList* list =
- reinterpret_cast<DesktopCapturer::SourceList*>(param);
-
- // Skip windows that are invisible, minimized, have no title, or are owned,
- // unless they have the app window style set.
- int len = GetWindowTextLength(hwnd);
- HWND owner = GetWindow(hwnd, GW_OWNER);
- LONG exstyle = GetWindowLong(hwnd, GWL_EXSTYLE);
- if (len == 0 || IsIconic(hwnd) || !IsWindowVisible(hwnd) ||
- (owner && !(exstyle & WS_EX_APPWINDOW))) {
- return TRUE;
- }
- // Skip unresponsive windows. Set timout with 50ms, in case system is under
- // heavy load, the check can wait longer but wont' be too long to delay the
- // the enumeration.
- const UINT uTimeout = 50; // ms
- if (!SendMessageTimeout(hwnd, WM_NULL, 0, 0, SMTO_ABORTIFHUNG, uTimeout,
- nullptr)) {
- return TRUE;
- }
-
- // Skip the Program Manager window and the Start button.
- const size_t kClassLength = 256;
- WCHAR class_name[kClassLength];
- const int class_name_length = GetClassNameW(hwnd, class_name, kClassLength);
- if (class_name_length < 1)
- return TRUE;
-
- // Skip Program Manager window and the Start button. This is the same logic
- // that's used in Win32WindowPicker in libjingle. Consider filtering other
- // windows as well (e.g. toolbars).
- if (wcscmp(class_name, L"Progman") == 0 || wcscmp(class_name, L"Button") == 0)
- return TRUE;
-
- DesktopCapturer::Source window;
- window.id = reinterpret_cast<WindowId>(hwnd);
-
- const size_t kTitleLength = 500;
- WCHAR window_title[kTitleLength];
- // Truncate the title if it's longer than kTitleLength.
- GetWindowTextW(hwnd, window_title, kTitleLength);
- window.title = rtc::ToUtf8(window_title);
-
- // Skip windows when we failed to convert the title or it is empty.
- if (window.title.empty())
- return TRUE;
-
- list->push_back(window);
-
- return TRUE;
-}
-
-// Used to pass input/output data during the EnumWindows call to collect
-// owned/pop-up windows that should be captured.
-struct OwnedWindowCollectorContext : public SelectedWindowContext {
- OwnedWindowCollectorContext(HWND selected_window,
- DesktopRect selected_window_rect,
- WindowCaptureHelperWin* window_capture_helper,
- std::vector<HWND>* owned_windows)
- : SelectedWindowContext(selected_window,
- selected_window_rect,
- window_capture_helper),
- owned_windows(owned_windows) {}
-
- std::vector<HWND>* owned_windows;
-};
-
-// Called via EnumWindows for each root window; adds owned/pop-up windows that
-// should be captured to a vector it's passed.
-BOOL CALLBACK OwnedWindowCollector(HWND hwnd, LPARAM param) {
- OwnedWindowCollectorContext* context =
- reinterpret_cast<OwnedWindowCollectorContext*>(param);
- if (hwnd == context->selected_window()) {
- // Windows are enumerated in top-down z-order, so we can stop enumerating
- // upon reaching the selected window.
- return FALSE;
- }
-
- // Skip windows that aren't visible pop-up windows.
- if (!(GetWindowLong(hwnd, GWL_STYLE) & WS_POPUP) ||
- !context->window_capture_helper()->IsWindowVisibleOnCurrentDesktop(
- hwnd)) {
- return TRUE;
- }
-
- // Owned windows that intersect the selected window should be captured.
- if (context->IsWindowOwnedBySelectedWindow(hwnd) &&
- context->IsWindowOverlappingSelectedWindow(hwnd)) {
- // Skip windows that draw shadows around menus. These "SysShadow" windows
- // would otherwise be captured as solid black bars with no transparency
- // gradient (since this capturer doesn't detect / respect variations in the
- // window alpha channel). Any other semi-transparent owned windows will be
- // captured fully-opaque. This seems preferable to excluding them (at least
- // when they have content aside from a solid fill color / visual adornment;
- // e.g. some tooltips have the transparent style set).
- if (GetWindowLong(hwnd, GWL_EXSTYLE) & WS_EX_TRANSPARENT) {
- const WCHAR kSysShadow[] = L"SysShadow";
- const size_t kClassLength = arraysize(kSysShadow);
- WCHAR class_name[kClassLength];
- const int class_name_length =
- GetClassNameW(hwnd, class_name, kClassLength);
- if (class_name_length == kClassLength - 1 &&
- wcscmp(class_name, kSysShadow) == 0) {
- return TRUE;
- }
- }
-
- context->owned_windows->push_back(hwnd);
- }
-
- return TRUE;
-}
-
-class WindowCapturerWin : public DesktopCapturer {
- public:
- WindowCapturerWin();
- ~WindowCapturerWin() override;
-
- // DesktopCapturer interface.
- void Start(Callback* callback) override;
- void CaptureFrame() override;
- bool GetSourceList(SourceList* sources) override;
- bool SelectSource(SourceId id) override;
- bool FocusOnSelectedSource() override;
- bool IsOccluded(const DesktopVector& pos) override;
-
- private:
- struct CaptureResults {
- Result result;
- std::unique_ptr<DesktopFrame> frame;
- };
-
- CaptureResults CaptureFrame(bool capture_owned_windows);
-
- Callback* callback_ = nullptr;
-
- // HWND and HDC for the currently selected window or nullptr if window is not
- // selected.
- HWND window_ = nullptr;
-
- DesktopSize previous_size_;
-
- WindowCaptureHelperWin window_capture_helper_;
-
- // This map is used to avoid flickering for the case when SelectWindow() calls
- // are interleaved with Capture() calls.
- std::map<HWND, DesktopSize> window_size_map_;
-
- WindowFinderWin window_finder_;
-
- std::vector<HWND> owned_windows_;
- std::unique_ptr<WindowCapturerWin> owned_window_capturer_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerWin);
-};
-
-WindowCapturerWin::WindowCapturerWin() {}
-WindowCapturerWin::~WindowCapturerWin() {}
-
-bool WindowCapturerWin::GetSourceList(SourceList* sources) {
- SourceList result;
- LPARAM param = reinterpret_cast<LPARAM>(&result);
- // EnumWindows only enumerates root windows.
- if (!EnumWindows(&WindowsEnumerationHandler, param))
- return false;
-
- for (auto it = result.begin(); it != result.end();) {
- if (!window_capture_helper_.IsWindowVisibleOnCurrentDesktop(
- reinterpret_cast<HWND>(it->id))) {
- it = result.erase(it);
- } else {
- ++it;
- }
- }
- sources->swap(result);
-
- std::map<HWND, DesktopSize> new_map;
- for (const auto& item : *sources) {
- HWND hwnd = reinterpret_cast<HWND>(item.id);
- new_map[hwnd] = window_size_map_[hwnd];
- }
- window_size_map_.swap(new_map);
-
- return true;
-}
-
-bool WindowCapturerWin::SelectSource(SourceId id) {
- HWND window = reinterpret_cast<HWND>(id);
- if (!IsWindow(window) || !IsWindowVisible(window) || IsIconic(window))
- return false;
- window_ = window;
- // When a window is not in the map, window_size_map_[window] will create an
- // item with DesktopSize (0, 0).
- previous_size_ = window_size_map_[window];
- return true;
-}
-
-bool WindowCapturerWin::FocusOnSelectedSource() {
- if (!window_)
- return false;
-
- if (!IsWindow(window_) || !IsWindowVisible(window_) || IsIconic(window_))
- return false;
-
- return BringWindowToTop(window_) != FALSE &&
- SetForegroundWindow(window_) != FALSE;
-}
-
-bool WindowCapturerWin::IsOccluded(const DesktopVector& pos) {
- DesktopVector sys_pos = pos.add(GetFullscreenRect().top_left());
- HWND hwnd =
- reinterpret_cast<HWND>(window_finder_.GetWindowUnderPoint(sys_pos));
-
- return hwnd != window_ &&
- std::find(owned_windows_.begin(), owned_windows_.end(), hwnd) ==
- owned_windows_.end();
-}
-
-void WindowCapturerWin::Start(Callback* callback) {
- assert(!callback_);
- assert(callback);
-
- callback_ = callback;
-}
-
-void WindowCapturerWin::CaptureFrame() {
- CaptureResults results = CaptureFrame(/*capture_owned_windows*/ true);
-
- callback_->OnCaptureResult(results.result, std::move(results.frame));
-}
-
-WindowCapturerWin::CaptureResults WindowCapturerWin::CaptureFrame(
- bool capture_owned_windows) {
- TRACE_EVENT0("webrtc", "WindowCapturerWin::CaptureFrame");
-
- if (!window_) {
- RTC_LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError();
- return {Result::ERROR_PERMANENT, nullptr};
- }
-
- // Stop capturing if the window has been closed.
- if (!IsWindow(window_)) {
- RTC_LOG(LS_ERROR) << "target window has been closed";
- return {Result::ERROR_PERMANENT, nullptr};
- }
-
- // Determine the window region excluding any resize border, and including
- // any visible border if capturing an owned window / dialog. (Don't include
- // any visible border for the selected window for consistency with
- // CroppingWindowCapturerWin, which would expose a bit of the background
- // through the partially-transparent border.)
- const bool avoid_cropping_border = !capture_owned_windows;
- DesktopRect cropped_rect;
- DesktopRect original_rect;
-
- if (!GetCroppedWindowRect(window_, avoid_cropping_border, &cropped_rect,
- &original_rect)) {
- RTC_LOG(LS_WARNING) << "Failed to get drawable window area: "
- << GetLastError();
- return {Result::ERROR_TEMPORARY, nullptr};
- }
-
- // Return a 1x1 black frame if the window is minimized or invisible on current
- // desktop, to match behavior on mace. Window can be temporarily invisible
- // during the transition of full screen mode on/off.
- if (original_rect.is_empty() ||
- !window_capture_helper_.IsWindowVisibleOnCurrentDesktop(window_)) {
- std::unique_ptr<DesktopFrame> frame(
- new BasicDesktopFrame(DesktopSize(1, 1)));
-
- previous_size_ = frame->size();
- window_size_map_[window_] = previous_size_;
- return {Result::SUCCESS, std::move(frame)};
- }
-
- HDC window_dc = GetWindowDC(window_);
- if (!window_dc) {
- RTC_LOG(LS_WARNING) << "Failed to get window DC: " << GetLastError();
- return {Result::ERROR_TEMPORARY, nullptr};
- }
-
- DesktopRect unscaled_cropped_rect = cropped_rect;
- double horizontal_scale = 1.0;
- double vertical_scale = 1.0;
-
- DesktopSize window_dc_size;
- if (GetDcSize(window_dc, &window_dc_size)) {
- // The |window_dc_size| is used to detect the scaling of the original
- // window. If the application does not support high-DPI settings, it will
- // be scaled by Windows according to the scaling setting.
- // https://www.google.com/search?q=windows+scaling+settings&ie=UTF-8
- // So the size of the |window_dc|, i.e. the bitmap we can retrieve from
- // PrintWindow() or BitBlt() function, will be smaller than
- // |original_rect| and |cropped_rect|. Part of the captured desktop frame
- // will be black. See
- // bug https://bugs.chromium.org/p/webrtc/issues/detail?id=8112 for
- // details.
-
- // If |window_dc_size| is smaller than |window_rect|, let's resize both
- // |original_rect| and |cropped_rect| according to the scaling factor.
- horizontal_scale =
- static_cast<double>(window_dc_size.width()) / original_rect.width();
- vertical_scale =
- static_cast<double>(window_dc_size.height()) / original_rect.height();
- original_rect.Scale(horizontal_scale, vertical_scale);
- cropped_rect.Scale(horizontal_scale, vertical_scale);
- }
-
- std::unique_ptr<DesktopFrameWin> frame(
- DesktopFrameWin::Create(original_rect.size(), nullptr, window_dc));
- if (!frame.get()) {
- RTC_LOG(LS_WARNING) << "Failed to create frame.";
- ReleaseDC(window_, window_dc);
- return {Result::ERROR_TEMPORARY, nullptr};
- }
-
- HDC mem_dc = CreateCompatibleDC(window_dc);
- HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap());
- BOOL result = FALSE;
-
- // When desktop composition (Aero) is enabled each window is rendered to a
- // private buffer allowing BitBlt() to get the window content even if the
- // window is occluded. PrintWindow() is slower but lets rendering the window
- // contents to an off-screen device context when Aero is not available.
- // PrintWindow() is not supported by some applications.
- //
- // If Aero is enabled, we prefer BitBlt() because it's faster and avoids
- // window flickering. Otherwise, we prefer PrintWindow() because BitBlt() may
- // render occluding windows on top of the desired window.
- //
- // When composition is enabled the DC returned by GetWindowDC() doesn't always
- // have window frame rendered correctly. Windows renders it only once and then
- // caches the result between captures. We hack it around by calling
- // PrintWindow() whenever window size changes, including the first time of
- // capturing - it somehow affects what we get from BitBlt() on the subsequent
- // captures.
- //
- // For Windows 8.1 and later, we want to always use PrintWindow when the
- // cropping screen capturer falls back to the window capturer. I.e.
- // on Windows 8.1 and later, PrintWindow is only used when the window is
- // occluded. When the window is not occluded, it is much faster to capture
- // the screen and to crop it to the window position and size.
- if (rtc::IsWindows8OrLater()) {
- // Special flag that makes PrintWindow to work on Windows 8.1 and later.
- // Indeed certain apps (e.g. those using DirectComposition rendering) can't
- // be captured using BitBlt or PrintWindow without this flag. Note that on
- // Windows 8.0 this flag is not supported so the block below will fallback
- // to the other call to PrintWindow. It seems to be very tricky to detect
- // Windows 8.0 vs 8.1 so a try/fallback is more approriate here.
- const UINT flags = PW_RENDERFULLCONTENT;
- result = PrintWindow(window_, mem_dc, flags);
- }
-
- if (!result && (!window_capture_helper_.IsAeroEnabled() ||
- !previous_size_.equals(frame->size()))) {
- result = PrintWindow(window_, mem_dc, 0);
- }
-
- // Aero is enabled or PrintWindow() failed, use BitBlt.
- if (!result) {
- result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(),
- window_dc, 0, 0, SRCCOPY);
- }
-
- SelectObject(mem_dc, previous_object);
- DeleteDC(mem_dc);
- ReleaseDC(window_, window_dc);
-
- previous_size_ = frame->size();
- window_size_map_[window_] = previous_size_;
-
- frame->mutable_updated_region()->SetRect(
- DesktopRect::MakeSize(frame->size()));
- frame->set_top_left(
- original_rect.top_left().subtract(GetFullscreenRect().top_left()));
-
- if (!result) {
- RTC_LOG(LS_ERROR) << "Both PrintWindow() and BitBlt() failed.";
- return {Result::ERROR_TEMPORARY, nullptr};
- }
-
- // Rect for the data is relative to the first pixel of the frame.
- cropped_rect.Translate(-original_rect.left(), -original_rect.top());
- std::unique_ptr<DesktopFrame> cropped_frame =
- CreateCroppedDesktopFrame(std::move(frame), cropped_rect);
- RTC_DCHECK(cropped_frame);
-
- if (capture_owned_windows) {
- // If any owned/pop-up windows overlap the selected window, capture them
- // and copy/composite their contents into the frame.
- owned_windows_.clear();
- OwnedWindowCollectorContext context(window_, unscaled_cropped_rect,
- &window_capture_helper_,
- &owned_windows_);
-
- if (context.IsSelectedWindowValid()) {
- EnumWindows(OwnedWindowCollector, reinterpret_cast<LPARAM>(&context));
-
- if (!owned_windows_.empty()) {
- if (!owned_window_capturer_) {
- owned_window_capturer_ = std::make_unique<WindowCapturerWin>();
- }
-
- // Owned windows are stored in top-down z-order, so this iterates in
- // reverse to capture / draw them in bottom-up z-order
- for (auto it = owned_windows_.rbegin(); it != owned_windows_.rend();
- it++) {
- HWND hwnd = *it;
- if (owned_window_capturer_->SelectSource(
- reinterpret_cast<SourceId>(hwnd))) {
- CaptureResults results = owned_window_capturer_->CaptureFrame(
- /*capture_owned_windows*/ false);
-
- if (results.result != DesktopCapturer::Result::SUCCESS) {
- // Simply log any error capturing an owned/pop-up window without
- // bubbling it up to the caller (an expected error here is that
- // the owned/pop-up window was closed; any unexpected errors won't
- // fail the outer capture).
- RTC_LOG(LS_INFO) << "Capturing owned window failed (previous "
- "error/warning pertained to that)";
- } else {
- // Copy / composite the captured frame into the outer frame. This
- // may no-op if they no longer intersect (if the owned window was
- // moved outside the owner bounds since scheduled for capture.)
- cropped_frame->CopyIntersectingPixelsFrom(
- *results.frame, horizontal_scale, vertical_scale);
- }
- }
- }
- }
- }
- }
-
- return {Result::SUCCESS, std::move(cropped_frame)};
-}
-
-} // namespace
-
// static
std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawWindowCapturer(
const DesktopCaptureOptions& options) {
- return std::unique_ptr<DesktopCapturer>(new WindowCapturerWin());
+ // TODO(bugs.webrtc.org/11760): Add a WebRTC field trial (or similar
+ // mechanism) and Windows version check here that leads to use of the WGC
+ // capturer once it is fully implemented.
+ if (true) {
+ return WindowCapturerWinGdi::CreateRawWindowCapturer(options);
+ } else {
+ return WindowCapturerWinWgc::CreateRawWindowCapturer(options);
+ }
}
} // namespace webrtc
diff --git a/modules/include/module_common_types.cc b/modules/include/module_common_types.cc
index 86f753356d..a589312ec2 100644
--- a/modules/include/module_common_types.cc
+++ b/modules/include/module_common_types.cc
@@ -90,25 +90,28 @@ void RTPFragmentationHeader::CopyFrom(const RTPFragmentationHeader& src) {
void RTPFragmentationHeader::Resize(size_t size) {
const uint16_t size16 = rtc::dchecked_cast<uint16_t>(size);
if (fragmentationVectorSize < size16) {
- uint16_t oldVectorSize = fragmentationVectorSize;
- {
- // offset
- size_t* oldOffsets = fragmentationOffset;
- fragmentationOffset = new size_t[size16];
- memset(fragmentationOffset + oldVectorSize, 0,
- sizeof(size_t) * (size16 - oldVectorSize));
- // copy old values
- memcpy(fragmentationOffset, oldOffsets, sizeof(size_t) * oldVectorSize);
- delete[] oldOffsets;
- }
- // length
- {
- size_t* oldLengths = fragmentationLength;
- fragmentationLength = new size_t[size16];
- memset(fragmentationLength + oldVectorSize, 0,
- sizeof(size_t) * (size16 - oldVectorSize));
- memcpy(fragmentationLength, oldLengths, sizeof(size_t) * oldVectorSize);
- delete[] oldLengths;
+ uint16_t old_vector_size = fragmentationVectorSize;
+ size_t* old_offsets = fragmentationOffset;
+ fragmentationOffset = new size_t[size16];
+ memset(fragmentationOffset + old_vector_size, 0,
+ sizeof(size_t) * (size16 - old_vector_size));
+ size_t* old_lengths = fragmentationLength;
+ fragmentationLength = new size_t[size16];
+ memset(fragmentationLength + old_vector_size, 0,
+ sizeof(size_t) * (size16 - old_vector_size));
+
+ // copy old values
+ if (old_vector_size > 0) {
+ if (old_offsets != nullptr) {
+ memcpy(fragmentationOffset, old_offsets,
+ sizeof(size_t) * old_vector_size);
+ delete[] old_offsets;
+ }
+ if (old_lengths != nullptr) {
+ memcpy(fragmentationLength, old_lengths,
+ sizeof(size_t) * old_vector_size);
+ delete[] old_lengths;
+ }
}
fragmentationVectorSize = size16;
}
diff --git a/modules/pacing/BUILD.gn b/modules/pacing/BUILD.gn
index 6f65c33942..1a4e9a5512 100644
--- a/modules/pacing/BUILD.gn
+++ b/modules/pacing/BUILD.gn
@@ -49,6 +49,7 @@ rtc_library("pacing") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
@@ -57,6 +58,8 @@ rtc_library("pacing") {
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
"../utility",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -99,6 +102,7 @@ if (rtc_include_tests) {
"../../rtc_base/experiments:alr_experiment",
"../../system_wrappers",
"../../system_wrappers:field_trial",
+ "../../test:explicit_key_value_config",
"../../test:field_trial",
"../../test:test_support",
"../../test/time_controller:time_controller",
diff --git a/modules/pacing/bitrate_prober.cc b/modules/pacing/bitrate_prober.cc
index e7ce01d95c..1949570d86 100644
--- a/modules/pacing/bitrate_prober.cc
+++ b/modules/pacing/bitrate_prober.cc
@@ -26,7 +26,7 @@ namespace {
// The min probe packet size is scaled with the bitrate we're probing at.
// This defines the max min probe packet size, meaning that on high bitrates
// we have a min probe packet size of 200 bytes.
-constexpr size_t kMinProbePacketSize = 200;
+constexpr DataSize kMinProbePacketSize = DataSize::Bytes(200);
constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds(5);
@@ -37,13 +37,17 @@ BitrateProberConfig::BitrateProberConfig(
: min_probe_packets_sent("min_probe_packets_sent", 5),
min_probe_delta("min_probe_delta", TimeDelta::Millis(1)),
min_probe_duration("min_probe_duration", TimeDelta::Millis(15)),
- max_probe_delay("max_probe_delay", TimeDelta::Millis(3)) {
- ParseFieldTrial({&min_probe_packets_sent, &min_probe_delta,
- &min_probe_duration, &max_probe_delay},
- key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration"));
- ParseFieldTrial({&min_probe_packets_sent, &min_probe_delta,
- &min_probe_duration, &max_probe_delay},
- key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior"));
+ max_probe_delay("max_probe_delay", TimeDelta::Millis(3)),
+ // TODO(bugs.webrtc.org/11780): Change to default true.
+ abort_delayed_probes("abort_delayed_probes", false) {
+ ParseFieldTrial(
+ {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration,
+ &max_probe_delay, &abort_delayed_probes},
+ key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration"));
+ ParseFieldTrial(
+ {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration,
+ &max_probe_delay, &abort_delayed_probes},
+ key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior"));
}
BitrateProber::~BitrateProber() {
@@ -74,12 +78,11 @@ void BitrateProber::SetEnabled(bool enable) {
}
}
-void BitrateProber::OnIncomingPacket(size_t packet_size) {
+void BitrateProber::OnIncomingPacket(DataSize packet_size) {
// Don't initialize probing unless we have something large enough to start
// probing.
if (probing_state_ == ProbingState::kInactive && !clusters_.empty() &&
- packet_size >=
- std::min<size_t>(RecommendedMinProbeSize(), kMinProbePacketSize)) {
+ packet_size >= std::min(RecommendedMinProbeSize(), kMinProbePacketSize)) {
// Send next probe right away.
next_probe_time_ = Timestamp::MinusInfinity();
probing_state_ = ProbingState::kActive;
@@ -125,7 +128,8 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const {
return Timestamp::PlusInfinity();
}
- if (next_probe_time_.IsFinite() &&
+ // Legacy behavior, just warn about late probe and return as if not probing.
+ if (!config_.abort_delayed_probes && next_probe_time_.IsFinite() &&
now - next_probe_time_ > config_.max_probe_delay.Get()) {
RTC_DLOG(LS_WARNING) << "Probe delay too high"
" (next_ms:"
@@ -137,9 +141,24 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const {
return next_probe_time_;
}
-PacedPacketInfo BitrateProber::CurrentCluster() const {
- RTC_DCHECK(!clusters_.empty());
- RTC_DCHECK(probing_state_ == ProbingState::kActive);
+absl::optional<PacedPacketInfo> BitrateProber::CurrentCluster(Timestamp now) {
+ if (clusters_.empty() || probing_state_ != ProbingState::kActive) {
+ return absl::nullopt;
+ }
+
+ if (config_.abort_delayed_probes && next_probe_time_.IsFinite() &&
+ now - next_probe_time_ > config_.max_probe_delay.Get()) {
+ RTC_DLOG(LS_WARNING) << "Probe delay too high"
+ " (next_ms:"
+ << next_probe_time_.ms() << ", now_ms: " << now.ms()
+ << "), discarding probe cluster.";
+ clusters_.pop();
+ if (clusters_.empty()) {
+ probing_state_ = ProbingState::kSuspended;
+ return absl::nullopt;
+ }
+ }
+
PacedPacketInfo info = clusters_.front().pace_info;
info.probe_cluster_bytes_sent = clusters_.front().sent_bytes;
return info;
@@ -148,15 +167,18 @@ PacedPacketInfo BitrateProber::CurrentCluster() const {
// Probe size is recommended based on the probe bitrate required. We choose
// a minimum of twice |kMinProbeDeltaMs| interval to allow scheduling to be
// feasible.
-size_t BitrateProber::RecommendedMinProbeSize() const {
- RTC_DCHECK(!clusters_.empty());
- return clusters_.front().pace_info.send_bitrate_bps * 2 *
- config_.min_probe_delta->ms() / (8 * 1000);
+DataSize BitrateProber::RecommendedMinProbeSize() const {
+ if (clusters_.empty()) {
+ return DataSize::Zero();
+ }
+ DataRate send_rate =
+ DataRate::BitsPerSec(clusters_.front().pace_info.send_bitrate_bps);
+ return 2 * send_rate * config_.min_probe_delta;
}
-void BitrateProber::ProbeSent(Timestamp now, size_t bytes) {
+void BitrateProber::ProbeSent(Timestamp now, DataSize size) {
RTC_DCHECK(probing_state_ == ProbingState::kActive);
- RTC_DCHECK_GT(bytes, 0);
+ RTC_DCHECK(!size.IsZero());
if (!clusters_.empty()) {
ProbeCluster* cluster = &clusters_.front();
@@ -164,7 +186,7 @@ void BitrateProber::ProbeSent(Timestamp now, size_t bytes) {
RTC_DCHECK(cluster->started_at.IsInfinite());
cluster->started_at = now;
}
- cluster->sent_bytes += static_cast<int>(bytes);
+ cluster->sent_bytes += size.bytes<int>();
cluster->sent_probes += 1;
next_probe_time_ = CalculateNextProbeTime(*cluster);
if (cluster->sent_bytes >= cluster->pace_info.probe_cluster_min_bytes &&
@@ -178,8 +200,9 @@ void BitrateProber::ProbeSent(Timestamp now, size_t bytes) {
clusters_.pop();
}
- if (clusters_.empty())
+ if (clusters_.empty()) {
probing_state_ = ProbingState::kSuspended;
+ }
}
}
diff --git a/modules/pacing/bitrate_prober.h b/modules/pacing/bitrate_prober.h
index 3ebe26ac1f..5a89aac435 100644
--- a/modules/pacing/bitrate_prober.h
+++ b/modules/pacing/bitrate_prober.h
@@ -35,9 +35,11 @@ struct BitrateProberConfig {
FieldTrialParameter<TimeDelta> min_probe_delta;
// The minimum probing duration.
FieldTrialParameter<TimeDelta> min_probe_duration;
- // Maximum amount of time each probe can be delayed. Probe cluster is reset
- // and retried from the start when this limit is reached.
+ // Maximum amount of time each probe can be delayed.
FieldTrialParameter<TimeDelta> max_probe_delay;
+ // If NextProbeTime() is called with a delay higher than specified by
+ // |max_probe_delay|, abort it.
+ FieldTrialParameter<bool> abort_delayed_probes;
};
// Note that this class isn't thread-safe by itself and therefore relies
@@ -57,29 +59,29 @@ class BitrateProber {
// Initializes a new probing session if the prober is allowed to probe. Does
// not initialize the prober unless the packet size is large enough to probe
// with.
- void OnIncomingPacket(size_t packet_size);
+ void OnIncomingPacket(DataSize packet_size);
// Create a cluster used to probe for |bitrate_bps| with |num_probes| number
// of probes.
void CreateProbeCluster(DataRate bitrate, Timestamp now, int cluster_id);
- // Returns the at which the next probe should be sent to get accurate probing.
- // If probing is not desired at this time, Timestamp::PlusInfinity() will be
- // returned.
+ // Returns the time at which the next probe should be sent to get accurate
+ // probing. If probing is not desired at this time, Timestamp::PlusInfinity()
+ // will be returned.
+ // TODO(bugs.webrtc.org/11780): Remove |now| argument when old mode is gone.
Timestamp NextProbeTime(Timestamp now) const;
// Information about the current probing cluster.
- PacedPacketInfo CurrentCluster() const;
+ absl::optional<PacedPacketInfo> CurrentCluster(Timestamp now);
// Returns the minimum number of bytes that the prober recommends for
- // the next probe.
- size_t RecommendedMinProbeSize() const;
+ // the next probe, or zero if not probing.
+ DataSize RecommendedMinProbeSize() const;
// Called to report to the prober that a probe has been sent. In case of
// multiple packets per probe, this call would be made at the end of sending
- // the last packet in probe. |probe_size| is the total size of all packets
- // in probe.
- void ProbeSent(Timestamp now, size_t probe_size);
+ // the last packet in probe. |size| is the total size of all packets in probe.
+ void ProbeSent(Timestamp now, DataSize size);
private:
enum class ProbingState {
diff --git a/modules/pacing/bitrate_prober_unittest.cc b/modules/pacing/bitrate_prober_unittest.cc
index 62277a0d2f..5627db0519 100644
--- a/modules/pacing/bitrate_prober_unittest.cc
+++ b/modules/pacing/bitrate_prober_unittest.cc
@@ -12,6 +12,7 @@
#include <algorithm>
+#include "test/explicit_key_value_config.h"
#include "test/gtest.h"
namespace webrtc {
@@ -28,7 +29,7 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
const DataRate kTestBitrate1 = DataRate::KilobitsPerSec(900);
const DataRate kTestBitrate2 = DataRate::KilobitsPerSec(1800);
const int kClusterSize = 5;
- const int kProbeSize = 1000;
+ const DataSize kProbeSize = DataSize::Bytes(1000);
const TimeDelta kMinProbeDuration = TimeDelta::Millis(15);
prober.CreateProbeCluster(kTestBitrate1, now, 0);
@@ -37,7 +38,7 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
prober.OnIncomingPacket(kProbeSize);
EXPECT_TRUE(prober.is_probing());
- EXPECT_EQ(0, prober.CurrentCluster().probe_cluster_id);
+ EXPECT_EQ(0, prober.CurrentCluster(now)->probe_cluster_id);
// First packet should probe as soon as possible.
EXPECT_EQ(Timestamp::MinusInfinity(), prober.NextProbeTime(now));
@@ -45,14 +46,13 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
for (int i = 0; i < kClusterSize; ++i) {
now = std::max(now, prober.NextProbeTime(now));
EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now)));
- EXPECT_EQ(0, prober.CurrentCluster().probe_cluster_id);
+ EXPECT_EQ(0, prober.CurrentCluster(now)->probe_cluster_id);
prober.ProbeSent(now, kProbeSize);
}
EXPECT_GE(now - start_time, kMinProbeDuration);
// Verify that the actual bitrate is withing 10% of the target.
- DataRate bitrate =
- DataSize::Bytes(kProbeSize * (kClusterSize - 1)) / (now - start_time);
+ DataRate bitrate = kProbeSize * (kClusterSize - 1) / (now - start_time);
EXPECT_GT(bitrate, kTestBitrate1 * 0.9);
EXPECT_LT(bitrate, kTestBitrate1 * 1.1);
@@ -62,14 +62,14 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
for (int i = 0; i < kClusterSize; ++i) {
now = std::max(now, prober.NextProbeTime(now));
EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now)));
- EXPECT_EQ(1, prober.CurrentCluster().probe_cluster_id);
+ EXPECT_EQ(1, prober.CurrentCluster(now)->probe_cluster_id);
prober.ProbeSent(now, kProbeSize);
}
// Verify that the actual bitrate is withing 10% of the target.
TimeDelta duration = now - probe2_started;
EXPECT_GE(duration, kMinProbeDuration);
- bitrate = DataSize::Bytes(kProbeSize * (kClusterSize - 1)) / duration;
+ bitrate = (kProbeSize * (kClusterSize - 1)) / duration;
EXPECT_GT(bitrate, kTestBitrate2 * 0.9);
EXPECT_LT(bitrate, kTestBitrate2 * 1.1);
@@ -80,6 +80,7 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
TEST(BitrateProberTest, DoesntProbeWithoutRecentPackets) {
const FieldTrialBasedConfig config;
BitrateProber prober(config);
+ const DataSize kProbeSize = DataSize::Bytes(1000);
Timestamp now = Timestamp::Zero();
EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity());
@@ -87,19 +88,74 @@ TEST(BitrateProberTest, DoesntProbeWithoutRecentPackets) {
prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, 0);
EXPECT_FALSE(prober.is_probing());
- prober.OnIncomingPacket(1000);
+ prober.OnIncomingPacket(kProbeSize);
EXPECT_TRUE(prober.is_probing());
EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now)));
- prober.ProbeSent(now, 1000);
- // Let time pass, no large enough packets put into prober.
- now += TimeDelta::Seconds(6);
+ prober.ProbeSent(now, kProbeSize);
+}
+
+TEST(BitrateProberTest, DoesntDiscardDelayedProbesInLegacyMode) {
+ const TimeDelta kMaxProbeDelay = TimeDelta::Millis(3);
+ const test::ExplicitKeyValueConfig trials(
+ "WebRTC-Bwe-ProbingBehavior/"
+ "abort_delayed_probes:0,"
+ "max_probe_delay:3ms/");
+ BitrateProber prober(trials);
+ const DataSize kProbeSize = DataSize::Bytes(1000);
+
+ Timestamp now = Timestamp::Zero();
+ prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, 0);
+ prober.OnIncomingPacket(kProbeSize);
+ EXPECT_TRUE(prober.is_probing());
+ EXPECT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0);
+ // Advance to first probe time and indicate sent probe.
+ now = std::max(now, prober.NextProbeTime(now));
+ prober.ProbeSent(now, kProbeSize);
+
+ // Advance time 1ms past timeout for the next probe.
+ Timestamp next_probe_time = prober.NextProbeTime(now);
+ EXPECT_GT(next_probe_time, now);
+ now += next_probe_time - now + kMaxProbeDelay + TimeDelta::Millis(1);
+
EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity());
// Check that legacy behaviour where prober is reset in TimeUntilNextProbe is
// no longer there. Probes are no longer retried if they are timed out.
- prober.OnIncomingPacket(1000);
+ prober.OnIncomingPacket(kProbeSize);
EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity());
}
+TEST(BitrateProberTest, DiscardsDelayedProbesWhenNotInLegacyMode) {
+ const TimeDelta kMaxProbeDelay = TimeDelta::Millis(3);
+ const test::ExplicitKeyValueConfig trials(
+ "WebRTC-Bwe-ProbingBehavior/"
+ "abort_delayed_probes:1,"
+ "max_probe_delay:3ms/");
+ BitrateProber prober(trials);
+ const DataSize kProbeSize = DataSize::Bytes(1000);
+
+ Timestamp now = Timestamp::Zero();
+
+ // Add two probe clusters.
+ prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, /*id=*/0);
+
+ prober.OnIncomingPacket(kProbeSize);
+ EXPECT_TRUE(prober.is_probing());
+ EXPECT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0);
+ // Advance to first probe time and indicate sent probe.
+ now = std::max(now, prober.NextProbeTime(now));
+ prober.ProbeSent(now, kProbeSize);
+
+ // Advance time 1ms past timeout for the next probe.
+ Timestamp next_probe_time = prober.NextProbeTime(now);
+ EXPECT_GT(next_probe_time, now);
+ now += next_probe_time - now + kMaxProbeDelay + TimeDelta::Millis(1);
+
+ // Still indicates the time we wanted to probe at.
+ EXPECT_EQ(prober.NextProbeTime(now), next_probe_time);
+ // First and only cluster removed due to timeout.
+ EXPECT_FALSE(prober.CurrentCluster(now).has_value());
+}
+
TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) {
const FieldTrialBasedConfig config;
BitrateProber prober(config);
@@ -107,7 +163,7 @@ TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) {
prober.SetEnabled(true);
EXPECT_FALSE(prober.is_probing());
- prober.OnIncomingPacket(100);
+ prober.OnIncomingPacket(DataSize::Bytes(100));
EXPECT_FALSE(prober.is_probing());
}
@@ -121,7 +177,7 @@ TEST(BitrateProberTest, VerifyProbeSizeOnHighBitrate) {
/*cluster_id=*/0);
// Probe size should ensure a minimum of 1 ms interval.
EXPECT_GT(prober.RecommendedMinProbeSize(),
- (kHighBitrate * TimeDelta::Millis(1)).bytes<size_t>());
+ kHighBitrate * TimeDelta::Millis(1));
}
TEST(BitrateProberTest, MinumumNumberOfProbingPackets) {
@@ -130,14 +186,14 @@ TEST(BitrateProberTest, MinumumNumberOfProbingPackets) {
// Even when probing at a low bitrate we expect a minimum number
// of packets to be sent.
const DataRate kBitrate = DataRate::KilobitsPerSec(100);
- const int kPacketSizeBytes = 1000;
+ const DataSize kPacketSize = DataSize::Bytes(1000);
Timestamp now = Timestamp::Millis(0);
prober.CreateProbeCluster(kBitrate, now, 0);
- prober.OnIncomingPacket(kPacketSizeBytes);
+ prober.OnIncomingPacket(kPacketSize);
for (int i = 0; i < 5; ++i) {
EXPECT_TRUE(prober.is_probing());
- prober.ProbeSent(now, kPacketSizeBytes);
+ prober.ProbeSent(now, kPacketSize);
}
EXPECT_FALSE(prober.is_probing());
@@ -147,17 +203,17 @@ TEST(BitrateProberTest, ScaleBytesUsedForProbing) {
const FieldTrialBasedConfig config;
BitrateProber prober(config);
const DataRate kBitrate = DataRate::KilobitsPerSec(10000); // 10 Mbps.
- const int kPacketSizeBytes = 1000;
- const int kExpectedBytesSent = (kBitrate * TimeDelta::Millis(15)).bytes();
+ const DataSize kPacketSize = DataSize::Bytes(1000);
+ const DataSize kExpectedDataSent = kBitrate * TimeDelta::Millis(15);
Timestamp now = Timestamp::Millis(0);
prober.CreateProbeCluster(kBitrate, now, /*cluster_id=*/0);
- prober.OnIncomingPacket(kPacketSizeBytes);
- int bytes_sent = 0;
- while (bytes_sent < kExpectedBytesSent) {
+ prober.OnIncomingPacket(kPacketSize);
+ DataSize data_sent = DataSize::Zero();
+ while (data_sent < kExpectedDataSent) {
ASSERT_TRUE(prober.is_probing());
- prober.ProbeSent(now, kPacketSizeBytes);
- bytes_sent += kPacketSizeBytes;
+ prober.ProbeSent(now, kPacketSize);
+ data_sent += kPacketSize;
}
EXPECT_FALSE(prober.is_probing());
@@ -167,17 +223,17 @@ TEST(BitrateProberTest, HighBitrateProbing) {
const FieldTrialBasedConfig config;
BitrateProber prober(config);
const DataRate kBitrate = DataRate::KilobitsPerSec(1000000); // 1 Gbps.
- const int kPacketSizeBytes = 1000;
- const int kExpectedBytesSent = (kBitrate * TimeDelta::Millis(15)).bytes();
+ const DataSize kPacketSize = DataSize::Bytes(1000);
+ const DataSize kExpectedDataSent = kBitrate * TimeDelta::Millis(15);
Timestamp now = Timestamp::Millis(0);
prober.CreateProbeCluster(kBitrate, now, 0);
- prober.OnIncomingPacket(kPacketSizeBytes);
- int bytes_sent = 0;
- while (bytes_sent < kExpectedBytesSent) {
+ prober.OnIncomingPacket(kPacketSize);
+ DataSize data_sent = DataSize::Zero();
+ while (data_sent < kExpectedDataSent) {
ASSERT_TRUE(prober.is_probing());
- prober.ProbeSent(now, kPacketSizeBytes);
- bytes_sent += kPacketSizeBytes;
+ prober.ProbeSent(now, kPacketSize);
+ data_sent += kPacketSize;
}
EXPECT_FALSE(prober.is_probing());
@@ -187,9 +243,9 @@ TEST(BitrateProberTest, ProbeClusterTimeout) {
const FieldTrialBasedConfig config;
BitrateProber prober(config);
const DataRate kBitrate = DataRate::KilobitsPerSec(300);
- const int kSmallPacketSize = 20;
+ const DataSize kSmallPacketSize = DataSize::Bytes(20);
// Expecting two probe clusters of 5 packets each.
- const int kExpectedBytesSent = 20 * 2 * 5;
+ const DataSize kExpectedDataSent = kSmallPacketSize * 2 * 5;
const TimeDelta kTimeout = TimeDelta::Millis(5000);
Timestamp now = Timestamp::Millis(0);
@@ -204,11 +260,11 @@ TEST(BitrateProberTest, ProbeClusterTimeout) {
prober.CreateProbeCluster(kBitrate / 10, now, /*cluster_id=*/2);
prober.OnIncomingPacket(kSmallPacketSize);
EXPECT_TRUE(prober.is_probing());
- int bytes_sent = 0;
- while (bytes_sent < kExpectedBytesSent) {
+ DataSize data_sent = DataSize::Zero();
+ while (data_sent < kExpectedDataSent) {
ASSERT_TRUE(prober.is_probing());
prober.ProbeSent(now, kSmallPacketSize);
- bytes_sent += kSmallPacketSize;
+ data_sent += kSmallPacketSize;
}
EXPECT_FALSE(prober.is_probing());
diff --git a/modules/pacing/paced_sender.cc b/modules/pacing/paced_sender.cc
index cd298f9b0b..a0e76761e7 100644
--- a/modules/pacing/paced_sender.cc
+++ b/modules/pacing/paced_sender.cc
@@ -22,13 +22,15 @@
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
const int64_t PacedSender::kMaxQueueLengthMs = 2000;
const float PacedSender::kDefaultPaceMultiplier = 2.5f;
-PacedSender::PacedSender(Clock* clock, PacketRouter* packet_router,
+PacedSender::PacedSender(Clock* clock,
+ PacketRouter* packet_router,
RtcEventLog* event_log,
const WebRtcKeyValueConfig* field_trials,
ProcessThread* process_thread)
@@ -39,10 +41,11 @@ PacedSender::PacedSender(Clock* clock, PacketRouter* packet_router,
? PacingController::ProcessMode::kDynamic
: PacingController::ProcessMode::kPeriodic),
pacing_controller_(clock,
- static_cast<PacingController::PacketSender*>(this),
- event_log, field_trials, process_mode_),
+ packet_router,
+ event_log,
+ field_trials,
+ process_mode_),
clock_(clock),
- packet_router_(packet_router),
process_thread_(process_thread) {
if (process_thread_)
process_thread_->RegisterModule(&module_proxy_, RTC_FROM_HERE);
@@ -112,8 +115,15 @@ void PacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) {
void PacedSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
{
+ TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacedSender::EnqueuePackets");
rtc::CritScope cs(&critsect_);
for (auto& packet : packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacedSender::EnqueuePackets::Loop", "sequence_number",
+ packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+
pacing_controller_.EnqueuePacket(std::move(packet));
}
}
@@ -194,19 +204,4 @@ void PacedSender::SetQueueTimeLimit(TimeDelta limit) {
MaybeWakupProcessThread();
}
-void PacedSender::SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- critsect_.Leave();
- packet_router_->SendPacket(std::move(packet), cluster_info);
- critsect_.Enter();
-}
-
-std::vector<std::unique_ptr<RtpPacketToSend>> PacedSender::GeneratePadding(
- DataSize size) {
- std::vector<std::unique_ptr<RtpPacketToSend>> padding_packets;
- critsect_.Leave();
- padding_packets = packet_router_->GeneratePadding(size.bytes());
- critsect_.Enter();
- return padding_packets;
-}
} // namespace webrtc
diff --git a/modules/pacing/paced_sender.h b/modules/pacing/paced_sender.h
index 16137dfcd6..d255efdc3b 100644
--- a/modules/pacing/paced_sender.h
+++ b/modules/pacing/paced_sender.h
@@ -32,7 +32,7 @@
#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/utility/include/process_thread.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -43,8 +43,7 @@ class RtcEventLog;
// updating dependencies.
class PacedSender : public Module,
public RtpPacketPacer,
- public RtpPacketSender,
- private PacingController::PacketSender {
+ public RtpPacketSender {
public:
// Expected max pacer delay in ms. If ExpectedQueueTime() is higher than
// this value, the packet producers should wait (eg drop frames rather than
@@ -140,14 +139,6 @@ class PacedSender : public Module,
// In dynamic process mode, refreshes the next process time.
void MaybeWakupProcessThread();
- // Methods implementing PacedSenderController:PacketSender.
- void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) override
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critsect_);
-
- std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
- DataSize size) override RTC_EXCLUSIVE_LOCKS_REQUIRED(critsect_);
-
// Private implementation of Module to not expose those implementation details
// publicly and control when the class is registered/deregistered.
class ModuleProxy : public Module {
@@ -166,12 +157,11 @@ class PacedSender : public Module,
PacedSender* const delegate_;
} module_proxy_{this};
- rtc::CriticalSection critsect_;
+ rtc::RecursiveCriticalSection critsect_;
const PacingController::ProcessMode process_mode_;
PacingController pacing_controller_ RTC_GUARDED_BY(critsect_);
Clock* const clock_;
- PacketRouter* const packet_router_;
ProcessThread* const process_thread_;
};
} // namespace webrtc
diff --git a/modules/pacing/paced_sender_unittest.cc b/modules/pacing/paced_sender_unittest.cc
index 26d2eac413..53cc1c42ed 100644
--- a/modules/pacing/paced_sender_unittest.cc
+++ b/modules/pacing/paced_sender_unittest.cc
@@ -39,12 +39,15 @@ constexpr size_t kDefaultPacketSize = 234;
// Mock callback implementing the raw api.
class MockCallback : public PacketRouter {
public:
- MOCK_METHOD2(SendPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(size_t target_size_bytes));
+ MOCK_METHOD(void,
+ SendPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (DataSize target_size),
+ (override));
};
class ProcessModeTrials : public WebRtcKeyValueConfig {
diff --git a/modules/pacing/pacing_controller.cc b/modules/pacing/pacing_controller.cc
index f21e63733f..33780e001c 100644
--- a/modules/pacing/pacing_controller.cc
+++ b/modules/pacing/pacing_controller.cc
@@ -193,6 +193,10 @@ bool PacingController::Congested() const {
return false;
}
+bool PacingController::IsProbing() const {
+ return prober_.is_probing();
+}
+
Timestamp PacingController::CurrentTime() const {
Timestamp time = clock_->CurrentTime();
if (time < last_timestamp_) {
@@ -285,7 +289,7 @@ TimeDelta PacingController::OldestPacketWaitTime() const {
void PacingController::EnqueuePacketInternal(
std::unique_ptr<RtpPacketToSend> packet,
int priority) {
- prober_.OnIncomingPacket(packet->payload_size());
+ prober_.OnIncomingPacket(DataSize::Bytes(packet->payload_size()));
// TODO(sprang): Make sure tests respect this, replace with DCHECK.
Timestamp now = CurrentTime();
@@ -331,7 +335,7 @@ bool PacingController::ShouldSendKeepalive(Timestamp now) const {
}
Timestamp PacingController::NextSendTime() const {
- Timestamp now = CurrentTime();
+ const Timestamp now = CurrentTime();
if (paused_) {
return last_send_time_ + kPausedProcessInterval;
@@ -399,7 +403,9 @@ void PacingController::ProcessPackets() {
if (target_send_time.IsMinusInfinity()) {
target_send_time = now;
} else if (now < target_send_time) {
- // We are too early, abort and regroup!
+ // We are too early, but if queue is empty still allow draining some debt.
+ TimeDelta elapsed_time = UpdateTimeAndGetElapsed(now);
+ UpdateBudgetWithElapsedTime(elapsed_time);
return;
}
@@ -434,7 +440,10 @@ void PacingController::ProcessPackets() {
for (auto& packet : keepalive_packets) {
keepalive_data_sent +=
DataSize::Bytes(packet->payload_size() + packet->padding_size());
- packet_sender_->SendRtpPacket(std::move(packet), PacedPacketInfo());
+ packet_sender_->SendPacket(std::move(packet), PacedPacketInfo());
+ for (auto& packet : packet_sender_->FetchFec()) {
+ EnqueuePacket(std::move(packet));
+ }
}
OnPaddingSent(keepalive_data_sent);
}
@@ -477,13 +486,21 @@ void PacingController::ProcessPackets() {
}
bool first_packet_in_probe = false;
- bool is_probing = prober_.is_probing();
PacedPacketInfo pacing_info;
- absl::optional<DataSize> recommended_probe_size;
+ DataSize recommended_probe_size = DataSize::Zero();
+ bool is_probing = prober_.is_probing();
if (is_probing) {
- pacing_info = prober_.CurrentCluster();
- first_packet_in_probe = pacing_info.probe_cluster_bytes_sent == 0;
- recommended_probe_size = DataSize::Bytes(prober_.RecommendedMinProbeSize());
+ // Probe timing is sensitive, and handled explicitly by BitrateProber, so
+ // use actual send time rather than target.
+ pacing_info = prober_.CurrentCluster(now).value_or(PacedPacketInfo());
+ if (pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) {
+ first_packet_in_probe = pacing_info.probe_cluster_bytes_sent == 0;
+ recommended_probe_size = prober_.RecommendedMinProbeSize();
+ RTC_DCHECK_GT(recommended_probe_size, DataSize::Zero());
+ } else {
+ // No valid probe cluster returned, probe might have timed out.
+ is_probing = false;
+ }
}
DataSize data_sent = DataSize::Zero();
@@ -553,14 +570,21 @@ void PacingController::ProcessPackets() {
packet_size += DataSize::Bytes(rtp_packet->headers_size()) +
transport_overhead_per_packet_;
}
- packet_sender_->SendRtpPacket(std::move(rtp_packet), pacing_info);
+ packet_sender_->SendPacket(std::move(rtp_packet), pacing_info);
+ for (auto& packet : packet_sender_->FetchFec()) {
+ EnqueuePacket(std::move(packet));
+ }
data_sent += packet_size;
// Send done, update send/process time to the target send time.
OnPacketSent(packet_type, packet_size, target_send_time);
- if (recommended_probe_size && data_sent > *recommended_probe_size)
+
+ // If we are currently probing, we need to stop the send loop when we have
+ // reached the send target.
+ if (is_probing && data_sent > recommended_probe_size) {
break;
+ }
if (mode_ == ProcessMode::kDynamic) {
// Update target send time in case that are more packets that we are late
@@ -579,14 +603,13 @@ void PacingController::ProcessPackets() {
if (is_probing) {
probing_send_failure_ = data_sent == DataSize::Zero();
if (!probing_send_failure_) {
- prober_.ProbeSent(CurrentTime(), data_sent.bytes());
+ prober_.ProbeSent(CurrentTime(), data_sent);
}
}
}
-DataSize PacingController::PaddingToAdd(
- absl::optional<DataSize> recommended_probe_size,
- DataSize data_sent) const {
+DataSize PacingController::PaddingToAdd(DataSize recommended_probe_size,
+ DataSize data_sent) const {
if (!packet_queue_.Empty()) {
// Actual payload available, no need to add padding.
return DataSize::Zero();
@@ -603,9 +626,9 @@ DataSize PacingController::PaddingToAdd(
return DataSize::Zero();
}
- if (recommended_probe_size) {
- if (*recommended_probe_size > data_sent) {
- return *recommended_probe_size - data_sent;
+ if (!recommended_probe_size.IsZero()) {
+ if (recommended_probe_size > data_sent) {
+ return recommended_probe_size - data_sent;
}
return DataSize::Zero();
}
diff --git a/modules/pacing/pacing_controller.h b/modules/pacing/pacing_controller.h
index 27f1614b08..6e0f9bd5b2 100644
--- a/modules/pacing/pacing_controller.h
+++ b/modules/pacing/pacing_controller.h
@@ -31,7 +31,6 @@
#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/thread_annotations.h"
@@ -55,8 +54,10 @@ class PacingController {
class PacketSender {
public:
virtual ~PacketSender() = default;
- virtual void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) = 0;
+ virtual void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) = 0;
+ // Should be called after each call to SendPacket().
+ virtual std::vector<std::unique_ptr<RtpPacketToSend>> FetchFec() = 0;
virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
DataSize size) = 0;
};
@@ -146,6 +147,8 @@ class PacingController {
bool Congested() const;
+ bool IsProbing() const;
+
private:
void EnqueuePacketInternal(std::unique_ptr<RtpPacketToSend> packet,
int priority);
@@ -156,7 +159,7 @@ class PacingController {
void UpdateBudgetWithElapsedTime(TimeDelta delta);
void UpdateBudgetWithSentData(DataSize size);
- DataSize PaddingToAdd(absl::optional<DataSize> recommended_probe_size,
+ DataSize PaddingToAdd(DataSize recommended_probe_size,
DataSize data_sent) const;
std::unique_ptr<RtpPacketToSend> GetPendingPacket(
diff --git a/modules/pacing/pacing_controller_unittest.cc b/modules/pacing/pacing_controller_unittest.cc
index fa23da70a0..8aaa67ce51 100644
--- a/modules/pacing/pacing_controller_unittest.cc
+++ b/modules/pacing/pacing_controller_unittest.cc
@@ -20,6 +20,7 @@
#include "api/units/data_rate.h"
#include "modules/pacing/packet_router.h"
#include "system_wrappers/include/clock.h"
+#include "test/explicit_key_value_config.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -69,8 +70,8 @@ std::unique_ptr<RtpPacketToSend> BuildPacket(RtpPacketMediaType type,
// methods that focus on core aspects.
class MockPacingControllerCallback : public PacingController::PacketSender {
public:
- void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) override {
+ void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) override {
SendPacket(packet->Ssrc(), packet->SequenceNumber(),
packet->capture_time_ms(),
packet->packet_type() == RtpPacketMediaType::kRetransmission,
@@ -90,24 +91,37 @@ class MockPacingControllerCallback : public PacingController::PacketSender {
return ret;
}
- MOCK_METHOD5(SendPacket,
- void(uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_timestamp,
- bool retransmission,
- bool padding));
- MOCK_METHOD1(SendPadding, size_t(size_t target_size));
+ MOCK_METHOD(void,
+ SendPacket,
+ (uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_timestamp,
+ bool retransmission,
+ bool padding));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ FetchFec,
+ (),
+ (override));
+ MOCK_METHOD(size_t, SendPadding, (size_t target_size));
};
// Mock callback implementing the raw api.
class MockPacketSender : public PacingController::PacketSender {
public:
- MOCK_METHOD2(SendRtpPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(DataSize target_size));
+ MOCK_METHOD(void,
+ SendPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ FetchFec,
+ (),
+ (override));
+
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (DataSize target_size),
+ (override));
};
class PacingControllerPadding : public PacingController::PacketSender {
@@ -116,11 +130,15 @@ class PacingControllerPadding : public PacingController::PacketSender {
PacingControllerPadding() : padding_sent_(0), total_bytes_sent_(0) {}
- void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& pacing_info) override {
+ void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& pacing_info) override {
total_bytes_sent_ += packet->payload_size();
}
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFec() override {
+ return {};
+ }
+
std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
DataSize target_size) override {
size_t num_packets =
@@ -147,11 +165,16 @@ class PacingControllerProbing : public PacingController::PacketSender {
public:
PacingControllerProbing() : packets_sent_(0), padding_sent_(0) {}
- void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& pacing_info) override {
+ void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& pacing_info) override {
if (packet->packet_type() != RtpPacketMediaType::kPadding) {
++packets_sent_;
}
+ last_pacing_info_ = pacing_info;
+ }
+
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFec() override {
+ return {};
}
std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
@@ -173,12 +196,14 @@ class PacingControllerProbing : public PacingController::PacketSender {
}
int packets_sent() const { return packets_sent_; }
-
int padding_sent() const { return padding_sent_; }
+ int total_packets_sent() const { return packets_sent_ + padding_sent_; }
+ PacedPacketInfo last_pacing_info() const { return last_pacing_info_; }
private:
int packets_sent_;
int padding_sent_;
+ PacedPacketInfo last_pacing_info_;
};
class PacingControllerTest
@@ -295,7 +320,7 @@ class PacingControllerTest
}
SimulatedClock clock_;
- MockPacingControllerCallback callback_;
+ ::testing::NiceMock<MockPacingControllerCallback> callback_;
std::unique_ptr<PacingController> pacer_;
};
@@ -1422,63 +1447,119 @@ TEST_P(PacingControllerTest, ProbingWithInsertedPackets) {
TEST_P(PacingControllerTest, SkipsProbesWhenProcessIntervalTooLarge) {
const size_t kPacketSize = 1200;
const int kInitialBitrateBps = 300000;
- uint32_t ssrc = 12346;
- uint16_t sequence_number = 1234;
+ const uint32_t ssrc = 12346;
+ const int kProbeClusterId = 3;
- PacingControllerProbing packet_sender;
- pacer_ = std::make_unique<PacingController>(&clock_, &packet_sender, nullptr,
- nullptr, GetParam());
- pacer_->SetPacingRates(
- DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier),
- DataRate::Zero());
+ // Test with both legacy and new probe discard modes.
+ // TODO(bugs.webrtc.org/11780): Clean up when legacy is gone.
+ for (bool abort_delayed_probes : {false, true}) {
+ uint16_t sequence_number = 1234;
- for (int i = 0; i < 10; ++i) {
+ PacingControllerProbing packet_sender;
+
+ const test::ExplicitKeyValueConfig trials(
+ abort_delayed_probes ? "WebRTC-Bwe-ProbingBehavior/"
+ "abort_delayed_probes:1,max_probe_delay:2ms/"
+ : "WebRTC-Bwe-ProbingBehavior/"
+ "abort_delayed_probes:0,max_probe_delay:2ms/");
+ pacer_ = std::make_unique<PacingController>(&clock_, &packet_sender,
+ nullptr, &trials, GetParam());
+ pacer_->SetPacingRates(
+ DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier),
+ DataRate::BitsPerSec(kInitialBitrateBps));
+
+ for (int i = 0; i < 10; ++i) {
+ Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), kPacketSize);
+ }
+ while (pacer_->QueueSizePackets() > 0) {
+ clock_.AdvanceTime(TimeUntilNextProcess());
+ pacer_->ProcessPackets();
+ }
+
+ // Probe at a very high rate.
+ pacer_->CreateProbeCluster(DataRate::KilobitsPerSec(10000), // 10 Mbps.
+ /*cluster_id=*/kProbeClusterId);
+ // We need one packet to start the probe.
Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize);
- }
- while (pacer_->QueueSizePackets() > 0) {
+ const int packets_sent_before_probe = packet_sender.packets_sent();
clock_.AdvanceTime(TimeUntilNextProcess());
pacer_->ProcessPackets();
- }
+ EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 1);
- // Probe at a very high rate.
- pacer_->CreateProbeCluster(DataRate::KilobitsPerSec(10000), // 10 Mbps.
- /*cluster_id=*/3);
- // We need one packet to start the probe.
- Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++,
- clock_.TimeInMilliseconds(), kPacketSize);
- const int packets_sent_before_probe = packet_sender.packets_sent();
- clock_.AdvanceTime(TimeUntilNextProcess());
- pacer_->ProcessPackets();
- EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 1);
+ // Figure out how long between probe packets.
+ Timestamp start_time = clock_.CurrentTime();
+ clock_.AdvanceTime(TimeUntilNextProcess());
+ TimeDelta time_between_probes = clock_.CurrentTime() - start_time;
+ // Advance that distance again + 1ms.
+ clock_.AdvanceTime(time_between_probes);
- // Figure out how long between probe packets.
- Timestamp start_time = clock_.CurrentTime();
- clock_.AdvanceTime(TimeUntilNextProcess());
- TimeDelta time_between_probes = clock_.CurrentTime() - start_time;
- // Advance that distance again + 1ms.
- clock_.AdvanceTime(time_between_probes);
+ // Send second probe packet.
+ Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), kPacketSize);
+ pacer_->ProcessPackets();
+ EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 2);
+ PacedPacketInfo last_pacing_info = packet_sender.last_pacing_info();
+ EXPECT_EQ(last_pacing_info.probe_cluster_id, kProbeClusterId);
+
+ // We're exactly where we should be for the next probe.
+ const Timestamp probe_time = clock_.CurrentTime();
+ EXPECT_EQ(pacer_->NextSendTime(), clock_.CurrentTime());
+
+ BitrateProberConfig probing_config(&trials);
+ EXPECT_GT(probing_config.max_probe_delay.Get(), TimeDelta::Zero());
+ // Advance to within max probe delay, should still return same target.
+ clock_.AdvanceTime(probing_config.max_probe_delay.Get());
+ EXPECT_EQ(pacer_->NextSendTime(), probe_time);
+
+ // Too high probe delay, drop it!
+ clock_.AdvanceTime(TimeDelta::Micros(1));
+
+ int packets_sent_before_timeout = packet_sender.total_packets_sent();
+ if (abort_delayed_probes) {
+ // Expected next process time is unchanged, but calling should not
+ // generate new packets.
+ EXPECT_EQ(pacer_->NextSendTime(), probe_time);
+ pacer_->ProcessPackets();
+ EXPECT_EQ(packet_sender.total_packets_sent(),
+ packets_sent_before_timeout);
- // Send second probe packet.
- Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++,
- clock_.TimeInMilliseconds(), kPacketSize);
- pacer_->ProcessPackets();
- EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 2);
-
- // We're exactly where we should be for the next probe.
- const Timestamp probe_time = clock_.CurrentTime();
- EXPECT_EQ(pacer_->NextSendTime(), clock_.CurrentTime());
-
- FieldTrialBasedConfig field_trial_config;
- BitrateProberConfig probing_config(&field_trial_config);
- EXPECT_GT(probing_config.max_probe_delay.Get(), TimeDelta::Zero());
- // Advance to within max probe delay, should still return same target.
- clock_.AdvanceTime(probing_config.max_probe_delay.Get());
- EXPECT_EQ(pacer_->NextSendTime(), probe_time);
-
- // Too high probe delay, drop it!
- clock_.AdvanceTime(TimeDelta::Micros(1));
- EXPECT_GT(pacer_->NextSendTime(), probe_time);
+ // Next packet sent is not part of probe.
+ if (PeriodicProcess()) {
+ do {
+ AdvanceTimeAndProcess();
+ } while (packet_sender.total_packets_sent() ==
+ packets_sent_before_timeout);
+ } else {
+ AdvanceTimeAndProcess();
+ }
+ const int expected_probe_id = PacedPacketInfo::kNotAProbe;
+ EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id,
+ expected_probe_id);
+ } else {
+ // Legacy behaviour, probe "aborted" so send time moved back. Next call to
+ // ProcessPackets() still results in packets being marked as part of probe
+ // cluster.
+ EXPECT_GT(pacer_->NextSendTime(), probe_time);
+ AdvanceTimeAndProcess();
+ EXPECT_GT(packet_sender.total_packets_sent(),
+ packets_sent_before_timeout);
+ const int expected_probe_id = last_pacing_info.probe_cluster_id;
+ EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id,
+ expected_probe_id);
+
+ // Time between sent packets keeps being too large, but we still mark the
+ // packets as being part of the cluster.
+ Timestamp a = clock_.CurrentTime();
+ AdvanceTimeAndProcess();
+ EXPECT_GT(packet_sender.total_packets_sent(),
+ packets_sent_before_timeout);
+ EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id,
+ expected_probe_id);
+ EXPECT_GT(clock_.CurrentTime() - a, time_between_probes);
+ }
+ }
}
TEST_P(PacingControllerTest, ProbingWithPaddingSupport) {
@@ -1571,7 +1652,7 @@ TEST_P(PacingControllerTest, ProbeClusterId) {
// First probing cluster.
EXPECT_CALL(callback,
- SendRtpPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 0)))
+ SendPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 0)))
.Times(5);
for (int i = 0; i < 5; ++i) {
@@ -1580,7 +1661,7 @@ TEST_P(PacingControllerTest, ProbeClusterId) {
// Second probing cluster.
EXPECT_CALL(callback,
- SendRtpPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 1)))
+ SendPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 1)))
.Times(5);
for (int i = 0; i < 5; ++i) {
@@ -1598,7 +1679,7 @@ TEST_P(PacingControllerTest, ProbeClusterId) {
return padding_packets;
});
bool non_probe_packet_seen = false;
- EXPECT_CALL(callback, SendRtpPacket)
+ EXPECT_CALL(callback, SendPacket)
.WillOnce([&](std::unique_ptr<RtpPacketToSend> packet,
const PacedPacketInfo& cluster_info) {
EXPECT_EQ(cluster_info.probe_cluster_id, kNotAProbe);
@@ -1628,23 +1709,23 @@ TEST_P(PacingControllerTest, OwnedPacketPrioritizedOnType) {
::testing::InSequence seq;
EXPECT_CALL(
callback,
- SendRtpPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kAudioSsrc)), _));
- EXPECT_CALL(callback,
- SendRtpPacket(
- Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _));
+ SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kAudioSsrc)), _));
+ EXPECT_CALL(
+ callback,
+ SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _));
// FEC and video actually have the same priority, so will come out in
// insertion order.
- EXPECT_CALL(callback,
- SendRtpPacket(
- Pointee(Property(&RtpPacketToSend::Ssrc, kFlexFecSsrc)), _));
EXPECT_CALL(
callback,
- SendRtpPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoSsrc)), _));
+ SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kFlexFecSsrc)), _));
+ EXPECT_CALL(
+ callback,
+ SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoSsrc)), _));
- EXPECT_CALL(callback,
- SendRtpPacket(
- Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _));
+ EXPECT_CALL(
+ callback,
+ SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _));
while (pacer_->QueueSizePackets() > 0) {
if (PeriodicProcess()) {
@@ -1679,7 +1760,7 @@ TEST_P(PacingControllerTest, SmallFirstProbePacket) {
size_t packets_sent = 0;
bool media_seen = false;
- EXPECT_CALL(callback, SendRtpPacket)
+ EXPECT_CALL(callback, SendPacket)
.Times(::testing::AnyNumber())
.WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
const PacedPacketInfo& cluster_info) {
@@ -1817,7 +1898,7 @@ TEST_P(PacingControllerTest,
for (bool account_for_audio : {false, true}) {
uint16_t sequence_number = 1234;
MockPacketSender callback;
- EXPECT_CALL(callback, SendRtpPacket).Times(::testing::AnyNumber());
+ EXPECT_CALL(callback, SendPacket).Times(::testing::AnyNumber());
pacer_ = std::make_unique<PacingController>(&clock_, &callback, nullptr,
nullptr, GetParam());
pacer_->SetAccountForAudioPackets(account_for_audio);
@@ -2025,6 +2106,38 @@ TEST_P(PacingControllerTest, PaddingTargetAccountsForPaddingRate) {
AdvanceTimeAndProcess();
}
+TEST_P(PacingControllerTest, SendsDeferredFecPackets) {
+ ScopedFieldTrials trial("WebRTC-DeferredFecGeneration/Enabled/");
+ SetUp();
+
+ const uint32_t kSsrc = 12345;
+ const uint32_t kFlexSsrc = 54321;
+ uint16_t sequence_number = 1234;
+ uint16_t flexfec_sequence_number = 4321;
+ const size_t kPacketSize = 123;
+
+ // Set pacing rate to 1000 packet/s, no padding.
+ pacer_->SetPacingRates(
+ DataSize::Bytes(1000 * kPacketSize) / TimeDelta::Seconds(1),
+ DataRate::Zero());
+
+ int64_t now = clock_.TimeInMilliseconds();
+ Send(RtpPacketMediaType::kVideo, kSsrc, sequence_number, now, kPacketSize);
+ EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number, now, false, false));
+ EXPECT_CALL(callback_, FetchFec).WillOnce([&]() {
+ EXPECT_CALL(callback_, SendPacket(kFlexSsrc, flexfec_sequence_number, now,
+ false, false));
+ EXPECT_CALL(callback_, FetchFec);
+ std::vector<std::unique_ptr<RtpPacketToSend>> fec_packets;
+ fec_packets.push_back(
+ BuildPacket(RtpPacketMediaType::kForwardErrorCorrection, kFlexSsrc,
+ flexfec_sequence_number, now, kPacketSize));
+ return fec_packets;
+ });
+ AdvanceTimeAndProcess();
+ AdvanceTimeAndProcess();
+}
+
INSTANTIATE_TEST_SUITE_P(
WithAndWithoutIntervalBudget,
PacingControllerTest,
diff --git a/modules/pacing/packet_router.cc b/modules/pacing/packet_router.cc
index fa64331493..5317f510c9 100644
--- a/modules/pacing/packet_router.cc
+++ b/modules/pacing/packet_router.cc
@@ -17,13 +17,14 @@
#include <utility>
#include "absl/types/optional.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
@@ -52,8 +53,9 @@ PacketRouter::~PacketRouter() {
RTC_DCHECK(active_remb_module_ == nullptr);
}
-void PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) {
- rtc::CritScope cs(&modules_crit_);
+void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module,
+ bool remb_candidate) {
+ MutexLock lock(&modules_mutex_);
AddSendRtpModuleToMap(rtp_module, rtp_module->SSRC());
if (absl::optional<uint32_t> rtx_ssrc = rtp_module->RtxSsrc()) {
@@ -72,7 +74,8 @@ void PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) {
}
}
-void PacketRouter::AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc) {
+void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module,
+ uint32_t ssrc) {
RTC_DCHECK(send_modules_map_.find(ssrc) == send_modules_map_.end());
// Always keep the audio modules at the back of the list, so that when we
// iterate over the modules in order to find one that can send padding we
@@ -93,8 +96,8 @@ void PacketRouter::RemoveSendRtpModuleFromMap(uint32_t ssrc) {
send_modules_map_.erase(kv);
}
-void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) {
- rtc::CritScope cs(&modules_crit_);
+void PacketRouter::RemoveSendRtpModule(RtpRtcpInterface* rtp_module) {
+ MutexLock lock(&modules_mutex_);
MaybeRemoveRembModuleCandidate(rtp_module, /* media_sender = */ true);
RemoveSendRtpModuleFromMap(rtp_module->SSRC());
@@ -112,7 +115,7 @@ void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) {
void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
bool remb_candidate) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
RTC_DCHECK(std::find(rtcp_feedback_senders_.begin(),
rtcp_feedback_senders_.end(),
rtcp_sender) == rtcp_feedback_senders_.end());
@@ -126,7 +129,7 @@ void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
void PacketRouter::RemoveReceiveRtpModule(
RtcpFeedbackSenderInterface* rtcp_sender) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
MaybeRemoveRembModuleCandidate(rtcp_sender, /* media_sender = */ false);
auto it = std::find(rtcp_feedback_senders_.begin(),
rtcp_feedback_senders_.end(), rtcp_sender);
@@ -136,7 +139,11 @@ void PacketRouter::RemoveReceiveRtpModule(
void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
const PacedPacketInfo& cluster_info) {
- rtc::CritScope cs(&modules_crit_);
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacketRouter::SendPacket",
+ "sequence_number", packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+
+ MutexLock lock(&modules_mutex_);
// With the new pacer code path, transport sequence numbers are only set here,
// on the pacer thread. Therefore we don't need atomics/synchronization.
if (packet->HasExtension<TransportSequenceNumber>()) {
@@ -153,7 +160,7 @@ void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
return;
}
- RtpRtcp* rtp_module = kv->second;
+ RtpRtcpInterface* rtp_module = kv->second;
if (!rtp_module->TrySendPacket(packet.get(), cluster_info)) {
RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module.";
return;
@@ -164,11 +171,26 @@ void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
// properties needed for payload based padding. Cache it for later use.
last_send_module_ = rtp_module;
}
+
+ for (auto& packet : rtp_module->FetchFecPackets()) {
+ pending_fec_packets_.push_back(std::move(packet));
+ }
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>> PacketRouter::FetchFec() {
+ MutexLock lock(&modules_mutex_);
+ std::vector<std::unique_ptr<RtpPacketToSend>> fec_packets =
+ std::move(pending_fec_packets_);
+ pending_fec_packets_.clear();
+ return fec_packets;
}
std::vector<std::unique_ptr<RtpPacketToSend>> PacketRouter::GeneratePadding(
- size_t target_size_bytes) {
- rtc::CritScope cs(&modules_crit_);
+ DataSize size) {
+ TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacketRouter::GeneratePadding", "bytes", size.bytes());
+
+ MutexLock lock(&modules_mutex_);
// First try on the last rtp module to have sent media. This increases the
// the chance that any payload based padding will be useful as it will be
// somewhat distributed over modules according the packet rate, even if it
@@ -178,30 +200,38 @@ std::vector<std::unique_ptr<RtpPacketToSend>> PacketRouter::GeneratePadding(
std::vector<std::unique_ptr<RtpPacketToSend>> padding_packets;
if (last_send_module_ != nullptr &&
last_send_module_->SupportsRtxPayloadPadding()) {
- padding_packets = last_send_module_->GeneratePadding(target_size_bytes);
- if (!padding_packets.empty()) {
- return padding_packets;
- }
+ padding_packets = last_send_module_->GeneratePadding(size.bytes());
}
- // Iterate over all modules send module. Video modules will be at the front
- // and so will be prioritized. This is important since audio packets may not
- // be taken into account by the bandwidth estimator, e.g. in FF.
- for (RtpRtcp* rtp_module : send_modules_list_) {
- if (rtp_module->SupportsPadding()) {
- padding_packets = rtp_module->GeneratePadding(target_size_bytes);
- if (!padding_packets.empty()) {
- last_send_module_ = rtp_module;
- break;
+ if (padding_packets.empty()) {
+ // Iterate over all modules send module. Video modules will be at the front
+ // and so will be prioritized. This is important since audio packets may not
+ // be taken into account by the bandwidth estimator, e.g. in FF.
+ for (RtpRtcpInterface* rtp_module : send_modules_list_) {
+ if (rtp_module->SupportsPadding()) {
+ padding_packets = rtp_module->GeneratePadding(size.bytes());
+ if (!padding_packets.empty()) {
+ last_send_module_ = rtp_module;
+ break;
+ }
}
}
}
+#if RTC_TRACE_EVENTS_ENABLED
+ for (auto& packet : padding_packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacketRouter::GeneratePadding::Loop", "sequence_number",
+ packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+ }
+#endif
+
return padding_packets;
}
uint16_t PacketRouter::CurrentTransportSequenceNumber() const {
- rtc::CritScope lock(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
return transport_seq_ & 0xFFFF;
}
@@ -215,7 +245,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
int64_t now_ms = rtc::TimeMillis();
{
- rtc::CritScope lock(&remb_crit_);
+ MutexLock lock(&remb_mutex_);
// If we already have an estimate, check if the new total estimate is below
// kSendThresholdPercent of the previous estimate.
@@ -248,7 +278,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) {
RTC_DCHECK_GE(bitrate_bps, 0);
{
- rtc::CritScope lock(&remb_crit_);
+ MutexLock lock(&remb_mutex_);
max_bitrate_bps_ = bitrate_bps;
if (rtc::TimeMillis() - last_remb_time_ms_ < kRembSendIntervalMs &&
last_send_bitrate_bps_ > 0 &&
@@ -262,7 +292,7 @@ void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) {
bool PacketRouter::SendRemb(int64_t bitrate_bps,
const std::vector<uint32_t>& ssrcs) {
- rtc::CritScope lock(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
if (!active_remb_module_) {
return false;
@@ -277,10 +307,10 @@ bool PacketRouter::SendRemb(int64_t bitrate_bps,
bool PacketRouter::SendCombinedRtcpPacket(
std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
// Prefer send modules.
- for (RtpRtcp* rtp_module : send_modules_list_) {
+ for (RtpRtcpInterface* rtp_module : send_modules_list_) {
if (rtp_module->RTCP() == RtcpMode::kOff) {
continue;
}
diff --git a/modules/pacing/packet_router.h b/modules/pacing/packet_router.h
index 40b3ad1407..2fa104b4cd 100644
--- a/modules/pacing/packet_router.h
+++ b/modules/pacing/packet_router.h
@@ -21,17 +21,18 @@
#include <vector>
#include "api/transport/network_types.h"
+#include "modules/pacing/pacing_controller.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
-class RtpRtcp;
+class RtpRtcpInterface;
// PacketRouter keeps track of rtp send modules to support the pacer.
// In addition, it handles feedback messages, which are sent on a send
@@ -39,24 +40,25 @@ class RtpRtcp;
// (receiver report). For the latter case, we also keep track of the
// receive modules.
class PacketRouter : public RemoteBitrateObserver,
- public TransportFeedbackSenderInterface {
+ public TransportFeedbackSenderInterface,
+ public PacingController::PacketSender {
public:
PacketRouter();
explicit PacketRouter(uint16_t start_transport_seq);
~PacketRouter() override;
- void AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate);
- void RemoveSendRtpModule(RtpRtcp* rtp_module);
+ void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate);
+ void RemoveSendRtpModule(RtpRtcpInterface* rtp_module);
void AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
bool remb_candidate);
void RemoveReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender);
- virtual void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info);
-
- virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
- size_t target_size_bytes);
+ void SendPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) override;
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFec() override;
+ std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
+ DataSize size) override;
uint16_t CurrentTransportSequenceNumber() const;
@@ -82,48 +84,54 @@ class PacketRouter : public RemoteBitrateObserver,
private:
void AddRembModuleCandidate(RtcpFeedbackSenderInterface* candidate_module,
bool media_sender)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
void MaybeRemoveRembModuleCandidate(
RtcpFeedbackSenderInterface* candidate_module,
- bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void DetermineActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
+ bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void DetermineActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, uint32_t ssrc)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
void RemoveSendRtpModuleFromMap(uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
-
- rtc::CriticalSection modules_crit_;
- // Ssrc to RtpRtcp module;
- std::unordered_map<uint32_t, RtpRtcp*> send_modules_map_
- RTC_GUARDED_BY(modules_crit_);
- std::list<RtpRtcp*> send_modules_list_ RTC_GUARDED_BY(modules_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+
+ mutable Mutex modules_mutex_;
+ // Ssrc to RtpRtcpInterface module;
+ std::unordered_map<uint32_t, RtpRtcpInterface*> send_modules_map_
+ RTC_GUARDED_BY(modules_mutex_);
+ std::list<RtpRtcpInterface*> send_modules_list_
+ RTC_GUARDED_BY(modules_mutex_);
// The last module used to send media.
- RtpRtcp* last_send_module_ RTC_GUARDED_BY(modules_crit_);
+ RtpRtcpInterface* last_send_module_ RTC_GUARDED_BY(modules_mutex_);
// Rtcp modules of the rtp receivers.
std::vector<RtcpFeedbackSenderInterface*> rtcp_feedback_senders_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
- // TODO(eladalon): remb_crit_ only ever held from one function, and it's not
+ // TODO(eladalon): remb_mutex_ only ever held from one function, and it's not
// clear if that function can actually be called from more than one thread.
- rtc::CriticalSection remb_crit_;
+ Mutex remb_mutex_;
// The last time a REMB was sent.
- int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_crit_);
- int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
+ int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_mutex_);
+ int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
// The last bitrate update.
- int64_t bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
- int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
+ int64_t bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
+ int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
// Candidates for the REMB module can be RTP sender/receiver modules, with
// the sender modules taking precedence.
std::vector<RtcpFeedbackSenderInterface*> sender_remb_candidates_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
std::vector<RtcpFeedbackSenderInterface*> receiver_remb_candidates_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
RtcpFeedbackSenderInterface* active_remb_module_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
+
+ uint64_t transport_seq_ RTC_GUARDED_BY(modules_mutex_);
- uint64_t transport_seq_ RTC_GUARDED_BY(modules_crit_);
+ // TODO(bugs.webrtc.org/10809): Replace lock with a sequence checker once the
+ // process thread is gone.
+ std::vector<std::unique_ptr<RtpPacketToSend>> pending_fec_packets_
+ RTC_GUARDED_BY(modules_mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter);
};
diff --git a/modules/pacing/packet_router_unittest.cc b/modules/pacing/packet_router_unittest.cc
index b8f16cb924..10cf98b3dd 100644
--- a/modules/pacing/packet_router_unittest.cc
+++ b/modules/pacing/packet_router_unittest.cc
@@ -68,7 +68,7 @@ class PacketRouterTest : public ::testing::Test {
};
TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_GeneratePadding) {
- constexpr size_t bytes = 300;
+ constexpr DataSize bytes = DataSize::Bytes(300);
const PacedPacketInfo paced_info(1, kProbeMinProbes, kProbeMinBytes);
EXPECT_TRUE(packet_router_.GeneratePadding(bytes).empty());
@@ -101,12 +101,12 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) {
const uint16_t kSsrc1 = 1234;
const uint16_t kSsrc2 = 4567;
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
ON_CALL(rtp_1, RtxSendStatus()).WillByDefault(Return(kRtxRedundantPayloads));
ON_CALL(rtp_1, SSRC()).WillByDefault(Return(kSsrc1));
ON_CALL(rtp_1, SupportsPadding).WillByDefault(Return(false));
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
ON_CALL(rtp_2, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(rtp_2, SSRC()).WillByDefault(Return(kSsrc2));
ON_CALL(rtp_2, SupportsPadding).WillByDefault(Return(true));
@@ -122,7 +122,8 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) {
return std::vector<std::unique_ptr<RtpPacketToSend>>(
kExpectedPaddingPackets);
});
- auto generated_padding = packet_router_.GeneratePadding(kPaddingSize);
+ auto generated_padding =
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize));
EXPECT_EQ(generated_padding.size(), kExpectedPaddingPackets);
packet_router_.RemoveSendRtpModule(&rtp_1);
@@ -142,13 +143,13 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) {
kExpectedPaddingPackets);
};
- NiceMock<MockRtpRtcp> audio_module;
+ NiceMock<MockRtpRtcpInterface> audio_module;
ON_CALL(audio_module, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(audio_module, SSRC()).WillByDefault(Return(kSsrc1));
ON_CALL(audio_module, SupportsPadding).WillByDefault(Return(true));
ON_CALL(audio_module, IsAudioConfigured).WillByDefault(Return(true));
- NiceMock<MockRtpRtcp> video_module;
+ NiceMock<MockRtpRtcpInterface> video_module;
ON_CALL(video_module, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(video_module, SSRC()).WillByDefault(Return(kSsrc2));
ON_CALL(video_module, SupportsPadding).WillByDefault(Return(true));
@@ -159,7 +160,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) {
packet_router_.AddSendRtpModule(&audio_module, false);
EXPECT_CALL(audio_module, GeneratePadding(kPaddingSize))
.WillOnce(generate_padding);
- packet_router_.GeneratePadding(kPaddingSize);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize));
// Add the video module, this should now be prioritized since we cannot
// guarantee that audio packets will be included in the BWE.
@@ -167,7 +168,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) {
EXPECT_CALL(audio_module, GeneratePadding).Times(0);
EXPECT_CALL(video_module, GeneratePadding(kPaddingSize))
.WillOnce(generate_padding);
- packet_router_.GeneratePadding(kPaddingSize);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize));
// Remove and the add audio module again. Module order shouldn't matter;
// video should still be prioritized.
@@ -176,14 +177,14 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) {
EXPECT_CALL(audio_module, GeneratePadding).Times(0);
EXPECT_CALL(video_module, GeneratePadding(kPaddingSize))
.WillOnce(generate_padding);
- packet_router_.GeneratePadding(kPaddingSize);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize));
// Remove and the video module, we should fall back to padding on the
// audio module again.
packet_router_.RemoveSendRtpModule(&video_module);
EXPECT_CALL(audio_module, GeneratePadding(kPaddingSize))
.WillOnce(generate_padding);
- packet_router_.GeneratePadding(kPaddingSize);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize));
packet_router_.RemoveSendRtpModule(&audio_module);
}
@@ -194,7 +195,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
const uint16_t kSsrc3 = 8901;
// First two rtp modules send media and have rtx.
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1));
EXPECT_CALL(rtp_1, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_1, SupportsRtxPayloadPadding).WillRepeatedly(Return(true));
@@ -205,7 +206,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
::testing::Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc1)), _))
.WillRepeatedly(Return(true));
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
EXPECT_CALL(rtp_2, SSRC()).WillRepeatedly(Return(kSsrc2));
EXPECT_CALL(rtp_2, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_2, SupportsRtxPayloadPadding).WillRepeatedly(Return(true));
@@ -217,7 +218,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
.WillRepeatedly(Return(true));
// Third module is sending media, but does not support rtx.
- NiceMock<MockRtpRtcp> rtp_3;
+ NiceMock<MockRtpRtcpInterface> rtp_3;
EXPECT_CALL(rtp_3, SSRC()).WillRepeatedly(Return(kSsrc3));
EXPECT_CALL(rtp_3, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_3, SupportsRtxPayloadPadding).WillRepeatedly(Return(false));
@@ -243,7 +244,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
packets.push_back(BuildRtpPacket(kSsrc2));
return packets;
});
- packet_router_.GeneratePadding(kPaddingBytes);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes));
// Send media on first module. Padding should be sent on that module.
packet_router_.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo());
@@ -255,7 +256,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
packets.push_back(BuildRtpPacket(kSsrc1));
return packets;
});
- packet_router_.GeneratePadding(kPaddingBytes);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes));
// Send media on second module. Padding should be sent there.
packet_router_.SendPacket(BuildRtpPacket(kSsrc2), PacedPacketInfo());
@@ -265,7 +266,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
packet_router_.RemoveSendRtpModule(&rtp_2);
// Send on and then remove all remaining modules.
- RtpRtcp* last_send_module;
+ RtpRtcpInterface* last_send_module;
EXPECT_CALL(rtp_1, GeneratePadding(kPaddingBytes))
.Times(1)
.WillOnce([&](size_t target_size_bytes) {
@@ -285,7 +286,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
for (int i = 0; i < 2; ++i) {
last_send_module = nullptr;
- packet_router_.GeneratePadding(kPaddingBytes);
+ packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes));
EXPECT_NE(last_send_module, nullptr);
packet_router_.RemoveSendRtpModule(last_send_module);
}
@@ -297,7 +298,7 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) {
const uint16_t kSsrc1 = 1234;
PacketRouter packet_router(kStartSeq - 1);
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1));
EXPECT_CALL(rtp_1, TrySendPacket).WillRepeatedly(Return(true));
packet_router.AddSendRtpModule(&rtp_1, false);
@@ -315,8 +316,8 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) {
}
TEST_F(PacketRouterTest, SendTransportFeedback) {
- NiceMock<MockRtpRtcp> rtp_1;
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
ON_CALL(rtp_1, RTCP()).WillByDefault(Return(RtcpMode::kCompound));
ON_CALL(rtp_2, RTCP()).WillByDefault(Return(RtcpMode::kCompound));
@@ -338,7 +339,7 @@ TEST_F(PacketRouterTest, SendTransportFeedback) {
TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) {
const uint16_t kSsrc1 = 1234;
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
ON_CALL(rtp_1, SendingMedia).WillByDefault(Return(true));
ON_CALL(rtp_1, SSRC).WillByDefault(Return(kSsrc1));
packet_router_.AddSendRtpModule(&rtp_1, false);
@@ -361,8 +362,8 @@ TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) {
}
TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) {
- NiceMock<MockRtpRtcp> rtp_1;
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
const uint16_t kSsrc1 = 1234;
const uint16_t kSsrc2 = 2345;
@@ -405,8 +406,9 @@ TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+using PacketRouterDeathTest = PacketRouterTest;
+TEST_F(PacketRouterDeathTest, DoubleRegistrationOfSendModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false; // Value irrelevant.
packet_router_.AddSendRtpModule(&module, remb_candidate);
@@ -416,8 +418,8 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) {
packet_router_.RemoveSendRtpModule(&module);
}
-TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, DoubleRegistrationOfReceiveModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false; // Value irrelevant.
packet_router_.AddReceiveRtpModule(&module, remb_candidate);
@@ -427,14 +429,14 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) {
packet_router_.RemoveReceiveRtpModule(&module);
}
-TEST_F(PacketRouterTest, RemovalOfNeverAddedSendModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedSendModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
EXPECT_DEATH(packet_router_.RemoveSendRtpModule(&module), "");
}
-TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
EXPECT_DEATH(packet_router_.RemoveReceiveRtpModule(&module), "");
}
@@ -442,7 +444,7 @@ TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
TEST(PacketRouterRembTest, LowerEstimateToSendRemb) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -468,7 +470,7 @@ TEST(PacketRouterRembTest, LowerEstimateToSendRemb) {
TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -493,7 +495,7 @@ TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) {
TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -521,8 +523,8 @@ TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) {
TEST(PacketRouterRembTest, ChangeSendRtpModule) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp_send;
- NiceMock<MockRtpRtcp> rtp_recv;
+ NiceMock<MockRtpRtcpInterface> rtp_send;
+ NiceMock<MockRtpRtcpInterface> rtp_recv;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp_send, true);
packet_router.AddReceiveRtpModule(&rtp_recv, true);
@@ -556,7 +558,7 @@ TEST(PacketRouterRembTest, ChangeSendRtpModule) {
TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -585,7 +587,7 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateLimitsSetRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -608,7 +610,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -630,7 +632,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -652,7 +654,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -674,7 +676,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -697,7 +699,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -719,7 +721,7 @@ TEST(PacketRouterRembTest,
// packet on this one.
TEST(PacketRouterRembTest, NoSendingRtpModule) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddReceiveRtpModule(&rtp, true);
@@ -745,7 +747,7 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) {
TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false;
@@ -764,7 +766,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) {
TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = true;
@@ -783,7 +785,7 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) {
TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false;
@@ -802,7 +804,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) {
TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = true;
@@ -822,8 +824,8 @@ TEST(PacketRouterRembTest,
SendCandidatePreferredOverReceiveCandidate_SendModuleAddedFirst) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
@@ -850,8 +852,8 @@ TEST(PacketRouterRembTest,
SendCandidatePreferredOverReceiveCandidate_ReceiveModuleAddedFirst) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
@@ -877,8 +879,8 @@ TEST(PacketRouterRembTest,
TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
diff --git a/modules/pacing/task_queue_paced_sender.cc b/modules/pacing/task_queue_paced_sender.cc
index a4ce9fe9d6..db748f30b4 100644
--- a/modules/pacing/task_queue_paced_sender.cc
+++ b/modules/pacing/task_queue_paced_sender.cc
@@ -17,6 +17,7 @@
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
@@ -34,11 +35,12 @@ TaskQueuePacedSender::TaskQueuePacedSender(
PacketRouter* packet_router,
RtcEventLog* event_log,
const WebRtcKeyValueConfig* field_trials,
- TaskQueueFactory* task_queue_factory)
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window)
: clock_(clock),
- packet_router_(packet_router),
+ hold_back_window_(hold_back_window),
pacing_controller_(clock,
- static_cast<PacingController::PacketSender*>(this),
+ packet_router,
event_log,
field_trials,
PacingController::ProcessMode::kDynamic),
@@ -120,6 +122,17 @@ void TaskQueuePacedSender::SetPacingRates(DataRate pacing_rate,
void TaskQueuePacedSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+#if RTC_TRACE_EVENTS_ENABLED
+ TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "TaskQueuePacedSender::EnqueuePackets");
+ for (auto& packet : packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "TaskQueuePacedSender::EnqueuePackets::Loop",
+ "sequence_number", packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+ }
+#endif
+
task_queue_.PostTask([this, packets_ = std::move(packets)]() mutable {
RTC_DCHECK_RUN_ON(&task_queue_);
for (auto& packet : packets_) {
@@ -174,6 +187,11 @@ TimeDelta TaskQueuePacedSender::OldestPacketWaitTime() const {
return GetStats().oldest_packet_wait_time;
}
+void TaskQueuePacedSender::OnStatsUpdated(const Stats& stats) {
+ MutexLock lock(&stats_mutex_);
+ current_stats_ = stats;
+}
+
void TaskQueuePacedSender::MaybeProcessPackets(
Timestamp scheduled_process_time) {
RTC_DCHECK_RUN_ON(&task_queue_);
@@ -200,8 +218,10 @@ void TaskQueuePacedSender::MaybeProcessPackets(
next_process_time = pacing_controller_.NextSendTime();
}
- next_process_time =
- std::max(now + PacingController::kMinSleepTime, next_process_time);
+ const TimeDelta min_sleep = pacing_controller_.IsProbing()
+ ? PacingController::kMinSleepTime
+ : hold_back_window_;
+ next_process_time = std::max(now + min_sleep, next_process_time);
TimeDelta sleep_time = next_process_time - now;
if (next_process_time_.IsMinusInfinity() ||
@@ -217,59 +237,69 @@ void TaskQueuePacedSender::MaybeProcessPackets(
MaybeUpdateStats(false);
}
-std::vector<std::unique_ptr<RtpPacketToSend>>
-TaskQueuePacedSender::GeneratePadding(DataSize size) {
- return packet_router_->GeneratePadding(size.bytes());
-}
-
-void TaskQueuePacedSender::SendRtpPacket(
- std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- packet_router_->SendPacket(std::move(packet), cluster_info);
-}
-
void TaskQueuePacedSender::MaybeUpdateStats(bool is_scheduled_call) {
if (is_shutdown_) {
+ if (is_scheduled_call) {
+ stats_update_scheduled_ = false;
+ }
return;
}
Timestamp now = clock_->CurrentTime();
- if (!is_scheduled_call &&
- now - last_stats_time_ < kMinTimeBetweenStatsUpdates) {
- // Too frequent unscheduled stats update, return early.
- return;
+ if (is_scheduled_call) {
+ // Allow scheduled task to process packets to clear up an remaining debt
+ // level in an otherwise empty queue.
+ pacing_controller_.ProcessPackets();
+ } else {
+ if (now - last_stats_time_ < kMinTimeBetweenStatsUpdates) {
+ // Too frequent unscheduled stats update, return early.
+ return;
+ }
}
- rtc::CritScope cs(&stats_crit_);
- current_stats_.expected_queue_time = pacing_controller_.ExpectedQueueTime();
- current_stats_.first_sent_packet_time =
- pacing_controller_.FirstSentPacketTime();
- current_stats_.oldest_packet_wait_time =
- pacing_controller_.OldestPacketWaitTime();
- current_stats_.queue_size = pacing_controller_.QueueSizeData();
+ Stats new_stats;
+ new_stats.expected_queue_time = pacing_controller_.ExpectedQueueTime();
+ new_stats.first_sent_packet_time = pacing_controller_.FirstSentPacketTime();
+ new_stats.oldest_packet_wait_time = pacing_controller_.OldestPacketWaitTime();
+ new_stats.queue_size = pacing_controller_.QueueSizeData();
+ OnStatsUpdated(new_stats);
+
last_stats_time_ = now;
bool pacer_drained = pacing_controller_.QueueSizePackets() == 0 &&
pacing_controller_.CurrentBufferLevel().IsZero();
// If there's anything interesting to get from the pacer and this is a
- // scheduled call (no scheduled call in flight), post a new scheduled stats
+ // scheduled call (or no scheduled call in flight), post a new scheduled stats
// update.
- if (!pacer_drained && (is_scheduled_call || !stats_update_scheduled_)) {
- task_queue_.PostDelayedTask(
- [this]() {
- RTC_DCHECK_RUN_ON(&task_queue_);
- MaybeUpdateStats(true);
- },
- kMaxTimeBetweenStatsUpdates.ms<uint32_t>());
- stats_update_scheduled_ = true;
- } else {
+ if (!pacer_drained) {
+ if (!stats_update_scheduled_) {
+ // There is no pending delayed task to update stats, add one.
+ // Treat this call as being scheduled in order to bootstrap scheduling
+ // loop.
+ stats_update_scheduled_ = true;
+ is_scheduled_call = true;
+ }
+
+ // Only if on the scheduled call loop do we want to schedule a new delayed
+ // task.
+ if (is_scheduled_call) {
+ task_queue_.PostDelayedTask(
+ [this]() {
+ RTC_DCHECK_RUN_ON(&task_queue_);
+ MaybeUpdateStats(true);
+ },
+ kMaxTimeBetweenStatsUpdates.ms<uint32_t>());
+ }
+ } else if (is_scheduled_call) {
+ // This is a scheduled call, signing out since there's nothing interesting
+ // left to check.
stats_update_scheduled_ = false;
}
}
TaskQueuePacedSender::Stats TaskQueuePacedSender::GetStats() const {
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
return current_stats_;
}
diff --git a/modules/pacing/task_queue_paced_sender.h b/modules/pacing/task_queue_paced_sender.h
index 8b47f5ee3d..ba4f4667b7 100644
--- a/modules/pacing/task_queue_paced_sender.h
+++ b/modules/pacing/task_queue_paced_sender.h
@@ -29,7 +29,7 @@
#include "modules/pacing/packet_router.h"
#include "modules/pacing/rtp_packet_pacer.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_annotations.h"
@@ -38,15 +38,20 @@ namespace webrtc {
class Clock;
class RtcEventLog;
-class TaskQueuePacedSender : public RtpPacketPacer,
- public RtpPacketSender,
- private PacingController::PacketSender {
+class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender {
public:
- TaskQueuePacedSender(Clock* clock,
- PacketRouter* packet_router,
- RtcEventLog* event_log,
- const WebRtcKeyValueConfig* field_trials,
- TaskQueueFactory* task_queue_factory);
+ // The |hold_back_window| parameter sets a lower bound on time to sleep if
+ // there is currently a pacer queue and packets can't immediately be
+ // processed. Increasing this reduces thread wakeups at the expense of higher
+ // latency.
+ // TODO(bugs.webrtc.org/10809): Remove default value for hold_back_window.
+ TaskQueuePacedSender(
+ Clock* clock,
+ PacketRouter* packet_router,
+ RtcEventLog* event_log,
+ const WebRtcKeyValueConfig* field_trials,
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window = PacingController::kMinSleepTime);
~TaskQueuePacedSender() override;
@@ -99,7 +104,8 @@ class TaskQueuePacedSender : public RtpPacketPacer,
// specified by SetPacingRates() if needed to achieve this goal.
void SetQueueTimeLimit(TimeDelta limit) override;
- private:
+ protected:
+ // Exposed as protected for test.
struct Stats {
Stats()
: oldest_packet_wait_time(TimeDelta::Zero()),
@@ -110,7 +116,9 @@ class TaskQueuePacedSender : public RtpPacketPacer,
TimeDelta expected_queue_time;
absl::optional<Timestamp> first_sent_packet_time;
};
+ virtual void OnStatsUpdated(const Stats& stats);
+ private:
// Check if it is time to send packets, or schedule a delayed task if not.
// Use Timestamp::MinusInfinity() to indicate that this call has _not_
// been scheduled by the pacing controller. If this is the case, check if
@@ -118,20 +126,11 @@ class TaskQueuePacedSender : public RtpPacketPacer,
// method again with desired (finite) scheduled process time.
void MaybeProcessPackets(Timestamp scheduled_process_time);
- // Methods implementing PacedSenderController:PacketSender.
-
- void SendRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) override
- RTC_RUN_ON(task_queue_);
-
- std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
- DataSize size) override RTC_RUN_ON(task_queue_);
-
void MaybeUpdateStats(bool is_scheduled_call) RTC_RUN_ON(task_queue_);
Stats GetStats() const;
Clock* const clock_;
- PacketRouter* const packet_router_ RTC_GUARDED_BY(task_queue_);
+ const TimeDelta hold_back_window_;
PacingController pacing_controller_ RTC_GUARDED_BY(task_queue_);
// We want only one (valid) delayed process task in flight at a time.
@@ -156,8 +155,8 @@ class TaskQueuePacedSender : public RtpPacketPacer,
// never drain.
bool is_shutdown_ RTC_GUARDED_BY(task_queue_);
- rtc::CriticalSection stats_crit_;
- Stats current_stats_ RTC_GUARDED_BY(stats_crit_);
+ mutable Mutex stats_mutex_;
+ Stats current_stats_ RTC_GUARDED_BY(stats_mutex_);
rtc::TaskQueue task_queue_;
};
diff --git a/modules/pacing/task_queue_paced_sender_unittest.cc b/modules/pacing/task_queue_paced_sender_unittest.cc
index ba2aad21ff..b02f387768 100644
--- a/modules/pacing/task_queue_paced_sender_unittest.cc
+++ b/modules/pacing/task_queue_paced_sender_unittest.cc
@@ -24,6 +24,7 @@
#include "test/time_controller/simulated_time_controller.h"
using ::testing::_;
+using ::testing::AtLeast;
using ::testing::Return;
using ::testing::SaveArg;
@@ -37,28 +38,56 @@ constexpr size_t kDefaultPacketSize = 1234;
class MockPacketRouter : public PacketRouter {
public:
- MOCK_METHOD2(SendPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(size_t target_size_bytes));
+ MOCK_METHOD(void,
+ SendPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ FetchFec,
+ (),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (DataSize target_size),
+ (override));
+};
+
+class StatsUpdateObserver {
+ public:
+ StatsUpdateObserver() = default;
+ virtual ~StatsUpdateObserver() = default;
+
+ virtual void OnStatsUpdated() = 0;
+};
+
+class TaskQueuePacedSenderForTest : public TaskQueuePacedSender {
+ public:
+ TaskQueuePacedSenderForTest(
+ Clock* clock,
+ PacketRouter* packet_router,
+ RtcEventLog* event_log,
+ const WebRtcKeyValueConfig* field_trials,
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window = PacingController::kMinSleepTime)
+ : TaskQueuePacedSender(clock,
+ packet_router,
+ event_log,
+ field_trials,
+ task_queue_factory,
+ hold_back_window) {}
+
+ void OnStatsUpdated(const Stats& stats) override {
+ ++num_stats_updates_;
+ TaskQueuePacedSender::OnStatsUpdated(stats);
+ }
+
+ size_t num_stats_updates_ = 0;
};
} // namespace
namespace test {
-class TaskQueuePacedSenderTest : public ::testing::Test {
- public:
- TaskQueuePacedSenderTest()
- : time_controller_(Timestamp::Millis(1234)),
- pacer_(time_controller_.GetClock(),
- &packet_router_,
- /*event_log=*/nullptr,
- /*field_trials=*/nullptr,
- time_controller_.GetTaskQueueFactory()) {}
-
- protected:
std::unique_ptr<RtpPacketToSend> BuildRtpPacket(RtpPacketMediaType type) {
auto packet = std::make_unique<RtpPacketToSend>(nullptr);
packet->set_packet_type(type);
@@ -92,109 +121,290 @@ class TaskQueuePacedSenderTest : public ::testing::Test {
return packets;
}
- Timestamp CurrentTime() { return time_controller_.GetClock()->CurrentTime(); }
+ TEST(TaskQueuePacedSenderTest, PacesPackets) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
- GlobalSimulatedTimeController time_controller_;
- MockPacketRouter packet_router_;
- TaskQueuePacedSender pacer_;
-};
+ // Insert a number of packets, covering one second.
+ static constexpr size_t kPacketsToSend = 42;
+ pacer.SetPacingRates(
+ DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend),
+ DataRate::Zero());
+ pacer.EnqueuePackets(
+ GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend));
+
+ // Expect all of them to be sent.
+ size_t packets_sent = 0;
+ Timestamp end_time = Timestamp::PlusInfinity();
+ EXPECT_CALL(packet_router, SendPacket)
+ .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) {
+ ++packets_sent;
+ if (packets_sent == kPacketsToSend) {
+ end_time = time_controller.GetClock()->CurrentTime();
+ }
+ });
+
+ const Timestamp start_time = time_controller.GetClock()->CurrentTime();
+
+ // Packets should be sent over a period of close to 1s. Expect a little
+ // lower than this since initial probing is a bit quicker.
+ time_controller.AdvanceTime(TimeDelta::Seconds(1));
+ EXPECT_EQ(packets_sent, kPacketsToSend);
+ ASSERT_TRUE(end_time.IsFinite());
+ EXPECT_NEAR((end_time - start_time).ms<double>(), 1000.0, 50.0);
+ }
+
+ TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
+
+ // Insert a number of packets to be sent 200ms apart.
+ const size_t kPacketsPerSecond = 5;
+ const DataRate kPacingRate =
+ DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsPerSecond);
+ pacer.SetPacingRates(kPacingRate, DataRate::Zero());
+
+ // Send some initial packets to be rid of any probes.
+ EXPECT_CALL(packet_router, SendPacket).Times(kPacketsPerSecond);
+ pacer.EnqueuePackets(
+ GeneratePackets(RtpPacketMediaType::kVideo, kPacketsPerSecond));
+ time_controller.AdvanceTime(TimeDelta::Seconds(1));
+
+ // Insert three packets, and record send time of each of them.
+ // After the second packet is sent, double the send rate so we can
+ // check the third packets is sent after half the wait time.
+ Timestamp first_packet_time = Timestamp::MinusInfinity();
+ Timestamp second_packet_time = Timestamp::MinusInfinity();
+ Timestamp third_packet_time = Timestamp::MinusInfinity();
+
+ EXPECT_CALL(packet_router, SendPacket)
+ .Times(3)
+ .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) {
+ if (first_packet_time.IsInfinite()) {
+ first_packet_time = time_controller.GetClock()->CurrentTime();
+ } else if (second_packet_time.IsInfinite()) {
+ second_packet_time = time_controller.GetClock()->CurrentTime();
+ pacer.SetPacingRates(2 * kPacingRate, DataRate::Zero());
+ } else {
+ third_packet_time = time_controller.GetClock()->CurrentTime();
+ }
+ });
+
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 3));
+ time_controller.AdvanceTime(TimeDelta::Millis(500));
+ ASSERT_TRUE(third_packet_time.IsFinite());
+ EXPECT_NEAR((second_packet_time - first_packet_time).ms<double>(), 200.0,
+ 1.0);
+ EXPECT_NEAR((third_packet_time - second_packet_time).ms<double>(), 100.0,
+ 1.0);
+ }
+
+ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
+
+ const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125);
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add some initial video packets, only one should be sent.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+
+ // Advance time, but still before next packet should be sent.
+ time_controller.AdvanceTime(kPacketPacingTime / 2);
+
+ // Insert an audio packet, it should be sent immediately.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+ }
-TEST_F(TaskQueuePacedSenderTest, PacesPackets) {
- // Insert a number of packets, covering one second.
- static constexpr size_t kPacketsToSend = 42;
- pacer_.SetPacingRates(
- DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend),
- DataRate::Zero());
- pacer_.EnqueuePackets(
- GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend));
-
- // Expect all of them to be sent.
- size_t packets_sent = 0;
- Timestamp end_time = Timestamp::PlusInfinity();
- EXPECT_CALL(packet_router_, SendPacket)
- .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- ++packets_sent;
- if (packets_sent == kPacketsToSend) {
- end_time = time_controller_.GetClock()->CurrentTime();
- }
- });
-
- const Timestamp start_time = time_controller_.GetClock()->CurrentTime();
-
- // Packets should be sent over a period of close to 1s. Expect a little lower
- // than this since initial probing is a bit quicker.
- time_controller_.AdvanceTime(TimeDelta::Seconds(1));
- EXPECT_EQ(packets_sent, kPacketsToSend);
- ASSERT_TRUE(end_time.IsFinite());
- EXPECT_NEAR((end_time - start_time).ms<double>(), 1000.0, 50.0);
-}
-
-TEST_F(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) {
- // Insert a number of packets to be sent 200ms apart.
- const size_t kPacketsPerSecond = 5;
- const DataRate kPacingRate =
- DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsPerSecond);
- pacer_.SetPacingRates(kPacingRate, DataRate::Zero());
-
- // Send some initial packets to be rid of any probes.
- EXPECT_CALL(packet_router_, SendPacket).Times(kPacketsPerSecond);
- pacer_.EnqueuePackets(
- GeneratePackets(RtpPacketMediaType::kVideo, kPacketsPerSecond));
- time_controller_.AdvanceTime(TimeDelta::Seconds(1));
-
- // Insert three packets, and record send time of each of them.
- // After the second packet is sent, double the send rate so we can
- // check the third packets is sent after half the wait time.
- Timestamp first_packet_time = Timestamp::MinusInfinity();
- Timestamp second_packet_time = Timestamp::MinusInfinity();
- Timestamp third_packet_time = Timestamp::MinusInfinity();
-
- EXPECT_CALL(packet_router_, SendPacket)
- .Times(3)
- .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- if (first_packet_time.IsInfinite()) {
- first_packet_time = CurrentTime();
- } else if (second_packet_time.IsInfinite()) {
- second_packet_time = CurrentTime();
- pacer_.SetPacingRates(2 * kPacingRate, DataRate::Zero());
- } else {
- third_packet_time = CurrentTime();
- }
- });
-
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 3));
- time_controller_.AdvanceTime(TimeDelta::Millis(500));
- ASSERT_TRUE(third_packet_time.IsFinite());
- EXPECT_NEAR((second_packet_time - first_packet_time).ms<double>(), 200.0,
- 1.0);
- EXPECT_NEAR((third_packet_time - second_packet_time).ms<double>(), 100.0,
- 1.0);
-}
-
-TEST_F(TaskQueuePacedSenderTest, SendsAudioImmediately) {
- const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125);
- const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
- const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate;
-
- pacer_.SetPacingRates(kPacingDataRate, DataRate::Zero());
-
- // Add some initial video packets, only one should be sent.
- EXPECT_CALL(packet_router_, SendPacket);
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
- time_controller_.AdvanceTime(TimeDelta::Zero());
- ::testing::Mock::VerifyAndClearExpectations(&packet_router_);
-
- // Advance time, but still before next packet should be sent.
- time_controller_.AdvanceTime(kPacketPacingTime / 2);
-
- // Insert an audio packet, it should be sent immediately.
- EXPECT_CALL(packet_router_, SendPacket);
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1));
- time_controller_.AdvanceTime(TimeDelta::Zero());
- ::testing::Mock::VerifyAndClearExpectations(&packet_router_);
-}
+ TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds one ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(1);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add 10 packets. The first should be sent immediately since the buffers
+ // are clear.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+
+ // Advance time to 1ms before the coalescing window ends. No packets should
+ // be sent.
+ EXPECT_CALL(packet_router, SendPacket).Times(0);
+ time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1));
+
+ // Advance time to where coalescing window ends. All packets that should
+ // have been sent up til now will be sent.
+ EXPECT_CALL(packet_router, SendPacket).Times(5);
+ time_controller.AdvanceTime(TimeDelta::Millis(1));
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+ }
+
+ TEST(TaskQueuePacedSenderTest, ProbingOverridesCoalescingWindow) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds one ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(1);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add 10 packets. The first should be sent immediately since the buffers
+ // are clear. This will also trigger the probe to start.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1));
+ pacer.CreateProbeCluster(kPacingDataRate * 2, 17);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+
+ // Advance time to 1ms before the coalescing window ends. Packets should be
+ // flying.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1));
+ time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1));
+ }
+
+ TEST(TaskQueuePacedSenderTest, RespectedMinTimeBetweenStatsUpdates) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+ const DataRate kPacingDataRate = DataRate::KilobitsPerSec(300);
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
+
+ // Nothing inserted, no stats updates yet.
+ EXPECT_EQ(pacer.num_stats_updates_, 0u);
+
+ // Insert one packet, stats should be updated.
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, 1u);
+
+ // Advance time half of the min stats update interval, and trigger a
+ // refresh - stats should not be updated yet.
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
+ pacer.EnqueuePackets({});
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, 1u);
+
+ // Advance time the next half, now stats update is triggered.
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
+ pacer.EnqueuePackets({});
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, 2u);
+ }
+
+ TEST(TaskQueuePacedSenderTest, ThrottlesStatsUpdates) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds 10ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(10);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+ const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
+ const TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis(33);
+
+ // Nothing inserted, no stats updates yet.
+ size_t num_expected_stats_updates = 0;
+ EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates);
+ // Updating pacing rates refreshes stats.
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Record time when we insert first packet, this triggers the scheduled
+ // stats updating.
+ Clock* const clock = time_controller.GetClock();
+ const Timestamp start_time = clock->CurrentTime();
+
+ while (clock->CurrentTime() - start_time <=
+ kMaxTimeBetweenStatsUpdates - kPacketPacingTime) {
+ // Enqueue packet, expect stats update.
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time to halfway through pacing time, expect another stats
+ // update.
+ time_controller.AdvanceTime(kPacketPacingTime / 2);
+ pacer.EnqueuePackets({});
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time the rest of the way.
+ time_controller.AdvanceTime(kPacketPacingTime / 2);
+ }
+
+ // At this point, the pace queue is drained so there is no more intersting
+ // update to be made - but there is still as schduled task that should run
+ // |kMaxTimeBetweenStatsUpdates| after the first update.
+ time_controller.AdvanceTime(start_time + kMaxTimeBetweenStatsUpdates -
+ clock->CurrentTime());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time a significant time - don't expect any more calls as stats
+ // updating does not happen when queue is drained.
+ time_controller.AdvanceTime(TimeDelta::Millis(400));
+ EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
+ }
} // namespace test
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/BUILD.gn b/modules/remote_bitrate_estimator/BUILD.gn
index d7b0397ea5..0b2d2cdeb1 100644
--- a/modules/remote_bitrate_estimator/BUILD.gn
+++ b/modules/remote_bitrate_estimator/BUILD.gn
@@ -56,9 +56,12 @@ rtc_library("remote_bitrate_estimator") {
"../../rtc_base:rtc_numerics",
"../../rtc_base:safe_minmax",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -76,6 +79,8 @@ if (!build_with_chromium) {
"../../rtc_base:rtc_base_approved",
"../../test:rtp_test_utils",
"../rtp_rtcp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
]
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index b146d00a2b..e8f835ca6a 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -282,7 +282,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
uint32_t target_bitrate_bps = 0;
std::vector<uint32_t> ssrcs;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
TimeoutStreams(now_ms);
RTC_DCHECK(inter_arrival_.get());
@@ -391,12 +391,12 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
void RemoteBitrateEstimatorAbsSendTime::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms));
}
void RemoteBitrateEstimatorAbsSendTime::RemoveStream(uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ssrcs_.erase(ssrc);
}
@@ -409,7 +409,7 @@ bool RemoteBitrateEstimatorAbsSendTime::LatestEstimate(
// thread.
RTC_DCHECK(ssrcs);
RTC_DCHECK(bitrate_bps);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!remote_rate_.ValidEstimate()) {
return false;
}
@@ -425,7 +425,7 @@ bool RemoteBitrateEstimatorAbsSendTime::LatestEstimate(
void RemoteBitrateEstimatorAbsSendTime::SetMinBitrate(int min_bitrate_bps) {
// Called from both the configuration thread and the network thread. Shouldn't
// be called from the network thread in the future.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
remote_rate_.SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps));
}
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
index 9fd4974116..2423363348 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
@@ -28,9 +28,9 @@
#include "modules/remote_bitrate_estimator/overuse_estimator.h"
#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -114,12 +114,12 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
// Returns true if a probe which changed the estimate was detected.
ProbeResult ProcessClusters(int64_t now_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
bool IsBitrateImproving(int probe_bitrate_bps) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
- void TimeoutStreams(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_);
+ void TimeoutStreams(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
rtc::RaceChecker network_race_;
Clock* const clock_;
@@ -138,9 +138,9 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
int64_t last_update_ms_;
bool uma_recorded_;
- rtc::CriticalSection crit_;
- Ssrcs ssrcs_ RTC_GUARDED_BY(&crit_);
- AimdRateControl remote_rate_ RTC_GUARDED_BY(&crit_);
+ mutable Mutex mutex_;
+ Ssrcs ssrcs_ RTC_GUARDED_BY(&mutex_);
+ AimdRateControl remote_rate_ RTC_GUARDED_BY(&mutex_);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorAbsSendTime);
};
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index db3bbe93c0..46d8fbc434 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -95,7 +95,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
uint32_t rtp_timestamp =
header.timestamp + header.extension.transmissionTimeOffset;
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc);
if (it == overuse_detectors_.end()) {
// This is a new SSRC. Adding to map.
@@ -158,7 +158,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
void RemoteBitrateEstimatorSingleStream::Process() {
{
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
UpdateEstimate(clock_->TimeInMilliseconds());
}
last_process_time_ = clock_->TimeInMilliseconds();
@@ -168,7 +168,7 @@ int64_t RemoteBitrateEstimatorSingleStream::TimeUntilNextProcess() {
if (last_process_time_ < 0) {
return 0;
}
- rtc::CritScope cs_(&crit_sect_);
+ MutexLock lock_(&mutex_);
RTC_DCHECK_GT(process_interval_ms_, 0);
return last_process_time_ + process_interval_ms_ -
clock_->TimeInMilliseconds();
@@ -217,12 +217,12 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) {
void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
GetRemoteRate()->SetRtt(TimeDelta::Millis(avg_rtt_ms));
}
void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc);
if (it != overuse_detectors_.end()) {
delete it->second;
@@ -233,7 +233,7 @@ void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) {
bool RemoteBitrateEstimatorSingleStream::LatestEstimate(
std::vector<uint32_t>* ssrcs,
uint32_t* bitrate_bps) const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
assert(bitrate_bps);
if (!remote_rate_->ValidEstimate()) {
return false;
@@ -264,7 +264,7 @@ AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() {
}
void RemoteBitrateEstimatorSingleStream::SetMinBitrate(int min_bitrate_bps) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
remote_rate_->SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps));
}
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
index a28109ce99..6da67e5a13 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
@@ -22,8 +22,8 @@
#include "modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -54,26 +54,25 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
typedef std::map<uint32_t, Detector*> SsrcOveruseEstimatorMap;
// Triggers a new estimate calculation.
- void UpdateEstimate(int64_t time_now)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ void UpdateEstimate(int64_t time_now) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void GetSsrcs(std::vector<uint32_t>* ssrcs) const
- RTC_SHARED_LOCKS_REQUIRED(crit_sect_);
+ RTC_SHARED_LOCKS_REQUIRED(mutex_);
// Returns |remote_rate_| if the pointed to object exists,
// otherwise creates it.
- AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
const FieldTrialBasedConfig field_trials_;
- SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(crit_sect_);
- RateStatistics incoming_bitrate_ RTC_GUARDED_BY(crit_sect_);
- uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<AimdRateControl> remote_rate_ RTC_GUARDED_BY(crit_sect_);
- RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(crit_sect_);
- rtc::CriticalSection crit_sect_;
+ SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(mutex_);
+ RateStatistics incoming_bitrate_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<AimdRateControl> remote_rate_ RTC_GUARDED_BY(mutex_);
+ RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(mutex_);
+ mutable Mutex mutex_;
int64_t last_process_time_;
- int64_t process_interval_ms_ RTC_GUARDED_BY(crit_sect_);
+ int64_t process_interval_ms_ RTC_GUARDED_BY(mutex_);
bool uma_recorded_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorSingleStream);
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
index f044721fe0..a9cc170a35 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
@@ -61,7 +61,7 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
RTC_LOG(LS_WARNING) << "Arrival time out of bounds: " << arrival_time_ms;
return;
}
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
media_ssrc_ = header.ssrc;
int64_t seq = 0;
@@ -134,7 +134,7 @@ bool RemoteEstimatorProxy::LatestEstimate(std::vector<unsigned int>* ssrcs,
}
int64_t RemoteEstimatorProxy::TimeUntilNextProcess() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (!send_periodic_feedback_) {
// Wait a day until next process.
return 24 * 60 * 60 * 1000;
@@ -147,7 +147,7 @@ int64_t RemoteEstimatorProxy::TimeUntilNextProcess() {
}
void RemoteEstimatorProxy::Process() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (!send_periodic_feedback_) {
return;
}
@@ -169,7 +169,7 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) {
kTwccReportSize * 8.0 * 1000.0 / send_config_.min_interval->ms();
// Let TWCC reports occupy 5% of total bandwidth.
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
send_interval_ms_ = static_cast<int>(
0.5 + kTwccReportSize * 8.0 * 1000.0 /
rtc::SafeClamp(send_config_.bandwidth_fraction * bitrate_bps,
@@ -178,7 +178,7 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) {
void RemoteEstimatorProxy::SetSendPeriodicFeedback(
bool send_periodic_feedback) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
send_periodic_feedback_ = send_periodic_feedback;
}
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/modules/remote_bitrate_estimator/remote_estimator_proxy.h
index e11eb1fa7a..a4adefc5ee 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy.h
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.h
@@ -17,9 +17,9 @@
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -92,7 +92,7 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
const TransportWideFeedbackConfig send_config_;
int64_t last_process_time_ms_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
// |network_state_estimator_| may be null.
NetworkStateEstimator* const network_state_estimator_
RTC_PT_GUARDED_BY(&lock_);
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index f7e8ffc9fc..da995922d9 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -65,9 +65,10 @@ std::vector<int64_t> TimestampsMs(
class MockTransportFeedbackSender : public TransportFeedbackSenderInterface {
public:
- MOCK_METHOD1(
- SendCombinedRtcpPacket,
- bool(std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets));
+ MOCK_METHOD(bool,
+ SendCombinedRtcpPacket,
+ (std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets),
+ (override));
};
class RemoteEstimatorProxyTest : public ::testing::Test {
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
index cf44fa070a..f99576f59a 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
+++ b/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
@@ -61,27 +61,27 @@ Logging* Logging::GetInstance() {
}
void Logging::SetGlobalContext(uint32_t name) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
thread_map_[rtc::CurrentThreadId()].global_state.tag = ToString(name);
}
void Logging::SetGlobalContext(const std::string& name) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
thread_map_[rtc::CurrentThreadId()].global_state.tag = name;
}
void Logging::SetGlobalContext(const char* name) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
thread_map_[rtc::CurrentThreadId()].global_state.tag = name;
}
void Logging::SetGlobalEnable(bool enabled) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
thread_map_[rtc::CurrentThreadId()].global_state.enabled = enabled;
}
void Logging::Log(const char format[], ...) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -118,7 +118,7 @@ void Logging::Plot(int figure,
double value,
uint32_t ssrc,
const std::string& alg_name) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -132,7 +132,7 @@ void Logging::PlotBar(int figure,
const std::string& name,
double value,
int flow_id) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -145,7 +145,7 @@ void Logging::PlotBaselineBar(int figure,
const std::string& name,
double value,
int flow_id) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -161,7 +161,7 @@ void Logging::PlotErrorBar(int figure,
double yhigh,
const std::string& error_title,
int flow_id) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -180,7 +180,7 @@ void Logging::PlotLimitErrorBar(int figure,
double ymax,
const std::string& limit_title,
int flow_id) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -195,7 +195,7 @@ void Logging::PlotLabel(int figure,
const std::string& title,
const std::string& y_label,
int num_flows) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
const State& state = it->second.stack.top();
@@ -229,7 +229,7 @@ void Logging::State::MergePrevious(const State& previous) {
void Logging::PushState(const std::string& append_to_tag,
int64_t timestamp_ms,
bool enabled) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
State new_state(append_to_tag, timestamp_ms, enabled);
ThreadState* thread_state = &thread_map_[rtc::CurrentThreadId()];
std::stack<State>* stack = &thread_state->stack;
@@ -242,7 +242,7 @@ void Logging::PushState(const std::string& append_to_tag,
}
void Logging::PopState() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
RTC_DCHECK(it != thread_map_.end());
std::stack<State>* stack = &it->second.stack;
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/modules/remote_bitrate_estimator/test/bwe_test_logging.h
index a399d0b694..5a30da8289 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test_logging.h
+++ b/modules/remote_bitrate_estimator/test/bwe_test_logging.h
@@ -129,7 +129,7 @@
#include <string>
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \
do { \
@@ -345,7 +345,7 @@ class Logging {
bool enabled);
void PopState();
- rtc::CriticalSection crit_sect_;
+ Mutex mutex_;
ThreadMap thread_map_;
RTC_DISALLOW_COPY_AND_ASSIGN(Logging);
diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn
index 0ac6900e65..969df7bd26 100644
--- a/modules/rtp_rtcp/BUILD.gn
+++ b/modules/rtp_rtcp/BUILD.gn
@@ -118,6 +118,8 @@ rtc_library("rtp_rtcp_format") {
"../../rtc_base/system:unused",
"../../system_wrappers",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -132,14 +134,18 @@ rtc_library("rtp_rtcp") {
"include/flexfec_sender.h",
"include/receive_statistics.h",
"include/remote_ntp_time_estimator.h",
- "include/rtp_rtcp.h",
+ "include/rtp_rtcp.h", # deprecated
"include/ulpfec_receiver.h",
"source/absolute_capture_time_receiver.cc",
"source/absolute_capture_time_receiver.h",
"source/absolute_capture_time_sender.cc",
"source/absolute_capture_time_sender.h",
+ "source/active_decode_targets_helper.cc",
+ "source/active_decode_targets_helper.h",
"source/create_video_rtp_depacketizer.cc",
"source/create_video_rtp_depacketizer.h",
+ "source/deprecated/deprecated_rtp_sender_egress.cc",
+ "source/deprecated/deprecated_rtp_sender_egress.h",
"source/dtmf_queue.cc",
"source/dtmf_queue.h",
"source/fec_private_tables_bursty.cc",
@@ -186,6 +192,9 @@ rtc_library("rtp_rtcp") {
"source/rtp_rtcp_config.h",
"source/rtp_rtcp_impl.cc",
"source/rtp_rtcp_impl.h",
+ "source/rtp_rtcp_impl2.cc",
+ "source/rtp_rtcp_impl2.h",
+ "source/rtp_rtcp_interface.h",
"source/rtp_sender.cc",
"source/rtp_sender.h",
"source/rtp_sender_audio.cc",
@@ -285,13 +294,18 @@ rtc_library("rtp_rtcp") {
"../../rtc_base:rtc_numerics",
"../../rtc_base:safe_minmax",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:pending_task_safety_flag",
+ "../../rtc_base/task_utils:repeating_task",
"../../rtc_base/task_utils:to_queued_task",
"../../rtc_base/time:timestamp_extrapolator",
"../../system_wrappers",
"../../system_wrappers:metrics",
"../remote_bitrate_estimator",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/container:inlined_vector",
@@ -328,6 +342,8 @@ rtc_library("rtcp_transceiver") {
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
@@ -347,6 +363,8 @@ rtc_library("rtp_video_header") {
"../../api/video:video_frame_type",
"../../api/video:video_rtp_headers",
"../../modules/video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
@@ -384,8 +402,8 @@ rtc_library("mock_rtp_rtcp") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -425,6 +443,7 @@ if (rtc_include_tests) {
sources = [
"source/absolute_capture_time_receiver_unittest.cc",
"source/absolute_capture_time_sender_unittest.cc",
+ "source/active_decode_targets_helper_unittest.cc",
"source/byte_io_unittest.cc",
"source/fec_private_tables_bursty_unittest.cc",
"source/flexfec_header_reader_writer_unittest.cc",
@@ -463,6 +482,7 @@ if (rtc_include_tests) {
"source/rtcp_sender_unittest.cc",
"source/rtcp_transceiver_impl_unittest.cc",
"source/rtcp_transceiver_unittest.cc",
+ "source/rtp_dependency_descriptor_extension_unittest.cc",
"source/rtp_fec_unittest.cc",
"source/rtp_format_h264_unittest.cc",
"source/rtp_format_unittest.cc",
@@ -477,6 +497,7 @@ if (rtc_include_tests) {
"source/rtp_packet_history_unittest.cc",
"source/rtp_packet_unittest.cc",
"source/rtp_packetizer_av1_unittest.cc",
+ "source/rtp_rtcp_impl2_unittest.cc",
"source/rtp_rtcp_impl_unittest.cc",
"source/rtp_sender_audio_unittest.cc",
"source/rtp_sender_unittest.cc",
@@ -533,6 +554,7 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:rtc_numerics",
"../../rtc_base:task_queue_for_test",
+ "../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
"../../test:field_trial",
"../../test:mock_frame_transformer",
@@ -540,7 +562,10 @@ if (rtc_include_tests) {
"../../test:rtp_test_utils",
"../../test:test_common",
"../../test:test_support",
+ "../../test/time_controller:time_controller",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
diff --git a/modules/rtp_rtcp/include/flexfec_sender.h b/modules/rtp_rtcp/include/flexfec_sender.h
index 7fe20181af..737593e04c 100644
--- a/modules/rtp_rtcp/include/flexfec_sender.h
+++ b/modules/rtp_rtcp/include/flexfec_sender.h
@@ -24,6 +24,7 @@
#include "modules/rtp_rtcp/source/video_fec_generator.h"
#include "rtc_base/random.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -92,8 +93,8 @@ class FlexfecSender : public VideoFecGenerator {
const RtpHeaderExtensionMap rtp_header_extension_map_;
const size_t header_extensions_size_;
- rtc::CriticalSection crit_;
- RateStatistics fec_bitrate_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/include/rtp_header_extension_map.h b/modules/rtp_rtcp/include/rtp_header_extension_map.h
index 360a619f82..ff2d34d60d 100644
--- a/modules/rtp_rtcp/include/rtp_header_extension_map.h
+++ b/modules/rtp_rtcp/include/rtp_header_extension_map.h
@@ -51,10 +51,6 @@ class RtpHeaderExtensionMap {
return ids_[type];
}
- // TODO(danilchap): Remove use of the functions below.
- RTC_DEPRECATED int32_t Register(RTPExtensionType type, int id) {
- return RegisterByType(id, type) ? 0 : -1;
- }
int32_t Deregister(RTPExtensionType type);
void Deregister(absl::string_view uri);
diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h
index f91f0d13a3..8663296eba 100644
--- a/modules/rtp_rtcp/include/rtp_rtcp.h
+++ b/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -12,456 +12,70 @@
#define MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_
#include <memory>
-#include <set>
#include <string>
-#include <utility>
#include <vector>
-#include "absl/strings/string_view.h"
-#include "absl/types/optional.h"
-#include "api/frame_transformer_interface.h"
-#include "api/scoped_refptr.h"
-#include "api/transport/webrtc_key_value_config.h"
-#include "api/video/video_bitrate_allocation.h"
#include "modules/include/module.h"
-#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
-#include "modules/rtp_rtcp/source/video_fec_generator.h"
-#include "rtc_base/constructor_magic.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/deprecation.h"
namespace webrtc {
-// Forward declarations.
-class FrameEncryptorInterface;
-class RateLimiter;
-class ReceiveStatisticsProvider;
-class RemoteBitrateEstimator;
-class RtcEventLog;
-class RTPSender;
-class Transport;
-class VideoBitrateAllocationObserver;
-
-namespace rtcp {
-class TransportFeedback;
-}
-
-class RtpRtcp : public Module, public RtcpFeedbackSenderInterface {
+// DEPRECATED. Do not use.
+class RtpRtcp : public Module, public RtpRtcpInterface {
public:
- struct Configuration {
- Configuration();
- Configuration(Configuration&& rhs);
-
- // True for a audio version of the RTP/RTCP module object false will create
- // a video version.
- bool audio = false;
- bool receiver_only = false;
-
- // The clock to use to read time. If nullptr then system clock will be used.
- Clock* clock = nullptr;
-
- ReceiveStatisticsProvider* receive_statistics = nullptr;
-
- // Transport object that will be called when packets are ready to be sent
- // out on the network.
- Transport* outgoing_transport = nullptr;
-
- // Called when the receiver requests an intra frame.
- RtcpIntraFrameObserver* intra_frame_callback = nullptr;
-
- // Called when the receiver sends a loss notification.
- RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr;
-
- // Called when we receive a changed estimate from the receiver of out
- // stream.
- RtcpBandwidthObserver* bandwidth_callback = nullptr;
-
- NetworkStateEstimateObserver* network_state_estimate_observer = nullptr;
- TransportFeedbackObserver* transport_feedback_callback = nullptr;
- VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr;
- RtcpRttStats* rtt_stats = nullptr;
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
- // Called on receipt of RTCP report block from remote side.
- // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in
- // favor of ReportBlockDataObserver.
- // TODO(bugs.webrtc.org/10679): Consider whether we want to use
- // only getters or only callbacks. If we decide on getters, the
- // ReportBlockDataObserver should also be removed in favor of
- // GetLatestReportBlockData().
- RtcpStatisticsCallback* rtcp_statistics_callback = nullptr;
- RtcpCnameCallback* rtcp_cname_callback = nullptr;
- ReportBlockDataObserver* report_block_data_observer = nullptr;
-
- // Estimates the bandwidth available for a set of streams from the same
- // client.
- RemoteBitrateEstimator* remote_bitrate_estimator = nullptr;
-
- // Spread any bursts of packets into smaller bursts to minimize packet loss.
- RtpPacketSender* paced_sender = nullptr;
-
- // Generates FEC packets.
- // TODO(sprang): Wire up to RtpSenderEgress.
- VideoFecGenerator* fec_generator = nullptr;
-
- BitrateStatisticsObserver* send_bitrate_observer = nullptr;
- SendSideDelayObserver* send_side_delay_observer = nullptr;
- RtcEventLog* event_log = nullptr;
- SendPacketObserver* send_packet_observer = nullptr;
- RateLimiter* retransmission_rate_limiter = nullptr;
- StreamDataCountersCallback* rtp_stats_callback = nullptr;
-
- int rtcp_report_interval_ms = 0;
-
- // Update network2 instead of pacer_exit field of video timing extension.
- bool populate_network2_timestamp = false;
-
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
-
- // E2EE Custom Video Frame Encryption
- FrameEncryptorInterface* frame_encryptor = nullptr;
- // Require all outgoing frames to be encrypted with a FrameEncryptor.
- bool require_frame_encryption = false;
-
- // Corresponds to extmap-allow-mixed in SDP negotiation.
- bool extmap_allow_mixed = false;
-
- // If true, the RTP sender will always annotate outgoing packets with
- // MID and RID header extensions, if provided and negotiated.
- // If false, the RTP sender will stop sending MID and RID header extensions,
- // when it knows that the receiver is ready to demux based on SSRC. This is
- // done by RTCP RR acking.
- bool always_send_mid_and_rid = false;
-
- // If set, field trials are read from |field_trials|, otherwise
- // defaults to webrtc::FieldTrialBasedConfig.
- const WebRtcKeyValueConfig* field_trials = nullptr;
-
- // SSRCs for media and retransmission, respectively.
- // FlexFec SSRC is fetched from |flexfec_sender|.
- uint32_t local_media_ssrc = 0;
- absl::optional<uint32_t> rtx_send_ssrc;
-
- bool need_rtp_packet_infos = false;
-
- // If true, the RTP packet history will select RTX packets based on
- // heuristics such as send time, retransmission count etc, in order to
- // make padding potentially more useful.
- // If false, the last packet will always be picked. This may reduce CPU
- // overhead.
- bool enable_rtx_padding_prioritization = true;
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
- };
-
- // Creates an RTP/RTCP module object using provided |configuration|.
- static std::unique_ptr<RtpRtcp> Create(const Configuration& configuration);
+ // Instantiates a deprecated version of the RtpRtcp module.
+ static std::unique_ptr<RtpRtcp> RTC_DEPRECATED
+ Create(const Configuration& configuration) {
+ return DEPRECATED_Create(configuration);
+ }
- // **************************************************************************
- // Receiver functions
- // **************************************************************************
+ static std::unique_ptr<RtpRtcp> DEPRECATED_Create(
+ const Configuration& configuration);
- virtual void IncomingRtcpPacket(const uint8_t* incoming_packet,
- size_t incoming_packet_length) = 0;
-
- virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
-
- // **************************************************************************
- // Sender
- // **************************************************************************
-
- // Sets the maximum size of an RTP packet, including RTP headers.
- virtual void SetMaxRtpPacketSize(size_t size) = 0;
+ // (TMMBR) Temporary Max Media Bit Rate
+ RTC_DEPRECATED virtual bool TMMBR() const = 0;
- // Returns max RTP packet size. Takes into account RTP headers and
- // FEC/ULP/RED overhead (when FEC is enabled).
- virtual size_t MaxRtpPacketSize() const = 0;
+ RTC_DEPRECATED virtual void SetTMMBRStatus(bool enable) = 0;
- virtual void RegisterSendPayloadFrequency(int payload_type,
- int payload_frequency) = 0;
+ // Returns -1 on failure else 0.
+ RTC_DEPRECATED virtual int32_t AddMixedCNAME(uint32_t ssrc,
+ const char* cname) = 0;
- // Unregisters a send payload.
- // |payload_type| - payload type of codec
// Returns -1 on failure else 0.
- virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0;
+ RTC_DEPRECATED virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0;
- virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0;
+ // Returns remote CName.
+ // Returns -1 on failure else 0.
+ RTC_DEPRECATED virtual int32_t RemoteCNAME(
+ uint32_t remote_ssrc,
+ char cname[RTCP_CNAME_SIZE]) const = 0;
// (De)registers RTP header extension type and id.
// Returns -1 on failure else 0.
RTC_DEPRECATED virtual int32_t RegisterSendRtpHeaderExtension(
RTPExtensionType type,
uint8_t id) = 0;
- // Register extension by uri, triggers CHECK on falure.
- virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0;
-
- virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
- virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0;
-
- // Returns true if RTP module is send media, and any of the extensions
- // required for bandwidth estimation is registered.
- virtual bool SupportsPadding() const = 0;
- // Same as SupportsPadding(), but additionally requires that
- // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option
- // enabled.
- virtual bool SupportsRtxPayloadPadding() const = 0;
-
- // Returns start timestamp.
- virtual uint32_t StartTimestamp() const = 0;
-
- // Sets start timestamp. Start timestamp is set to a random value if this
- // function is never called.
- virtual void SetStartTimestamp(uint32_t timestamp) = 0;
-
- // Returns SequenceNumber.
- virtual uint16_t SequenceNumber() const = 0;
-
- // Sets SequenceNumber, default is a random number.
- virtual void SetSequenceNumber(uint16_t seq) = 0;
-
- virtual void SetRtpState(const RtpState& rtp_state) = 0;
- virtual void SetRtxState(const RtpState& rtp_state) = 0;
- virtual RtpState GetRtpState() const = 0;
- virtual RtpState GetRtxState() const = 0;
-
- // Returns SSRC.
- virtual uint32_t SSRC() const = 0;
-
- // Sets the value for sending in the RID (and Repaired) RTP header extension.
- // RIDs are used to identify an RTP stream if SSRCs are not negotiated.
- // If the RID and Repaired RID extensions are not registered, the RID will
- // not be sent.
- virtual void SetRid(const std::string& rid) = 0;
-
- // Sets the value for sending in the MID RTP header extension.
- // The MID RTP header extension should be registered for this to do anything.
- // Once set, this value can not be changed or removed.
- virtual void SetMid(const std::string& mid) = 0;
-
- // Sets CSRC.
- // |csrcs| - vector of CSRCs
- virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
-
- // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination
- // of values of the enumerator RtxMode.
- virtual void SetRtxSendStatus(int modes) = 0;
-
- // Returns status of sending RTX (RFC 4588). The returned value can be
- // a combination of values of the enumerator RtxMode.
- virtual int RtxSendStatus() const = 0;
-
- // Returns the SSRC used for RTX if set, otherwise a nullopt.
- virtual absl::optional<uint32_t> RtxSsrc() const = 0;
-
- // Sets the payload type to use when sending RTX packets. Note that this
- // doesn't enable RTX, only the payload type is set.
- virtual void SetRtxSendPayloadType(int payload_type,
- int associated_payload_type) = 0;
-
- // Returns the FlexFEC SSRC, if there is one.
- virtual absl::optional<uint32_t> FlexfecSsrc() const = 0;
-
- // Sets sending status. Sends kRtcpByeCode when going from true to false.
- // Returns -1 on failure else 0.
- virtual int32_t SetSendingStatus(bool sending) = 0;
-
- // Returns current sending status.
- virtual bool Sending() const = 0;
-
- // Starts/Stops media packets. On by default.
- virtual void SetSendingMediaStatus(bool sending) = 0;
-
- // Returns current media sending status.
- virtual bool SendingMedia() const = 0;
-
- // Returns whether audio is configured (i.e. Configuration::audio = true).
- virtual bool IsAudioConfigured() const = 0;
-
- // Indicate that the packets sent by this module should be counted towards the
- // bitrate estimate since the stream participates in the bitrate allocation.
- virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0;
-
- // TODO(sprang): Remove when all call sites have been moved to
- // GetSendRates(). Fetches the current send bitrates in bits/s.
- virtual void BitrateSent(uint32_t* total_rate,
- uint32_t* video_rate,
- uint32_t* fec_rate,
- uint32_t* nack_rate) const = 0;
-
- // Returns bitrate sent (post-pacing) per packet type.
- virtual RtpSendRates GetSendRates() const = 0;
-
- virtual RTPSender* RtpSender() = 0;
- virtual const RTPSender* RtpSender() const = 0;
-
- // Record that a frame is about to be sent. Returns true on success, and false
- // if the module isn't ready to send.
- virtual bool OnSendingRtpFrame(uint32_t timestamp,
- int64_t capture_time_ms,
- int payload_type,
- bool force_sender_report) = 0;
-
- // Try to send the provided packet. Returns true iff packet matches any of
- // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the
- // transport.
- virtual bool TrySendPacket(RtpPacketToSend* packet,
- const PacedPacketInfo& pacing_info) = 0;
-
- virtual void OnPacketsAcknowledged(
- rtc::ArrayView<const uint16_t> sequence_numbers) = 0;
-
- virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
- size_t target_size_bytes) = 0;
-
- virtual std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
- rtc::ArrayView<const uint16_t> sequence_numbers) const = 0;
-
- // Returns an expected per packet overhead representing the main RTP header,
- // any CSRCs, and the registered header extensions that are expected on all
- // packets (i.e. disregarding things like abs capture time which is only
- // populated on a subset of packets, but counting MID/RID type extensions
- // when we expect to send them).
- virtual size_t ExpectedPerPacketOverhead() const = 0;
-
- // **************************************************************************
- // RTCP
- // **************************************************************************
-
- // Returns RTCP status.
- virtual RtcpMode RTCP() const = 0;
-
- // Sets RTCP status i.e on(compound or non-compound)/off.
- // |method| - RTCP method to use.
- virtual void SetRTCPStatus(RtcpMode method) = 0;
-
- // Sets RTCP CName (i.e unique identifier).
- // Returns -1 on failure else 0.
- virtual int32_t SetCNAME(const char* cname) = 0;
-
- // Returns remote CName.
- // Returns -1 on failure else 0.
- virtual int32_t RemoteCNAME(uint32_t remote_ssrc,
- char cname[RTCP_CNAME_SIZE]) const = 0;
-
- // Returns remote NTP.
- // Returns -1 on failure else 0.
- virtual int32_t RemoteNTP(uint32_t* received_ntp_secs,
- uint32_t* received_ntp_frac,
- uint32_t* rtcp_arrival_time_secs,
- uint32_t* rtcp_arrival_time_frac,
- uint32_t* rtcp_timestamp) const = 0;
-
- // Returns -1 on failure else 0.
- virtual int32_t AddMixedCNAME(uint32_t ssrc, const char* cname) = 0;
-
- // Returns -1 on failure else 0.
- virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0;
-
- // Returns current RTT (round-trip time) estimate.
- // Returns -1 on failure else 0.
- virtual int32_t RTT(uint32_t remote_ssrc,
- int64_t* rtt,
- int64_t* avg_rtt,
- int64_t* min_rtt,
- int64_t* max_rtt) const = 0;
-
- // Returns the estimated RTT, with fallback to a default value.
- virtual int64_t ExpectedRetransmissionTimeMs() const = 0;
-
- // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the
- // process function.
- // Returns -1 on failure else 0.
- virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0;
-
- // Returns statistics of the amount of data sent.
- // Returns -1 on failure else 0.
- virtual int32_t DataCountersRTP(size_t* bytes_sent,
- uint32_t* packets_sent) const = 0;
-
- // Returns send statistics for the RTP and RTX stream.
- virtual void GetSendStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const = 0;
-
- // Returns received RTCP report block.
- // Returns -1 on failure else 0.
- // TODO(https://crbug.com/webrtc/10678): Remove this in favor of
- // GetLatestReportBlockData().
- virtual int32_t RemoteRTCPStat(
- std::vector<RTCPReportBlock>* receive_blocks) const = 0;
- // A snapshot of Report Blocks with additional data of interest to statistics.
- // Within this list, the sender-source SSRC pair is unique and per-pair the
- // ReportBlockData represents the latest Report Block that was received for
- // that pair.
- virtual std::vector<ReportBlockData> GetLatestReportBlockData() const = 0;
// (APP) Sets application specific data.
// Returns -1 on failure else 0.
- virtual int32_t SetRTCPApplicationSpecificData(uint8_t sub_type,
- uint32_t name,
- const uint8_t* data,
- uint16_t length) = 0;
- // (XR) Sets Receiver Reference Time Report (RTTR) status.
- virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
-
- // Returns current Receiver Reference Time Report (RTTR) status.
- virtual bool RtcpXrRrtrStatus() const = 0;
+ RTC_DEPRECATED virtual int32_t SetRTCPApplicationSpecificData(
+ uint8_t sub_type,
+ uint32_t name,
+ const uint8_t* data,
+ uint16_t length) = 0;
- // (REMB) Receiver Estimated Max Bitrate.
- // Schedules sending REMB on next and following sender/receiver reports.
- void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override = 0;
- // Stops sending REMB on next and following sender/receiver reports.
- void UnsetRemb() override = 0;
-
- // (TMMBR) Temporary Max Media Bit Rate
- virtual bool TMMBR() const = 0;
-
- virtual void SetTMMBRStatus(bool enable) = 0;
-
- // (NACK)
-
- // Sends a Negative acknowledgement packet.
+ // Returns statistics of the amount of data sent.
// Returns -1 on failure else 0.
- // TODO(philipel): Deprecate this and start using SendNack instead, mostly
- // because we want a function that actually send NACK for the specified
- // packets.
- virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0;
-
- // Sends NACK for the packets specified.
- // Note: This assumes the caller keeps track of timing and doesn't rely on
- // the RTP module to do this.
- virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
-
- // Store the sent packets, needed to answer to a Negative acknowledgment
- // requests.
- virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
-
- // Returns true if the module is configured to store packets.
- virtual bool StorePackets() const = 0;
-
- virtual void SetVideoBitrateAllocation(
- const VideoBitrateAllocation& bitrate) = 0;
-
- // **************************************************************************
- // Video
- // **************************************************************************
+ RTC_DEPRECATED virtual int32_t DataCountersRTP(
+ size_t* bytes_sent,
+ uint32_t* packets_sent) const = 0;
// Requests new key frame.
// using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1
void SendPictureLossIndication() { SendRTCP(kRtcpPli); }
// using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2
void SendFullIntraRequest() { SendRTCP(kRtcpFir); }
-
- // Sends a LossNotification RTCP message.
- // Returns -1 on failure else 0.
- virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num,
- uint16_t last_received_seq_num,
- bool decodability_flag,
- bool buffering_allowed) = 0;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc
index ca128e708a..5aa41fccb3 100644
--- a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc
+++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc
@@ -44,6 +44,12 @@ bool IsLegalRsidName(absl::string_view name) {
StreamDataCounters::StreamDataCounters() : first_packet_time_ms(-1) {}
+RtpPacketCounter::RtpPacketCounter(const RtpPacket& packet)
+ : header_bytes(packet.headers_size()),
+ payload_bytes(packet.payload_size()),
+ padding_bytes(packet.padding_size()),
+ packets(1) {}
+
void RtpPacketCounter::AddPacket(const RtpPacket& packet) {
++packets;
header_bytes += packet.headers_size();
diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 049ff5c506..46c310e276 100644
--- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -33,6 +33,7 @@
namespace webrtc {
class RtpPacket;
+class RtpPacketToSend;
namespace rtcp {
class TransportFeedback;
}
@@ -65,7 +66,6 @@ enum RTPExtensionType : int {
kRtpExtensionPlayoutDelay,
kRtpExtensionVideoContentType,
kRtpExtensionVideoTiming,
- kRtpExtensionFrameMarking,
kRtpExtensionRtpStreamId,
kRtpExtensionRepairedRtpStreamId,
kRtpExtensionMid,
@@ -91,7 +91,6 @@ enum RTCPPacketType : uint32_t {
kRtcpTmmbr = 0x0100,
kRtcpTmmbn = 0x0200,
kRtcpSrReq = 0x0400,
- kRtcpApp = 0x1000,
kRtcpLossNotification = 0x2000,
kRtcpRemb = 0x10000,
kRtcpTransmissionTimeOffset = 0x20000,
@@ -158,14 +157,12 @@ struct RtpState {
timestamp(0),
capture_time_ms(-1),
last_timestamp_time_ms(-1),
- media_has_been_sent(false),
ssrc_has_acked(false) {}
uint16_t sequence_number;
uint32_t start_timestamp;
uint32_t timestamp;
int64_t capture_time_ms;
int64_t last_timestamp_time_ms;
- bool media_has_been_sent;
bool ssrc_has_acked;
};
@@ -299,6 +296,8 @@ struct RtpPacketCounter {
RtpPacketCounter()
: header_bytes(0), payload_bytes(0), padding_bytes(0), packets(0) {}
+ explicit RtpPacketCounter(const RtpPacket& packet);
+
void Add(const RtpPacketCounter& other) {
header_bytes += other.header_bytes;
payload_bytes += other.payload_bytes;
@@ -468,5 +467,15 @@ class SendPacketObserver {
int64_t capture_time_ms,
uint32_t ssrc) = 0;
};
+
+// Interface for a class that can assign RTP sequence numbers for a packet
+// to be sent.
+class SequenceNumberAssigner {
+ public:
+ SequenceNumberAssigner() = default;
+ virtual ~SequenceNumberAssigner() = default;
+
+ virtual void AssignSequenceNumber(RtpPacketToSend* packet) = 0;
+};
} // namespace webrtc
#endif // MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_DEFINES_H_
diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
index 5b1585fa0f..e9a7d52691 100644
--- a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
+++ b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
@@ -19,7 +19,7 @@ namespace webrtc {
class MockRtcpRttStats : public RtcpRttStats {
public:
MOCK_METHOD(void, OnRttUpdate, (int64_t rtt), (override));
- MOCK_METHOD(int64_t, LastProcessedRtt, (), (const override));
+ MOCK_METHOD(int64_t, LastProcessedRtt, (), (const, override));
};
} // namespace webrtc
#endif // MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_RTT_STATS_H_
diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 5a333fe847..d597b1e289 100644
--- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -20,14 +20,14 @@
#include "absl/types/optional.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/include/module.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "test/gmock.h"
namespace webrtc {
-class MockRtpRtcp : public RtpRtcp {
+class MockRtpRtcpInterface : public RtpRtcpInterface {
public:
MOCK_METHOD(void,
IncomingRtcpPacket,
@@ -35,7 +35,7 @@ class MockRtpRtcp : public RtpRtcp {
(override));
MOCK_METHOD(void, SetRemoteSSRC, (uint32_t ssrc), (override));
MOCK_METHOD(void, SetMaxRtpPacketSize, (size_t size), (override));
- MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const override));
+ MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const, override));
MOCK_METHOD(void,
RegisterSendPayloadFrequency,
(int payload_type, int frequency),
@@ -45,10 +45,6 @@ class MockRtpRtcp : public RtpRtcp {
(int8_t payload_type),
(override));
MOCK_METHOD(void, SetExtmapAllowMixed, (bool extmap_allow_mixed), (override));
- MOCK_METHOD(int32_t,
- RegisterSendRtpHeaderExtension,
- (RTPExtensionType type, uint8_t id),
- (override));
MOCK_METHOD(void,
RegisterRtpHeaderExtension,
(absl::string_view uri, int id),
@@ -61,32 +57,30 @@ class MockRtpRtcp : public RtpRtcp {
DeregisterSendRtpHeaderExtension,
(absl::string_view uri),
(override));
- MOCK_METHOD(bool, SupportsPadding, (), (const override));
- MOCK_METHOD(bool, SupportsRtxPayloadPadding, (), (const override));
- MOCK_METHOD(uint32_t, StartTimestamp, (), (const override));
+ MOCK_METHOD(bool, SupportsPadding, (), (const, override));
+ MOCK_METHOD(bool, SupportsRtxPayloadPadding, (), (const, override));
+ MOCK_METHOD(uint32_t, StartTimestamp, (), (const, override));
MOCK_METHOD(void, SetStartTimestamp, (uint32_t timestamp), (override));
- MOCK_METHOD(uint16_t, SequenceNumber, (), (const override));
+ MOCK_METHOD(uint16_t, SequenceNumber, (), (const, override));
MOCK_METHOD(void, SetSequenceNumber, (uint16_t seq), (override));
MOCK_METHOD(void, SetRtpState, (const RtpState& rtp_state), (override));
MOCK_METHOD(void, SetRtxState, (const RtpState& rtp_state), (override));
- MOCK_METHOD(RtpState, GetRtpState, (), (const override));
- MOCK_METHOD(RtpState, GetRtxState, (), (const override));
- MOCK_METHOD(uint32_t, SSRC, (), (const override));
+ MOCK_METHOD(RtpState, GetRtpState, (), (const, override));
+ MOCK_METHOD(RtpState, GetRtxState, (), (const, override));
+ MOCK_METHOD(uint32_t, SSRC, (), (const, override));
MOCK_METHOD(void, SetRid, (const std::string& rid), (override));
MOCK_METHOD(void, SetMid, (const std::string& mid), (override));
- MOCK_METHOD(int32_t, CSRCs, (uint32_t csrcs[kRtpCsrcSize]), (const override));
MOCK_METHOD(void, SetCsrcs, (const std::vector<uint32_t>& csrcs), (override));
MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override));
- MOCK_METHOD(int, RtxSendStatus, (), (const override));
- MOCK_METHOD(absl::optional<uint32_t>, RtxSsrc, (), (const override));
+ MOCK_METHOD(int, RtxSendStatus, (), (const, override));
+ MOCK_METHOD(absl::optional<uint32_t>, RtxSsrc, (), (const, override));
MOCK_METHOD(void, SetRtxSendPayloadType, (int, int), (override));
- MOCK_METHOD(absl::optional<uint32_t>, FlexfecSsrc, (), (const override));
- MOCK_METHOD((std::pair<int, int>), RtxSendPayloadType, (), (const override));
+ MOCK_METHOD(absl::optional<uint32_t>, FlexfecSsrc, (), (const, override));
MOCK_METHOD(int32_t, SetSendingStatus, (bool sending), (override));
- MOCK_METHOD(bool, Sending, (), (const override));
+ MOCK_METHOD(bool, Sending, (), (const, override));
MOCK_METHOD(void, SetSendingMediaStatus, (bool sending), (override));
- MOCK_METHOD(bool, SendingMedia, (), (const override));
- MOCK_METHOD(bool, IsAudioConfigured, (), (const override));
+ MOCK_METHOD(bool, SendingMedia, (), (const, override));
+ MOCK_METHOD(bool, IsAudioConfigured, (), (const, override));
MOCK_METHOD(void, SetAsPartOfAllocation, (bool), (override));
MOCK_METHOD(void,
BitrateSent,
@@ -94,12 +88,8 @@ class MockRtpRtcp : public RtpRtcp {
uint32_t* video_rate,
uint32_t* fec_rate,
uint32_t* nack_rate),
- (const override));
- MOCK_METHOD(RtpSendRates, GetSendRates, (), (const override));
- MOCK_METHOD(int,
- EstimatedReceiveBandwidth,
- (uint32_t * available_bandwidth),
- (const override));
+ (const, override));
+ MOCK_METHOD(RtpSendRates, GetSendRates, (), (const, override));
MOCK_METHOD(bool,
OnSendingRtpFrame,
(uint32_t, int64_t, int, bool),
@@ -109,6 +99,15 @@ class MockRtpRtcp : public RtpRtcp {
(RtpPacketToSend * packet, const PacedPacketInfo& pacing_info),
(override));
MOCK_METHOD(void,
+ SetFecProtectionParams,
+ (const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ FetchFecPackets,
+ (),
+ (override));
+ MOCK_METHOD(void,
OnPacketsAcknowledged,
(rtc::ArrayView<const uint16_t>),
(override));
@@ -119,31 +118,22 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_METHOD(std::vector<RtpSequenceNumberMap::Info>,
GetSentRtpPacketInfos,
(rtc::ArrayView<const uint16_t> sequence_numbers),
- (const override));
- MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const override));
- MOCK_METHOD(RtcpMode, RTCP, (), (const override));
+ (const, override));
+ MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const, override));
+ MOCK_METHOD(RtcpMode, RTCP, (), (const, override));
MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override));
MOCK_METHOD(int32_t,
SetCNAME,
(const char cname[RTCP_CNAME_SIZE]),
(override));
MOCK_METHOD(int32_t,
- RemoteCNAME,
- (uint32_t remote_ssrc, char cname[RTCP_CNAME_SIZE]),
- (const override));
- MOCK_METHOD(int32_t,
RemoteNTP,
(uint32_t * received_ntp_secs,
uint32_t* received_ntp_frac,
uint32_t* rtcp_arrival_time_secs,
uint32_t* rtcp_arrival_time_frac,
uint32_t* rtcp_timestamp),
- (const override));
- MOCK_METHOD(int32_t,
- AddMixedCNAME,
- (uint32_t ssrc, const char cname[RTCP_CNAME_SIZE]),
- (override));
- MOCK_METHOD(int32_t, RemoveMixedCNAME, (uint32_t ssrc), (override));
+ (const, override));
MOCK_METHOD(int32_t,
RTT,
(uint32_t remote_ssrc,
@@ -151,39 +141,28 @@ class MockRtpRtcp : public RtpRtcp {
int64_t* avg_rtt,
int64_t* min_rtt,
int64_t* max_rtt),
- (const override));
- MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const override));
+ (const, override));
+ MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const, override));
MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override));
- MOCK_METHOD(int32_t,
- DataCountersRTP,
- (size_t * bytes_sent, uint32_t* packets_sent),
- (const override));
MOCK_METHOD(void,
GetSendStreamDataCounters,
(StreamDataCounters*, StreamDataCounters*),
- (const override));
+ (const, override));
MOCK_METHOD(int32_t,
RemoteRTCPStat,
(std::vector<RTCPReportBlock> * receive_blocks),
- (const override));
+ (const, override));
MOCK_METHOD(std::vector<ReportBlockData>,
GetLatestReportBlockData,
(),
- (const override));
- MOCK_METHOD(
- int32_t,
- SetRTCPApplicationSpecificData,
- (uint8_t sub_type, uint32_t name, const uint8_t* data, uint16_t length),
- (override));
+ (const, override));
MOCK_METHOD(void, SetRtcpXrRrtrStatus, (bool enable), (override));
- MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const override));
+ MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const, override));
MOCK_METHOD(void,
SetRemb,
(int64_t bitrate, std::vector<uint32_t> ssrcs),
(override));
MOCK_METHOD(void, UnsetRemb, (), (override));
- MOCK_METHOD(bool, TMMBR, (), (const override));
- MOCK_METHOD(void, SetTMMBRStatus, (bool enable), (override));
MOCK_METHOD(int32_t,
SendNACK,
(const uint16_t* nack_list, uint16_t size),
@@ -196,7 +175,7 @@ class MockRtpRtcp : public RtpRtcp {
SetStorePacketsStatus,
(bool enable, uint16_t number_to_store),
(override));
- MOCK_METHOD(bool, StorePackets, (), (const override));
+ MOCK_METHOD(bool, StorePackets, (), (const, override));
MOCK_METHOD(void,
SendCombinedRtcpPacket,
(std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets),
@@ -208,20 +187,12 @@ class MockRtpRtcp : public RtpRtcp {
bool decodability_flag,
bool buffering_allowed),
(override));
- MOCK_METHOD(void, Process, (), (override));
MOCK_METHOD(void,
SetVideoBitrateAllocation,
(const VideoBitrateAllocation&),
(override));
MOCK_METHOD(RTPSender*, RtpSender, (), (override));
- MOCK_METHOD(const RTPSender*, RtpSender, (), (const override));
-
- private:
- // Mocking this method is currently not required and having a default
- // implementation like
- // MOCK_METHOD(int64_t, TimeUntilNextProcess, (), (override))
- // can be dangerous since it can cause a tight loop on a process thread.
- int64_t TimeUntilNextProcess() override { return 0xffffffff; }
+ MOCK_METHOD(const RTPSender*, RtpSender, (), (const, override));
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
index 62f300d0e5..529ed7eef6 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
+++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
@@ -39,7 +39,7 @@ uint32_t AbsoluteCaptureTimeReceiver::GetSource(
void AbsoluteCaptureTimeReceiver::SetRemoteToLocalClockOffset(
absl::optional<int64_t> value_q32x32) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
remote_to_local_clock_offset_ = value_q32x32;
}
@@ -52,7 +52,7 @@ AbsoluteCaptureTimeReceiver::OnReceivePacket(
const absl::optional<AbsoluteCaptureTime>& received_extension) {
const Timestamp receive_time = clock_->CurrentTime();
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
AbsoluteCaptureTime extension;
if (received_extension == absl::nullopt) {
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
index ea55ab4d22..ce3442b386 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
+++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
@@ -15,7 +15,7 @@
#include "api/rtp_headers.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -73,26 +73,26 @@ class AbsoluteCaptureTimeReceiver {
uint32_t source,
uint32_t rtp_timestamp,
uint32_t rtp_clock_frequency) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
absl::optional<int64_t> AdjustEstimatedCaptureClockOffset(
absl::optional<int64_t> received_value) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
- absl::optional<int64_t> remote_to_local_clock_offset_ RTC_GUARDED_BY(crit_);
+ absl::optional<int64_t> remote_to_local_clock_offset_ RTC_GUARDED_BY(mutex_);
- Timestamp last_receive_time_ RTC_GUARDED_BY(crit_);
+ Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_);
- uint32_t last_source_ RTC_GUARDED_BY(crit_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(crit_);
- uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(crit_);
- uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(crit_);
+ uint32_t last_source_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_);
+ uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_);
absl::optional<int64_t> last_estimated_capture_clock_offset_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
}; // AbsoluteCaptureTimeReceiver
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc
index f614c0c521..83ba6cac91 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc
+++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc
@@ -47,7 +47,7 @@ absl::optional<AbsoluteCaptureTime> AbsoluteCaptureTimeSender::OnSendPacket(
absl::optional<int64_t> estimated_capture_clock_offset) {
const Timestamp send_time = clock_->CurrentTime();
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
if (!ShouldSendExtension(send_time, source, rtp_timestamp,
rtp_clock_frequency, absolute_capture_timestamp,
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.h b/modules/rtp_rtcp/source/absolute_capture_time_sender.h
index c45a2dc5b6..348a28370d 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_sender.h
+++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.h
@@ -15,7 +15,7 @@
#include "api/rtp_headers.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -67,20 +67,20 @@ class AbsoluteCaptureTimeSender {
uint32_t rtp_clock_frequency,
uint64_t absolute_capture_timestamp,
absl::optional<int64_t> estimated_capture_clock_offset) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
- Timestamp last_send_time_ RTC_GUARDED_BY(crit_);
+ Timestamp last_send_time_ RTC_GUARDED_BY(mutex_);
- uint32_t last_source_ RTC_GUARDED_BY(crit_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(crit_);
- uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(crit_);
- uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(crit_);
+ uint32_t last_source_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_);
+ uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_);
absl::optional<int64_t> last_estimated_capture_clock_offset_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
}; // AbsoluteCaptureTimeSender
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.cc b/modules/rtp_rtcp/source/active_decode_targets_helper.cc
new file mode 100644
index 0000000000..71e7e8cf78
--- /dev/null
+++ b/modules/rtp_rtcp/source/active_decode_targets_helper.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
+
+#include <stdint.h>
+
+#include "api/array_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+// Returns mask of ids of chains previous frame is part of.
+// Assumes for each chain frames are seen in order and no frame on any chain is
+// missing. That assumptions allows a simple detection when previous frame is
+// part of a chain.
+std::bitset<32> LastSendOnChain(int frame_diff,
+ rtc::ArrayView<const int> chain_diffs) {
+ std::bitset<32> bitmask = 0;
+ for (size_t i = 0; i < chain_diffs.size(); ++i) {
+ if (frame_diff == chain_diffs[i]) {
+ bitmask.set(i);
+ }
+ }
+ return bitmask;
+}
+
+// Returns bitmask with first `num` bits set to 1.
+std::bitset<32> AllActive(size_t num) {
+ RTC_DCHECK_LE(num, 32);
+ return (~uint32_t{0}) >> (32 - num);
+}
+
+// Returns bitmask of chains that protect at least one active decode target.
+std::bitset<32> ActiveChains(
+ rtc::ArrayView<const int> decode_target_protected_by_chain,
+ int num_chains,
+ std::bitset<32> active_decode_targets) {
+ std::bitset<32> active_chains = 0;
+ for (size_t dt = 0; dt < decode_target_protected_by_chain.size(); ++dt) {
+ if (dt < active_decode_targets.size() && !active_decode_targets[dt]) {
+ continue;
+ }
+ int chain_idx = decode_target_protected_by_chain[dt];
+ RTC_DCHECK_LT(chain_idx, num_chains);
+ active_chains.set(chain_idx);
+ }
+ return active_chains;
+}
+
+} // namespace
+
+void ActiveDecodeTargetsHelper::OnFrame(
+ rtc::ArrayView<const int> decode_target_protected_by_chain,
+ std::bitset<32> active_decode_targets,
+ bool is_keyframe,
+ int64_t frame_id,
+ rtc::ArrayView<const int> chain_diffs) {
+ const int num_chains = chain_diffs.size();
+ if (num_chains == 0) {
+ // Avoid printing the warning
+ // when already printed the warning for the same active decode targets, or
+ // when active_decode_targets are not changed from it's default value of
+ // all are active, including non-existent decode targets.
+ if (last_active_decode_targets_ != active_decode_targets &&
+ !active_decode_targets.all()) {
+ RTC_LOG(LS_WARNING) << "No chains are configured, but some decode "
+ "targets might be inactive. Unsupported.";
+ }
+ last_active_decode_targets_ = active_decode_targets;
+ return;
+ }
+ const size_t num_decode_targets = decode_target_protected_by_chain.size();
+ RTC_DCHECK_GT(num_decode_targets, 0);
+ std::bitset<32> all_decode_targets = AllActive(num_decode_targets);
+ // Default value for active_decode_targets is 'all are active', i.e. all bits
+ // are set. Default value is set before number of decode targets is known.
+ // It is up to this helper to make the value cleaner and unset unused bits.
+ active_decode_targets &= all_decode_targets;
+
+ if (is_keyframe) {
+ // Key frame resets the state.
+ last_active_decode_targets_ = all_decode_targets;
+ last_active_chains_ = AllActive(num_chains);
+ unsent_on_chain_.reset();
+ } else {
+ // Update state assuming previous frame was sent.
+ unsent_on_chain_ &=
+ ~LastSendOnChain(frame_id - last_frame_id_, chain_diffs);
+ }
+ // Save for the next call to OnFrame.
+ // Though usually `frame_id == last_frame_id_ + 1`, it might not be so when
+ // frame id space is shared by several simulcast rtp streams.
+ last_frame_id_ = frame_id;
+
+ if (active_decode_targets == last_active_decode_targets_) {
+ return;
+ }
+ last_active_decode_targets_ = active_decode_targets;
+
+ if (active_decode_targets.none()) {
+ RTC_LOG(LS_ERROR) << "It is invalid to produce a frame (" << frame_id
+ << ") while there are no active decode targets";
+ return;
+ }
+ last_active_chains_ = ActiveChains(decode_target_protected_by_chain,
+ num_chains, active_decode_targets);
+ // Frames that are part of inactive chains might not be produced by the
+ // encoder. Thus stop sending `active_decode_target` bitmask when it is sent
+ // on all active chains rather than on all chains.
+ unsent_on_chain_ = last_active_chains_;
+ RTC_DCHECK(!unsent_on_chain_.none());
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.h b/modules/rtp_rtcp/source/active_decode_targets_helper.h
new file mode 100644
index 0000000000..13755e8d80
--- /dev/null
+++ b/modules/rtp_rtcp/source/active_decode_targets_helper.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
+#define MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
+
+#include <stdint.h>
+
+#include <bitset>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+
+namespace webrtc {
+
+// Helper class that decides when active_decode_target_bitmask should be written
+// into the dependency descriptor rtp header extension.
+// See: https://aomediacodec.github.io/av1-rtp-spec/#a44-switching
+// This class is thread-compatible
+class ActiveDecodeTargetsHelper {
+ public:
+ ActiveDecodeTargetsHelper() = default;
+ ActiveDecodeTargetsHelper(const ActiveDecodeTargetsHelper&) = delete;
+ ActiveDecodeTargetsHelper& operator=(const ActiveDecodeTargetsHelper&) =
+ delete;
+ ~ActiveDecodeTargetsHelper() = default;
+
+ // Decides if active decode target bitmask should be attached to the frame
+ // that is about to be sent.
+ void OnFrame(rtc::ArrayView<const int> decode_target_protected_by_chain,
+ std::bitset<32> active_decode_targets,
+ bool is_keyframe,
+ int64_t frame_id,
+ rtc::ArrayView<const int> chain_diffs);
+
+ // Returns active decode target to attach to the dependency descriptor.
+ absl::optional<uint32_t> ActiveDecodeTargetsBitmask() const {
+ if (unsent_on_chain_.none())
+ return absl::nullopt;
+ return last_active_decode_targets_.to_ulong();
+ }
+
+ std::bitset<32> ActiveChainsBitmask() const { return last_active_chains_; }
+
+ private:
+ // `unsent_on_chain_[i]` indicates last active decode
+ // target bitmask wasn't attached to a packet on the chain with id `i`.
+ std::bitset<32> unsent_on_chain_ = 0;
+ std::bitset<32> last_active_decode_targets_ = 0;
+ std::bitset<32> last_active_chains_ = 0;
+ int64_t last_frame_id_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc
new file mode 100644
index 0000000000..6f64fd1418
--- /dev/null
+++ b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc
@@ -0,0 +1,272 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+constexpr std::bitset<32> kAll = ~uint32_t{0};
+} // namespace
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActive) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActiveAfterDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactive) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactiveAfterDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptWhenActiveDecodeTargetsAreUnused) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnDeltaFrameAfterSentOnKeyFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNewBitmaskOnDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskWhenAllDecodeTargetsReactivatedOnDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ ASSERT_NE(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ // Reactive all the decode targets
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false, /*frame_id=*/3, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b11u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptAfterSentOnAllActiveChains) {
+ // Active decode targets (0 and 1) are protected by chains 1 and 2.
+ const std::bitset<32> kSome = 0b011;
+ constexpr int kDecodeTargetProtectedByChain[] = {2, 1, 0};
+
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0, 0, 0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b111,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, chain_diffs_key);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ int chain_diffs_delta1[] = {1, 1, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, chain_diffs_delta1);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+ int chain_diffs_delta2[] = {2, 2, 1}; // Previous frame was part of chain#2
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta2);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+ // active_decode_targets_bitmask was send on chains 1 and 2. It was never sent
+ // on chain 0, but chain 0 only protects inactive decode target#2
+ int chain_diffs_delta3[] = {3, 1, 2}; // Previous frame was part of chain#1
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/3, chain_diffs_delta3);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsBitmaskWhenChanged) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 1, 1};
+
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0, 0};
+ helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b111,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, chain_diffs_key);
+ int chain_diffs_delta1[] = {1, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b011,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, chain_diffs_delta1);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+ int chain_diffs_delta2[] = {1, 2};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta2);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u);
+
+ // active_decode_target_bitmask was send on chain0, but it was an old one.
+ int chain_diffs_delta3[] = {2, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/3, chain_diffs_delta3);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptWhenChainsAreNotUsed) {
+ const rtc::ArrayView<const int> kDecodeTargetProtectedByChain;
+ const rtc::ArrayView<const int> kNoChainDiffs;
+
+ ActiveDecodeTargetsHelper helper;
+ helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, kNoChainDiffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, kNoChainDiffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, Supports32DecodeTargets) {
+ std::bitset<32> some;
+ std::vector<int> decode_target_protected_by_chain(32);
+ for (int i = 0; i < 32; ++i) {
+ decode_target_protected_by_chain[i] = i;
+ some[i] = i % 2 == 0;
+ }
+
+ ActiveDecodeTargetsHelper helper;
+ std::vector<int> chain_diffs_key(32, 0);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/some,
+ /*is_keyframe=*/true,
+ /*frame_id=*/1, chain_diffs_key);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), some.to_ulong());
+ std::vector<int> chain_diffs_delta(32, 1);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/some,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), kAll.to_ulong());
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
new file mode 100644
index 0000000000..ba091cefb0
--- /dev/null
+++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
@@ -0,0 +1,472 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h"
+
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/match.h"
+#include "api/transport/field_trial_based_config.h"
+#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h"
+#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+constexpr uint32_t kTimestampTicksPerMs = 90;
+constexpr int kSendSideDelayWindowMs = 1000;
+constexpr int kBitrateStatisticsWindowMs = 1000;
+constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13;
+
+bool IsEnabled(absl::string_view name,
+ const WebRtcKeyValueConfig* field_trials) {
+ FieldTrialBasedConfig default_trials;
+ auto& trials = field_trials ? *field_trials : default_trials;
+ return absl::StartsWith(trials.Lookup(name), "Enabled");
+}
+} // namespace
+
+DEPRECATED_RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender(
+ DEPRECATED_RtpSenderEgress* sender)
+ : transport_sequence_number_(0), sender_(sender) {}
+DEPRECATED_RtpSenderEgress::NonPacedPacketSender::~NonPacedPacketSender() =
+ default;
+
+void DEPRECATED_RtpSenderEgress::NonPacedPacketSender::EnqueuePackets(
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ for (auto& packet : packets) {
+ if (!packet->SetExtension<TransportSequenceNumber>(
+ ++transport_sequence_number_)) {
+ --transport_sequence_number_;
+ }
+ packet->ReserveExtension<TransmissionOffset>();
+ packet->ReserveExtension<AbsoluteSendTime>();
+ sender_->SendPacket(packet.get(), PacedPacketInfo());
+ }
+}
+
+DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress(
+ const RtpRtcpInterface::Configuration& config,
+ RtpPacketHistory* packet_history)
+ : ssrc_(config.local_media_ssrc),
+ rtx_ssrc_(config.rtx_send_ssrc),
+ flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc()
+ : absl::nullopt),
+ populate_network2_timestamp_(config.populate_network2_timestamp),
+ send_side_bwe_with_overhead_(
+ IsEnabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)),
+ clock_(config.clock),
+ packet_history_(packet_history),
+ transport_(config.outgoing_transport),
+ event_log_(config.event_log),
+ is_audio_(config.audio),
+ need_rtp_packet_infos_(config.need_rtp_packet_infos),
+ transport_feedback_observer_(config.transport_feedback_callback),
+ send_side_delay_observer_(config.send_side_delay_observer),
+ send_packet_observer_(config.send_packet_observer),
+ rtp_stats_callback_(config.rtp_stats_callback),
+ bitrate_callback_(config.send_bitrate_observer),
+ media_has_been_sent_(false),
+ force_part_of_allocation_(false),
+ timestamp_offset_(0),
+ max_delay_it_(send_delays_.end()),
+ sum_delays_ms_(0),
+ total_packet_send_delay_ms_(0),
+ send_rates_(kNumMediaTypes,
+ {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}),
+ rtp_sequence_number_map_(need_rtp_packet_infos_
+ ? std::make_unique<RtpSequenceNumberMap>(
+ kRtpSequenceNumberMapMaxEntries)
+ : nullptr) {}
+
+void DEPRECATED_RtpSenderEgress::SendPacket(
+ RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) {
+ RTC_DCHECK(packet);
+
+ const uint32_t packet_ssrc = packet->Ssrc();
+ RTC_DCHECK(packet->packet_type().has_value());
+ RTC_DCHECK(HasCorrectSsrc(*packet));
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ if (is_audio_) {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms,
+ GetSendRates().Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "AudioNackBitrate_kbps", now_ms,
+ GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
+ packet_ssrc);
+#endif
+ } else {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms,
+ GetSendRates().Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "VideoNackBitrate_kbps", now_ms,
+ GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
+ packet_ssrc);
+#endif
+ }
+
+ PacketOptions options;
+ {
+ MutexLock lock(&lock_);
+ options.included_in_allocation = force_part_of_allocation_;
+
+ if (need_rtp_packet_infos_ &&
+ packet->packet_type() == RtpPacketToSend::Type::kVideo) {
+ RTC_DCHECK(rtp_sequence_number_map_);
+ // Last packet of a frame, add it to sequence number info map.
+ const uint32_t timestamp = packet->Timestamp() - timestamp_offset_;
+ bool is_first_packet_of_frame = packet->is_first_packet_of_frame();
+ bool is_last_packet_of_frame = packet->Marker();
+
+ rtp_sequence_number_map_->InsertPacket(
+ packet->SequenceNumber(),
+ RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame,
+ is_last_packet_of_frame));
+ }
+ }
+
+ // Bug webrtc:7859. While FEC is invoked from rtp_sender_video, and not after
+ // the pacer, these modifications of the header below are happening after the
+ // FEC protection packets are calculated. This will corrupt recovered packets
+ // at the same place. It's not an issue for extensions, which are present in
+ // all the packets (their content just may be incorrect on recovered packets).
+ // In case of VideoTimingExtension, since it's present not in every packet,
+ // data after rtp header may be corrupted if these packets are protected by
+ // the FEC.
+ int64_t diff_ms = now_ms - packet->capture_time_ms();
+ if (packet->HasExtension<TransmissionOffset>()) {
+ packet->SetExtension<TransmissionOffset>(kTimestampTicksPerMs * diff_ms);
+ }
+ if (packet->HasExtension<AbsoluteSendTime>()) {
+ packet->SetExtension<AbsoluteSendTime>(
+ AbsoluteSendTime::MsTo24Bits(now_ms));
+ }
+
+ if (packet->HasExtension<VideoTimingExtension>()) {
+ if (populate_network2_timestamp_) {
+ packet->set_network2_time_ms(now_ms);
+ } else {
+ packet->set_pacer_exit_time_ms(now_ms);
+ }
+ }
+
+ const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio ||
+ packet->packet_type() == RtpPacketMediaType::kVideo;
+
+ // Downstream code actually uses this flag to distinguish between media and
+ // everything else.
+ options.is_retransmit = !is_media;
+ if (auto packet_id = packet->GetExtension<TransportSequenceNumber>()) {
+ options.packet_id = *packet_id;
+ options.included_in_feedback = true;
+ options.included_in_allocation = true;
+ AddPacketToTransportFeedback(*packet_id, *packet, pacing_info);
+ }
+
+ options.application_data.assign(packet->application_data().begin(),
+ packet->application_data().end());
+
+ if (packet->packet_type() != RtpPacketMediaType::kPadding &&
+ packet->packet_type() != RtpPacketMediaType::kRetransmission) {
+ UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc);
+ UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(),
+ packet_ssrc);
+ }
+
+ const bool send_success = SendPacketToNetwork(*packet, options, pacing_info);
+
+ // Put packet in retransmission history or update pending status even if
+ // actual sending fails.
+ if (is_media && packet->allow_retransmission()) {
+ packet_history_->PutRtpPacket(std::make_unique<RtpPacketToSend>(*packet),
+ now_ms);
+ } else if (packet->retransmitted_sequence_number()) {
+ packet_history_->MarkPacketAsSent(*packet->retransmitted_sequence_number());
+ }
+
+ if (send_success) {
+ MutexLock lock(&lock_);
+ UpdateRtpStats(*packet);
+ media_has_been_sent_ = true;
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::ProcessBitrateAndNotifyObservers() {
+ if (!bitrate_callback_)
+ return;
+
+ MutexLock lock(&lock_);
+ RtpSendRates send_rates = GetSendRatesLocked();
+ bitrate_callback_->Notify(
+ send_rates.Sum().bps(),
+ send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_);
+}
+
+RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRates() const {
+ MutexLock lock(&lock_);
+ return GetSendRatesLocked();
+}
+
+RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRatesLocked() const {
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ RtpSendRates current_rates;
+ for (size_t i = 0; i < kNumMediaTypes; ++i) {
+ RtpPacketMediaType type = static_cast<RtpPacketMediaType>(i);
+ current_rates[type] =
+ DataRate::BitsPerSec(send_rates_[i].Rate(now_ms).value_or(0));
+ }
+ return current_rates;
+}
+
+void DEPRECATED_RtpSenderEgress::GetDataCounters(
+ StreamDataCounters* rtp_stats,
+ StreamDataCounters* rtx_stats) const {
+ MutexLock lock(&lock_);
+ *rtp_stats = rtp_stats_;
+ *rtx_stats = rtx_rtp_stats_;
+}
+
+void DEPRECATED_RtpSenderEgress::ForceIncludeSendPacketsInAllocation(
+ bool part_of_allocation) {
+ MutexLock lock(&lock_);
+ force_part_of_allocation_ = part_of_allocation;
+}
+
+bool DEPRECATED_RtpSenderEgress::MediaHasBeenSent() const {
+ MutexLock lock(&lock_);
+ return media_has_been_sent_;
+}
+
+void DEPRECATED_RtpSenderEgress::SetMediaHasBeenSent(bool media_sent) {
+ MutexLock lock(&lock_);
+ media_has_been_sent_ = media_sent;
+}
+
+void DEPRECATED_RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) {
+ MutexLock lock(&lock_);
+ timestamp_offset_ = timestamp;
+}
+
+std::vector<RtpSequenceNumberMap::Info>
+DEPRECATED_RtpSenderEgress::GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const {
+ RTC_DCHECK(!sequence_numbers.empty());
+ if (!need_rtp_packet_infos_) {
+ return std::vector<RtpSequenceNumberMap::Info>();
+ }
+
+ std::vector<RtpSequenceNumberMap::Info> results;
+ results.reserve(sequence_numbers.size());
+
+ MutexLock lock(&lock_);
+ for (uint16_t sequence_number : sequence_numbers) {
+ const auto& info = rtp_sequence_number_map_->Get(sequence_number);
+ if (!info) {
+ // The empty vector will be returned. We can delay the clearing
+ // of the vector until after we exit the critical section.
+ return std::vector<RtpSequenceNumberMap::Info>();
+ }
+ results.push_back(*info);
+ }
+
+ return results;
+}
+
+bool DEPRECATED_RtpSenderEgress::HasCorrectSsrc(
+ const RtpPacketToSend& packet) const {
+ switch (*packet.packet_type()) {
+ case RtpPacketMediaType::kAudio:
+ case RtpPacketMediaType::kVideo:
+ return packet.Ssrc() == ssrc_;
+ case RtpPacketMediaType::kRetransmission:
+ case RtpPacketMediaType::kPadding:
+ // Both padding and retransmission must be on either the media or the
+ // RTX stream.
+ return packet.Ssrc() == rtx_ssrc_ || packet.Ssrc() == ssrc_;
+ case RtpPacketMediaType::kForwardErrorCorrection:
+ // FlexFEC is on separate SSRC, ULPFEC uses media SSRC.
+ return packet.Ssrc() == ssrc_ || packet.Ssrc() == flexfec_ssrc_;
+ }
+ return false;
+}
+
+void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback(
+ uint16_t packet_id,
+ const RtpPacketToSend& packet,
+ const PacedPacketInfo& pacing_info) {
+ if (transport_feedback_observer_) {
+ size_t packet_size = packet.payload_size() + packet.padding_size();
+ if (send_side_bwe_with_overhead_) {
+ packet_size = packet.size();
+ }
+
+ RtpPacketSendInfo packet_info;
+ packet_info.ssrc = ssrc_;
+ packet_info.transport_sequence_number = packet_id;
+ packet_info.rtp_sequence_number = packet.SequenceNumber();
+ packet_info.length = packet_size;
+ packet_info.pacing_info = pacing_info;
+ packet_info.packet_type = packet.packet_type();
+ transport_feedback_observer_->OnAddPacket(packet_info);
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms,
+ int64_t now_ms,
+ uint32_t ssrc) {
+ if (!send_side_delay_observer_ || capture_time_ms <= 0)
+ return;
+
+ int avg_delay_ms = 0;
+ int max_delay_ms = 0;
+ uint64_t total_packet_send_delay_ms = 0;
+ {
+ MutexLock lock(&lock_);
+ // Compute the max and average of the recent capture-to-send delays.
+ // The time complexity of the current approach depends on the distribution
+ // of the delay values. This could be done more efficiently.
+
+ // Remove elements older than kSendSideDelayWindowMs.
+ auto lower_bound =
+ send_delays_.lower_bound(now_ms - kSendSideDelayWindowMs);
+ for (auto it = send_delays_.begin(); it != lower_bound; ++it) {
+ if (max_delay_it_ == it) {
+ max_delay_it_ = send_delays_.end();
+ }
+ sum_delays_ms_ -= it->second;
+ }
+ send_delays_.erase(send_delays_.begin(), lower_bound);
+ if (max_delay_it_ == send_delays_.end()) {
+ // Removed the previous max. Need to recompute.
+ RecomputeMaxSendDelay();
+ }
+
+ // Add the new element.
+ RTC_DCHECK_GE(now_ms, 0);
+ RTC_DCHECK_LE(now_ms, std::numeric_limits<int64_t>::max() / 2);
+ RTC_DCHECK_GE(capture_time_ms, 0);
+ RTC_DCHECK_LE(capture_time_ms, std::numeric_limits<int64_t>::max() / 2);
+ int64_t diff_ms = now_ms - capture_time_ms;
+ RTC_DCHECK_GE(diff_ms, static_cast<int64_t>(0));
+ RTC_DCHECK_LE(diff_ms, std::numeric_limits<int>::max());
+ int new_send_delay = rtc::dchecked_cast<int>(now_ms - capture_time_ms);
+ SendDelayMap::iterator it;
+ bool inserted;
+ std::tie(it, inserted) =
+ send_delays_.insert(std::make_pair(now_ms, new_send_delay));
+ if (!inserted) {
+ // TODO(terelius): If we have multiple delay measurements during the same
+ // millisecond then we keep the most recent one. It is not clear that this
+ // is the right decision, but it preserves an earlier behavior.
+ int previous_send_delay = it->second;
+ sum_delays_ms_ -= previous_send_delay;
+ it->second = new_send_delay;
+ if (max_delay_it_ == it && new_send_delay < previous_send_delay) {
+ RecomputeMaxSendDelay();
+ }
+ }
+ if (max_delay_it_ == send_delays_.end() ||
+ it->second >= max_delay_it_->second) {
+ max_delay_it_ = it;
+ }
+ sum_delays_ms_ += new_send_delay;
+ total_packet_send_delay_ms_ += new_send_delay;
+ total_packet_send_delay_ms = total_packet_send_delay_ms_;
+
+ size_t num_delays = send_delays_.size();
+ RTC_DCHECK(max_delay_it_ != send_delays_.end());
+ max_delay_ms = rtc::dchecked_cast<int>(max_delay_it_->second);
+ int64_t avg_ms = (sum_delays_ms_ + num_delays / 2) / num_delays;
+ RTC_DCHECK_GE(avg_ms, static_cast<int64_t>(0));
+ RTC_DCHECK_LE(avg_ms,
+ static_cast<int64_t>(std::numeric_limits<int>::max()));
+ avg_delay_ms =
+ rtc::dchecked_cast<int>((sum_delays_ms_ + num_delays / 2) / num_delays);
+ }
+ send_side_delay_observer_->SendSideDelayUpdated(
+ avg_delay_ms, max_delay_ms, total_packet_send_delay_ms, ssrc);
+}
+
+void DEPRECATED_RtpSenderEgress::RecomputeMaxSendDelay() {
+ max_delay_it_ = send_delays_.begin();
+ for (auto it = send_delays_.begin(); it != send_delays_.end(); ++it) {
+ if (it->second >= max_delay_it_->second) {
+ max_delay_it_ = it;
+ }
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) {
+ if (!send_packet_observer_ || capture_time_ms <= 0 || packet_id == -1) {
+ return;
+ }
+
+ send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc);
+}
+
+bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork(
+ const RtpPacketToSend& packet,
+ const PacketOptions& options,
+ const PacedPacketInfo& pacing_info) {
+ int bytes_sent = -1;
+ if (transport_) {
+ bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options)
+ ? static_cast<int>(packet.size())
+ : -1;
+ if (event_log_ && bytes_sent > 0) {
+ event_log_->Log(std::make_unique<RtcEventRtpPacketOutgoing>(
+ packet, pacing_info.probe_cluster_id));
+ }
+ }
+
+ if (bytes_sent <= 0) {
+ RTC_LOG(LS_WARNING) << "Transport failed to send packet.";
+ return false;
+ }
+ return true;
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) {
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ StreamDataCounters* counters =
+ packet.Ssrc() == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_;
+
+ if (counters->first_packet_time_ms == -1) {
+ counters->first_packet_time_ms = now_ms;
+ }
+
+ if (packet.packet_type() == RtpPacketMediaType::kForwardErrorCorrection) {
+ counters->fec.AddPacket(packet);
+ }
+
+ if (packet.packet_type() == RtpPacketMediaType::kRetransmission) {
+ counters->retransmitted.AddPacket(packet);
+ }
+ counters->transmitted.AddPacket(packet);
+
+ RTC_DCHECK(packet.packet_type().has_value());
+ send_rates_[static_cast<size_t>(*packet.packet_type())].Update(packet.size(),
+ now_ms);
+
+ if (rtp_stats_callback_) {
+ rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc());
+ }
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h
new file mode 100644
index 0000000000..742e7d5499
--- /dev/null
+++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
+#define MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/call/transport.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/units/data_rate.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_packet_history.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
+#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class DEPRECATED_RtpSenderEgress {
+ public:
+ // Helper class that redirects packets directly to the send part of this class
+ // without passing through an actual paced sender.
+ class NonPacedPacketSender : public RtpPacketSender {
+ public:
+ explicit NonPacedPacketSender(DEPRECATED_RtpSenderEgress* sender);
+ virtual ~NonPacedPacketSender();
+
+ void EnqueuePackets(
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets) override;
+
+ private:
+ uint16_t transport_sequence_number_;
+ DEPRECATED_RtpSenderEgress* const sender_;
+ };
+
+ DEPRECATED_RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
+ RtpPacketHistory* packet_history);
+ ~DEPRECATED_RtpSenderEgress() = default;
+
+ void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info)
+ RTC_LOCKS_EXCLUDED(lock_);
+ uint32_t Ssrc() const { return ssrc_; }
+ absl::optional<uint32_t> RtxSsrc() const { return rtx_ssrc_; }
+ absl::optional<uint32_t> FlexFecSsrc() const { return flexfec_ssrc_; }
+
+ void ProcessBitrateAndNotifyObservers() RTC_LOCKS_EXCLUDED(lock_);
+ RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_);
+ void GetDataCounters(StreamDataCounters* rtp_stats,
+ StreamDataCounters* rtx_stats) const
+ RTC_LOCKS_EXCLUDED(lock_);
+
+ void ForceIncludeSendPacketsInAllocation(bool part_of_allocation)
+ RTC_LOCKS_EXCLUDED(lock_);
+ bool MediaHasBeenSent() const RTC_LOCKS_EXCLUDED(lock_);
+ void SetMediaHasBeenSent(bool media_sent) RTC_LOCKS_EXCLUDED(lock_);
+ void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(lock_);
+
+ // For each sequence number in |sequence_number|, recall the last RTP packet
+ // which bore it - its timestamp and whether it was the first and/or last
+ // packet in that frame. If all of the given sequence numbers could be
+ // recalled, return a vector with all of them (in corresponding order).
+ // If any could not be recalled, return an empty vector.
+ std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const
+ RTC_LOCKS_EXCLUDED(lock_);
+
+ private:
+ // Maps capture time in milliseconds to send-side delay in milliseconds.
+ // Send-side delay is the difference between transmission time and capture
+ // time.
+ typedef std::map<int64_t, int> SendDelayMap;
+
+ RtpSendRates GetSendRatesLocked() const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ bool HasCorrectSsrc(const RtpPacketToSend& packet) const;
+ void AddPacketToTransportFeedback(uint16_t packet_id,
+ const RtpPacketToSend& packet,
+ const PacedPacketInfo& pacing_info);
+ void UpdateDelayStatistics(int64_t capture_time_ms,
+ int64_t now_ms,
+ uint32_t ssrc);
+ void RecomputeMaxSendDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ void UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc);
+ // Sends packet on to |transport_|, leaving the RTP module.
+ bool SendPacketToNetwork(const RtpPacketToSend& packet,
+ const PacketOptions& options,
+ const PacedPacketInfo& pacing_info);
+ void UpdateRtpStats(const RtpPacketToSend& packet)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+
+ const uint32_t ssrc_;
+ const absl::optional<uint32_t> rtx_ssrc_;
+ const absl::optional<uint32_t> flexfec_ssrc_;
+ const bool populate_network2_timestamp_;
+ const bool send_side_bwe_with_overhead_;
+ Clock* const clock_;
+ RtpPacketHistory* const packet_history_;
+ Transport* const transport_;
+ RtcEventLog* const event_log_;
+ const bool is_audio_;
+ const bool need_rtp_packet_infos_;
+
+ TransportFeedbackObserver* const transport_feedback_observer_;
+ SendSideDelayObserver* const send_side_delay_observer_;
+ SendPacketObserver* const send_packet_observer_;
+ StreamDataCountersCallback* const rtp_stats_callback_;
+ BitrateStatisticsObserver* const bitrate_callback_;
+
+ mutable Mutex lock_;
+ bool media_has_been_sent_ RTC_GUARDED_BY(lock_);
+ bool force_part_of_allocation_ RTC_GUARDED_BY(lock_);
+ uint32_t timestamp_offset_ RTC_GUARDED_BY(lock_);
+
+ SendDelayMap send_delays_ RTC_GUARDED_BY(lock_);
+ SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_);
+ // The sum of delays over a kSendSideDelayWindowMs sliding window.
+ int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_);
+ uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_);
+ StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_);
+ StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_);
+ // One element per value in RtpPacketMediaType, with index matching value.
+ std::vector<RateStatistics> send_rates_ RTC_GUARDED_BY(lock_);
+
+ // Maps sent packets' sequence numbers to a tuple consisting of:
+ // 1. The timestamp, without the randomizing offset mandated by the RFC.
+ // 2. Whether the packet was the first in its frame.
+ // 3. Whether the packet was the last in its frame.
+ const std::unique_ptr<RtpSequenceNumberMap> rtp_sequence_number_map_
+ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
diff --git a/modules/rtp_rtcp/source/dtmf_queue.cc b/modules/rtp_rtcp/source/dtmf_queue.cc
index 10e674789a..df06d2a2f3 100644
--- a/modules/rtp_rtcp/source/dtmf_queue.cc
+++ b/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -24,7 +24,7 @@ DtmfQueue::DtmfQueue() {}
DtmfQueue::~DtmfQueue() {}
bool DtmfQueue::AddDtmf(const Event& event) {
- rtc::CritScope lock(&dtmf_critsect_);
+ MutexLock lock(&dtmf_mutex_);
if (queue_.size() >= kDtmfOutbandMax) {
return false;
}
@@ -34,7 +34,7 @@ bool DtmfQueue::AddDtmf(const Event& event) {
bool DtmfQueue::NextDtmf(Event* event) {
RTC_DCHECK(event);
- rtc::CritScope lock(&dtmf_critsect_);
+ MutexLock lock(&dtmf_mutex_);
if (queue_.empty()) {
return false;
}
@@ -45,7 +45,7 @@ bool DtmfQueue::NextDtmf(Event* event) {
}
bool DtmfQueue::PendingDtmf() const {
- rtc::CritScope lock(&dtmf_critsect_);
+ MutexLock lock(&dtmf_mutex_);
return !queue_.empty();
}
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/dtmf_queue.h b/modules/rtp_rtcp/source/dtmf_queue.h
index adb93aa6fa..1d1867fd27 100644
--- a/modules/rtp_rtcp/source/dtmf_queue.h
+++ b/modules/rtp_rtcp/source/dtmf_queue.h
@@ -15,7 +15,7 @@
#include <list>
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
class DtmfQueue {
@@ -35,7 +35,7 @@ class DtmfQueue {
bool PendingDtmf() const;
private:
- rtc::CriticalSection dtmf_critsect_;
+ mutable Mutex dtmf_mutex_;
std::list<Event> queue_;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/flexfec_sender.cc b/modules/rtp_rtcp/source/flexfec_sender.cc
index 16a6f2603c..f1fe71d198 100644
--- a/modules/rtp_rtcp/source/flexfec_sender.cc
+++ b/modules/rtp_rtcp/source/flexfec_sender.cc
@@ -176,7 +176,7 @@ std::vector<std::unique_ptr<RtpPacketToSend>> FlexfecSender::GetFecPackets() {
last_generated_packet_ms_ = now_ms;
}
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
fec_bitrate_.Update(total_fec_data_bytes, now_ms);
return fec_packets_to_send;
@@ -188,7 +188,7 @@ size_t FlexfecSender::MaxPacketOverhead() const {
}
DataRate FlexfecSender::CurrentFecRate() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return DataRate::BitsPerSec(
fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0));
}
diff --git a/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 55e1e44ebe..8afaf3ee61 100644
--- a/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -19,9 +19,9 @@
#include "call/rtp_stream_receiver_controller.h"
#include "call/rtx_receive_stream.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "rtc_base/rate_limiter.h"
#include "test/gtest.h"
@@ -63,7 +63,9 @@ class RtxLoopBackTransport : public webrtc::Transport {
count_rtx_ssrc_(0),
module_(NULL) {}
- void SetSendModule(RtpRtcp* rtpRtcpModule) { module_ = rtpRtcpModule; }
+ void SetSendModule(RtpRtcpInterface* rtpRtcpModule) {
+ module_ = rtpRtcpModule;
+ }
void DropEveryNthPacket(int n) { packet_loss_ = n; }
@@ -109,7 +111,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
int consecutive_drop_end_;
uint32_t rtx_ssrc_;
int count_rtx_ssrc_;
- RtpRtcp* module_;
+ RtpRtcpInterface* module_;
RtpStreamReceiverController stream_receiver_controller_;
std::set<uint16_t> expected_sequence_numbers_;
};
@@ -125,7 +127,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
~RtpRtcpRtxNackTest() override {}
void SetUp() override {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.clock = &fake_clock;
receive_statistics_ = ReceiveStatistics::Create(&fake_clock);
@@ -134,7 +136,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
configuration.retransmission_rate_limiter = &retransmission_rate_limiter_;
configuration.local_media_ssrc = kTestSsrc;
configuration.rtx_send_ssrc = kTestRtxSsrc;
- rtp_rtcp_module_ = RtpRtcp::Create(configuration);
+ rtp_rtcp_module_ = ModuleRtpRtcpImpl2::Create(configuration);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
video_config.clock = &fake_clock;
@@ -209,7 +211,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp,
- timestamp / 90, payload_data, nullptr, video_header, 0));
+ timestamp / 90, payload_data, video_header, 0));
// Min required delay until retransmit = 5 + RTT ms (RTT = 0).
fake_clock.AdvanceTimeMilliseconds(5);
int length = BuildNackList(nack_list);
@@ -224,7 +226,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_module_;
std::unique_ptr<RTPSenderVideo> rtp_sender_video_;
RtxLoopBackTransport transport_;
const std::map<int, int> rtx_associated_payload_types_ = {
@@ -259,7 +261,7 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) {
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp,
- timestamp / 90, payload_data, nullptr, video_header, 0));
+ timestamp / 90, payload_data, video_header, 0));
// Prepare next frame.
timestamp += 3000;
fake_clock.AdvanceTimeMilliseconds(33);
diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 0c47e08b1e..6ec41a1eb0 100644
--- a/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -100,7 +100,7 @@ bool StreamStatisticianImpl::UpdateOutOfOrder(const RtpPacketReceived& packet,
}
void StreamStatisticianImpl::UpdateCounters(const RtpPacketReceived& packet) {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
RTC_DCHECK_EQ(ssrc_, packet.Ssrc());
int64_t now_ms = clock_->TimeInMilliseconds();
@@ -159,17 +159,17 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet,
void StreamStatisticianImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
max_reordering_threshold_ = max_reordering_threshold;
}
void StreamStatisticianImpl::EnableRetransmitDetection(bool enable) {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
enable_retransmit_detection_ = enable;
}
RtpReceiveStats StreamStatisticianImpl::GetStats() const {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
RtpReceiveStats stats;
stats.packets_lost = cumulative_loss_;
// TODO(nisse): Can we return a float instead?
@@ -183,7 +183,7 @@ RtpReceiveStats StreamStatisticianImpl::GetStats() const {
bool StreamStatisticianImpl::GetActiveStatisticsAndReset(
RtcpStatistics* statistics) {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
if (clock_->TimeInMilliseconds() - last_receive_time_ms_ >=
kStatisticsTimeoutMs) {
// Not active.
@@ -241,7 +241,7 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
}
absl::optional<int> StreamStatisticianImpl::GetFractionLostInPercent() const {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
if (!ReceivedRtpPacket()) {
return absl::nullopt;
}
@@ -257,12 +257,12 @@ absl::optional<int> StreamStatisticianImpl::GetFractionLostInPercent() const {
StreamDataCounters StreamStatisticianImpl::GetReceiveStreamDataCounters()
const {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
return receive_counters_;
}
uint32_t StreamStatisticianImpl::BitrateReceived() const {
- rtc::CritScope cs(&stream_lock_);
+ MutexLock lock(&stream_lock_);
return incoming_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0);
}
@@ -320,7 +320,7 @@ void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) {
StreamStatisticianImpl* ReceiveStatisticsImpl::GetStatistician(
uint32_t ssrc) const {
- rtc::CritScope cs(&receive_statistics_lock_);
+ MutexLock lock(&receive_statistics_lock_);
const auto& it = statisticians_.find(ssrc);
if (it == statisticians_.end())
return NULL;
@@ -329,7 +329,7 @@ StreamStatisticianImpl* ReceiveStatisticsImpl::GetStatistician(
StreamStatisticianImpl* ReceiveStatisticsImpl::GetOrCreateStatistician(
uint32_t ssrc) {
- rtc::CritScope cs(&receive_statistics_lock_);
+ MutexLock lock(&receive_statistics_lock_);
StreamStatisticianImpl*& impl = statisticians_[ssrc];
if (impl == nullptr) { // new element
impl = new StreamStatisticianImpl(ssrc, clock_, max_reordering_threshold_);
@@ -341,7 +341,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
std::map<uint32_t, StreamStatisticianImpl*> statisticians;
{
- rtc::CritScope cs(&receive_statistics_lock_);
+ MutexLock lock(&receive_statistics_lock_);
max_reordering_threshold_ = max_reordering_threshold;
statisticians = statisticians_;
}
@@ -366,7 +366,7 @@ std::vector<rtcp::ReportBlock> ReceiveStatisticsImpl::RtcpReportBlocks(
size_t max_blocks) {
std::map<uint32_t, StreamStatisticianImpl*> statisticians;
{
- rtc::CritScope cs(&receive_statistics_lock_);
+ MutexLock lock(&receive_statistics_lock_);
statisticians = statisticians_;
}
std::vector<rtcp::ReportBlock> result;
diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.h b/modules/rtp_rtcp/source/receive_statistics_impl.h
index e352ae8787..41830b0b48 100644
--- a/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -18,8 +18,8 @@
#include "absl/types/optional.h"
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -65,7 +65,7 @@ class StreamStatisticianImpl : public StreamStatistician {
const uint32_t ssrc_;
Clock* const clock_;
- rtc::CriticalSection stream_lock_;
+ mutable Mutex stream_lock_;
RateStatistics incoming_bitrate_ RTC_GUARDED_BY(&stream_lock_);
// In number of packets or sequence numbers.
int max_reordering_threshold_ RTC_GUARDED_BY(&stream_lock_);
@@ -123,7 +123,7 @@ class ReceiveStatisticsImpl : public ReceiveStatistics {
StreamStatisticianImpl* GetOrCreateStatistician(uint32_t ssrc);
Clock* const clock_;
- rtc::CriticalSection receive_statistics_lock_;
+ mutable Mutex receive_statistics_lock_;
uint32_t last_returned_ssrc_;
int max_reordering_threshold_ RTC_GUARDED_BY(receive_statistics_lock_);
std::map<uint32_t, StreamStatisticianImpl*> statisticians_
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc
index bfe2667684..1db5eeb550 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include <algorithm>
#include <limits>
#include <map>
#include <memory>
@@ -63,10 +64,11 @@ const int64_t kRtcpMinFrameLengthMs = 17;
// Maximum number of received RRTRs that will be stored.
const size_t kMaxNumberOfStoredRrtrs = 300;
-constexpr int32_t kDefaultVideoReportInterval = 1000;
-constexpr int32_t kDefaultAudioReportInterval = 5000;
+constexpr TimeDelta kDefaultVideoReportInterval = TimeDelta::Seconds(1);
+constexpr TimeDelta kDefaultAudioReportInterval = TimeDelta::Seconds(5);
-std::set<uint32_t> GetRegisteredSsrcs(const RtpRtcp::Configuration& config) {
+std::set<uint32_t> GetRegisteredSsrcs(
+ const RtpRtcpInterface::Configuration& config) {
std::set<uint32_t> ssrcs;
ssrcs.insert(config.local_media_ssrc);
if (config.rtx_send_ssrc) {
@@ -80,6 +82,22 @@ std::set<uint32_t> GetRegisteredSsrcs(const RtpRtcp::Configuration& config) {
}
return ssrcs;
}
+
+// Returns true if the |timestamp| has exceeded the |interval *
+// kRrTimeoutIntervals| period and was reset (set to PlusInfinity()). Returns
+// false if the timer was either already reset or if it has not expired.
+bool ResetTimestampIfExpired(const Timestamp now,
+ Timestamp& timestamp,
+ TimeDelta interval) {
+ if (timestamp.IsInfinite() ||
+ now <= timestamp + interval * kRrTimeoutIntervals) {
+ return false;
+ }
+
+ timestamp = Timestamp::PlusInfinity();
+ return true;
+}
+
} // namespace
struct RTCPReceiver::PacketInformation {
@@ -136,7 +154,7 @@ struct RTCPReceiver::LastFirStatus {
uint8_t sequence_number;
};
-RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config,
+RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
ModuleRtpRtcp* owner)
: clock_(config.clock),
receiver_only_(config.receiver_only),
@@ -149,18 +167,16 @@ RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config,
network_state_estimate_observer_(config.network_state_estimate_observer),
transport_feedback_observer_(config.transport_feedback_callback),
bitrate_allocation_observer_(config.bitrate_allocation_observer),
- report_interval_ms_(config.rtcp_report_interval_ms > 0
- ? config.rtcp_report_interval_ms
- : (config.audio ? kDefaultAudioReportInterval
- : kDefaultVideoReportInterval)),
+ report_interval_(config.rtcp_report_interval_ms > 0
+ ? TimeDelta::Millis(config.rtcp_report_interval_ms)
+ : (config.audio ? kDefaultAudioReportInterval
+ : kDefaultVideoReportInterval)),
// TODO(bugs.webrtc.org/10774): Remove fallback.
remote_ssrc_(0),
remote_sender_rtp_time_(0),
xr_rrtr_status_(false),
xr_rr_rtt_ms_(0),
oldest_tmmbr_info_ms_(0),
- last_received_rb_ms_(0),
- last_increased_sequence_number_ms_(0),
stats_callback_(config.rtcp_statistics_callback),
cname_callback_(config.rtcp_cname_callback),
report_block_data_observer_(config.report_block_data_observer),
@@ -184,20 +200,22 @@ void RTCPReceiver::IncomingPacket(rtc::ArrayView<const uint8_t> packet) {
TriggerCallbacksFromRtcpPacket(packet_information);
}
+// This method is only used by test and legacy code, so we should be able to
+// remove it soon.
int64_t RTCPReceiver::LastReceivedReportBlockMs() const {
- rtc::CritScope lock(&rtcp_receiver_lock_);
- return last_received_rb_ms_;
+ MutexLock lock(&rtcp_receiver_lock_);
+ return last_received_rb_.IsFinite() ? last_received_rb_.ms() : 0;
}
void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
// New SSRC reset old reports.
last_received_sr_ntp_.Reset();
remote_ssrc_ = ssrc;
}
uint32_t RTCPReceiver::RemoteSSRC() const {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
return remote_ssrc_;
}
@@ -206,7 +224,7 @@ int32_t RTCPReceiver::RTT(uint32_t remote_ssrc,
int64_t* avg_rtt_ms,
int64_t* min_rtt_ms,
int64_t* max_rtt_ms) const {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
auto it = received_report_blocks_.find(main_ssrc_);
if (it == received_report_blocks_.end())
@@ -239,13 +257,13 @@ int32_t RTCPReceiver::RTT(uint32_t remote_ssrc,
}
void RTCPReceiver::SetRtcpXrRrtrStatus(bool enable) {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
xr_rrtr_status_ = enable;
}
bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) {
RTC_DCHECK(rtt_ms);
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
if (xr_rr_rtt_ms_ == 0) {
return false;
}
@@ -254,12 +272,66 @@ bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) {
return true;
}
+// Called regularly (1/sec) on the worker thread to do rtt calculations.
+absl::optional<TimeDelta> RTCPReceiver::OnPeriodicRttUpdate(
+ Timestamp newer_than,
+ bool sending) {
+ // Running on the worker thread (same as construction thread).
+ absl::optional<TimeDelta> rtt;
+
+ if (sending) {
+ // Check if we've received a report block within the last kRttUpdateInterval
+ // amount of time.
+ MutexLock lock(&rtcp_receiver_lock_);
+ if (last_received_rb_.IsInfinite() || last_received_rb_ > newer_than) {
+ // Stow away the report block for the main ssrc. We'll use the associated
+ // data map to look up each sender and check the last_rtt_ms().
+ auto main_report_it = received_report_blocks_.find(main_ssrc_);
+ if (main_report_it != received_report_blocks_.end()) {
+ const ReportBlockDataMap& main_data_map = main_report_it->second;
+ int64_t max_rtt = 0;
+ for (const auto& reports_per_receiver : received_report_blocks_) {
+ for (const auto& report : reports_per_receiver.second) {
+ const RTCPReportBlock& block = report.second.report_block();
+ auto it_info = main_data_map.find(block.sender_ssrc);
+ if (it_info != main_data_map.end()) {
+ const ReportBlockData* report_block_data = &it_info->second;
+ if (report_block_data->num_rtts() > 0) {
+ max_rtt = std::max(report_block_data->last_rtt_ms(), max_rtt);
+ }
+ }
+ }
+ }
+ if (max_rtt)
+ rtt.emplace(TimeDelta::Millis(max_rtt));
+ }
+ }
+
+ // Check for expired timers and if so, log and reset.
+ auto now = clock_->CurrentTime();
+ if (RtcpRrTimeoutLocked(now)) {
+ RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received.";
+ } else if (RtcpRrSequenceNumberTimeoutLocked(now)) {
+ RTC_LOG_F(LS_WARNING) << "Timeout: No increase in RTCP RR extended "
+ "highest sequence number.";
+ }
+ } else {
+ // Report rtt from receiver.
+ int64_t rtt_ms;
+ if (GetAndResetXrRrRtt(&rtt_ms)) {
+ rtt.emplace(TimeDelta::Millis(rtt_ms));
+ }
+ }
+
+ return rtt;
+}
+
bool RTCPReceiver::NTP(uint32_t* received_ntp_secs,
uint32_t* received_ntp_frac,
uint32_t* rtcp_arrival_time_secs,
uint32_t* rtcp_arrival_time_frac,
uint32_t* rtcp_timestamp) const {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
if (!last_received_sr_ntp_.Valid())
return false;
@@ -284,7 +356,7 @@ bool RTCPReceiver::NTP(uint32_t* received_ntp_secs,
std::vector<rtcp::ReceiveTimeInfo>
RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
const size_t last_xr_rtis_size = std::min(
received_rrtrs_.size(), rtcp::ExtendedReports::kMaxNumberOfDlrrItems);
@@ -309,7 +381,7 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() {
int32_t RTCPReceiver::StatisticsReceived(
std::vector<RTCPReportBlock>* receive_blocks) const {
RTC_DCHECK(receive_blocks);
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
for (const auto& reports_per_receiver : received_report_blocks_)
for (const auto& report : reports_per_receiver.second)
receive_blocks->push_back(report.second.report_block());
@@ -318,7 +390,7 @@ int32_t RTCPReceiver::StatisticsReceived(
std::vector<ReportBlockData> RTCPReceiver::GetLatestReportBlockData() const {
std::vector<ReportBlockData> result;
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
for (const auto& reports_per_receiver : received_report_blocks_)
for (const auto& report : reports_per_receiver.second)
result.push_back(report.second);
@@ -327,7 +399,7 @@ std::vector<ReportBlockData> RTCPReceiver::GetLatestReportBlockData() const {
bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView<const uint8_t> packet,
PacketInformation* packet_information) {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
CommonHeader rtcp_block;
for (const uint8_t* next_block = packet.begin(); next_block != packet.end();
@@ -498,8 +570,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block,
if (registered_ssrcs_.count(report_block.source_ssrc()) == 0)
return;
- const Timestamp now = clock_->CurrentTime();
- last_received_rb_ms_ = now.ms();
+ last_received_rb_ = clock_->CurrentTime();
ReportBlockData* report_block_data =
&received_report_blocks_[report_block.source_ssrc()][remote_ssrc];
@@ -512,7 +583,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block,
report_block_data->report_block().extended_highest_sequence_number) {
// We have successfully delivered new RTP packets to the remote side after
// the last RR was sent from the remote side.
- last_increased_sequence_number_ms_ = now.ms();
+ last_increased_sequence_number_ = last_received_rb_;
}
rtcp_report_block.extended_highest_sequence_number =
report_block.extended_high_seq_num();
@@ -538,7 +609,8 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block,
if (send_time_ntp != 0) {
uint32_t delay_ntp = report_block.delay_since_last_sr();
// Local NTP time.
- uint32_t receive_time_ntp = CompactNtp(TimeMicrosToNtp(now.us()));
+ uint32_t receive_time_ntp =
+ CompactNtp(TimeMicrosToNtp(last_received_rb_.us()));
// RTT in 1/(2^16) seconds.
uint32_t rtt_ntp = receive_time_ntp - delay_ntp - send_time_ntp;
@@ -577,37 +649,22 @@ RTCPReceiver::TmmbrInformation* RTCPReceiver::GetTmmbrInformation(
return &it->second;
}
+// These two methods (RtcpRrTimeout and RtcpRrSequenceNumberTimeout) only exist
+// for tests and legacy code (rtp_rtcp_impl.cc). We should be able to to delete
+// the methods and require that access to the locked variables only happens on
+// the worker thread and thus no locking is needed.
bool RTCPReceiver::RtcpRrTimeout() {
- rtc::CritScope lock(&rtcp_receiver_lock_);
- if (last_received_rb_ms_ == 0)
- return false;
-
- int64_t time_out_ms = kRrTimeoutIntervals * report_interval_ms_;
- if (clock_->TimeInMilliseconds() > last_received_rb_ms_ + time_out_ms) {
- // Reset the timer to only trigger one log.
- last_received_rb_ms_ = 0;
- return true;
- }
- return false;
+ MutexLock lock(&rtcp_receiver_lock_);
+ return RtcpRrTimeoutLocked(clock_->CurrentTime());
}
bool RTCPReceiver::RtcpRrSequenceNumberTimeout() {
- rtc::CritScope lock(&rtcp_receiver_lock_);
- if (last_increased_sequence_number_ms_ == 0)
- return false;
-
- int64_t time_out_ms = kRrTimeoutIntervals * report_interval_ms_;
- if (clock_->TimeInMilliseconds() >
- last_increased_sequence_number_ms_ + time_out_ms) {
- // Reset the timer to only trigger one log.
- last_increased_sequence_number_ms_ = 0;
- return true;
- }
- return false;
+ MutexLock lock(&rtcp_receiver_lock_);
+ return RtcpRrSequenceNumberTimeoutLocked(clock_->CurrentTime());
}
bool RTCPReceiver::UpdateTmmbrTimers() {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
int64_t now_ms = clock_->TimeInMilliseconds();
int64_t timeout_ms = now_ms - kTmmbrTimeoutIntervalMs;
@@ -644,7 +701,7 @@ bool RTCPReceiver::UpdateTmmbrTimers() {
}
std::vector<rtcp::TmmbItem> RTCPReceiver::BoundingSet(bool* tmmbr_owner) {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
TmmbrInformation* tmmbr_info = GetTmmbrInformation(remote_ssrc_);
if (!tmmbr_info)
return std::vector<rtcp::TmmbItem>();
@@ -1005,7 +1062,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
std::set<uint32_t> registered_ssrcs;
{
// We don't want to hold this critsect when triggering the callbacks below.
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
local_ssrc = main_ssrc_;
registered_ssrcs = registered_ssrcs_;
}
@@ -1121,7 +1178,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
char cName[RTCP_CNAME_SIZE]) const {
RTC_DCHECK(cName);
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
auto received_cname_it = received_cnames_.find(remoteSSRC);
if (received_cname_it == received_cnames_.end())
return -1;
@@ -1132,7 +1189,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
}
std::vector<rtcp::TmmbItem> RTCPReceiver::TmmbrReceived() {
- rtc::CritScope lock(&rtcp_receiver_lock_);
+ MutexLock lock(&rtcp_receiver_lock_);
std::vector<rtcp::TmmbItem> candidates;
int64_t now_ms = clock_->TimeInMilliseconds();
@@ -1152,4 +1209,13 @@ std::vector<rtcp::TmmbItem> RTCPReceiver::TmmbrReceived() {
return candidates;
}
+bool RTCPReceiver::RtcpRrTimeoutLocked(Timestamp now) {
+ return ResetTimestampIfExpired(now, last_received_rb_, report_interval_);
+}
+
+bool RTCPReceiver::RtcpRrSequenceNumberTimeoutLocked(Timestamp now) {
+ return ResetTimestampIfExpired(now, last_increased_sequence_number_,
+ report_interval_);
+}
+
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h
index ef41476903..f97fe61291 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -20,11 +20,11 @@
#include "api/array_view.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
#include "modules/rtp_rtcp/include/rtcp_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_nack_stats.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
-#include "rtc_base/critical_section.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/ntp_time.h"
@@ -53,7 +53,8 @@ class RTCPReceiver final {
virtual ~ModuleRtpRtcp() = default;
};
- RTCPReceiver(const RtpRtcp::Configuration& config, ModuleRtpRtcp* owner);
+ RTCPReceiver(const RtpRtcpInterface::Configuration& config,
+ ModuleRtpRtcp* owner);
~RTCPReceiver();
void IncomingPacket(const uint8_t* packet, size_t packet_size) {
@@ -88,6 +89,11 @@ class RTCPReceiver final {
void SetRtcpXrRrtrStatus(bool enable);
bool GetAndResetXrRrRtt(int64_t* rtt_ms);
+ // Called once per second on the worker thread to do rtt calculations.
+ // Returns an optional rtt value if one is available.
+ absl::optional<TimeDelta> OnPeriodicRttUpdate(Timestamp newer_than,
+ bool sending);
+
// Get statistics.
int32_t StatisticsReceived(std::vector<RTCPReportBlock>* receiveBlocks) const;
// A snapshot of Report Blocks with additional data of interest to statistics.
@@ -209,6 +215,12 @@ class RTCPReceiver final {
PacketInformation* packet_information)
RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_);
+ bool RtcpRrTimeoutLocked(Timestamp now)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_);
+
+ bool RtcpRrSequenceNumberTimeoutLocked(Timestamp now)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_);
+
Clock* const clock_;
const bool receiver_only_;
ModuleRtpRtcp* const rtp_rtcp_;
@@ -221,9 +233,9 @@ class RTCPReceiver final {
NetworkStateEstimateObserver* const network_state_estimate_observer_;
TransportFeedbackObserver* const transport_feedback_observer_;
VideoBitrateAllocationObserver* const bitrate_allocation_observer_;
- const int report_interval_ms_;
+ const TimeDelta report_interval_;
- rtc::CriticalSection rtcp_receiver_lock_;
+ mutable Mutex rtcp_receiver_lock_;
uint32_t remote_ssrc_ RTC_GUARDED_BY(rtcp_receiver_lock_);
// Received sender report.
@@ -255,11 +267,12 @@ class RTCPReceiver final {
RTC_GUARDED_BY(rtcp_receiver_lock_);
// The last time we received an RTCP Report block for this module.
- int64_t last_received_rb_ms_ RTC_GUARDED_BY(rtcp_receiver_lock_);
+ Timestamp last_received_rb_ RTC_GUARDED_BY(rtcp_receiver_lock_) =
+ Timestamp::PlusInfinity();
// The time we last received an RTCP RR telling we have successfully
// delivered RTP packet to the remote side.
- int64_t last_increased_sequence_number_ms_;
+ Timestamp last_increased_sequence_number_ = Timestamp::PlusInfinity();
RtcpStatisticsCallback* const stats_callback_;
RtcpCnameCallback* const cname_callback_;
diff --git a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index f95219674b..a384d71913 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -161,8 +161,8 @@ struct ReceiverMocks {
StrictMock<MockModuleRtpRtcp> rtp_rtcp_impl;
};
-RtpRtcp::Configuration DefaultConfiguration(ReceiverMocks* mocks) {
- RtpRtcp::Configuration config;
+RtpRtcpInterface::Configuration DefaultConfiguration(ReceiverMocks* mocks) {
+ RtpRtcpInterface::Configuration config;
config.clock = &mocks->clock;
config.receiver_only = false;
config.rtcp_packet_type_counter_observer =
@@ -230,7 +230,7 @@ TEST(RtcpReceiverTest, InjectSrPacketFromUnknownSender) {
rtcp::SenderReport sr;
sr.SetSenderSsrc(kUnknownSenderSsrc);
- // The parser will handle report blocks in Sender Report from other than his
+ // The parser will handle report blocks in Sender Report from other than their
// expected peer.
EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks);
EXPECT_CALL(mocks.bandwidth_observer,
@@ -636,7 +636,7 @@ TEST(RtcpReceiverTest, InjectApp) {
TEST(RtcpReceiverTest, InjectSdesWithOneChunk) {
ReceiverMocks mocks;
MockCnameCallbackImpl callback;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.rtcp_cname_callback = &callback;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1310,7 +1310,7 @@ TEST(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) {
TEST(RtcpReceiverTest, Callbacks) {
ReceiverMocks mocks;
MockRtcpCallbackImpl callback;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.rtcp_statistics_callback = &callback;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1348,7 +1348,7 @@ TEST(RtcpReceiverTest,
VerifyBlockAndTimestampObtainedFromReportBlockDataObserver) {
ReceiverMocks mocks;
MockReportBlockDataObserverImpl observer;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.report_block_data_observer = &observer;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1397,7 +1397,7 @@ TEST(RtcpReceiverTest,
TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) {
ReceiverMocks mocks;
MockReportBlockDataObserverImpl observer;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.report_block_data_observer = &observer;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc
index f06d429fb9..fae635e1bc 100644
--- a/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -33,7 +33,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "modules/rtp_rtcp/source/tmmbr_help.h"
#include "rtc_base/checks.h"
@@ -123,7 +123,7 @@ RTCPSender::FeedbackState::FeedbackState()
last_rr_ntp_secs(0),
last_rr_ntp_frac(0),
remote_sr(0),
- module(nullptr) {}
+ receiver(nullptr) {}
RTCPSender::FeedbackState::FeedbackState(const FeedbackState&) = default;
@@ -148,7 +148,7 @@ class RTCPSender::RtcpContext {
const int64_t now_us_;
};
-RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
+RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
: audio_(config.audio),
ssrc_(config.local_media_ssrc),
clock_(config.clock),
@@ -176,11 +176,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
packet_oh_send_(0),
max_packet_size_(IP_PACKET_SIZE - 28), // IPv4 + UDP by default.
- app_sub_type_(0),
- app_name_(0),
- app_data_(nullptr),
- app_length_(0),
-
xr_send_receiver_reference_time_enabled_(false),
packet_type_counter_observer_(config.rtcp_packet_type_counter_observer),
send_video_bitrate_allocation_(false),
@@ -194,7 +189,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
builders_[kRtcpFir] = &RTCPSender::BuildFIR;
builders_[kRtcpRemb] = &RTCPSender::BuildREMB;
builders_[kRtcpBye] = &RTCPSender::BuildBYE;
- builders_[kRtcpApp] = &RTCPSender::BuildAPP;
builders_[kRtcpLossNotification] = &RTCPSender::BuildLossNotification;
builders_[kRtcpTmmbr] = &RTCPSender::BuildTMMBR;
builders_[kRtcpTmmbn] = &RTCPSender::BuildTMMBN;
@@ -205,12 +199,12 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
RTCPSender::~RTCPSender() {}
RtcpMode RTCPSender::Status() const {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
return method_;
}
void RTCPSender::SetRTCPStatus(RtcpMode new_method) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
if (method_ == RtcpMode::kOff && new_method != RtcpMode::kOff) {
// When switching on, reschedule the next packet
@@ -221,7 +215,7 @@ void RTCPSender::SetRTCPStatus(RtcpMode new_method) {
}
bool RTCPSender::Sending() const {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
return sending_;
}
@@ -229,7 +223,7 @@ int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state,
bool sending) {
bool sendRTCPBye = false;
{
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
if (method_ != RtcpMode::kOff) {
if (sending == false && sending_ == true) {
@@ -249,7 +243,7 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state,
uint16_t last_received_seq_num,
bool decodability_flag,
bool buffering_allowed) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
loss_notification_state_.last_decoded_seq_num = last_decoded_seq_num;
loss_notification_state_.last_received_seq_num = last_received_seq_num;
@@ -262,13 +256,13 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state,
return 0;
}
- return SendCompoundRTCP(feedback_state,
- {RTCPPacketType::kRtcpLossNotification});
+ return SendCompoundRTCPLocked(
+ feedback_state, {RTCPPacketType::kRtcpLossNotification}, 0, nullptr);
}
void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
RTC_CHECK_GE(bitrate_bps, 0);
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
remb_bitrate_ = bitrate_bps;
remb_ssrcs_ = std::move(ssrcs);
@@ -279,18 +273,18 @@ void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
}
void RTCPSender::UnsetRemb() {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
// Stop sending REMB each report until it is reenabled and REMB data set.
ConsumeFlag(kRtcpRemb, /*forced=*/true);
}
bool RTCPSender::TMMBR() const {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
return IsFlagPresent(RTCPPacketType::kRtcpTmmbr);
}
void RTCPSender::SetTMMBRStatus(bool enable) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
if (enable) {
SetFlag(RTCPPacketType::kRtcpTmmbr, false);
} else {
@@ -299,19 +293,19 @@ void RTCPSender::SetTMMBRStatus(bool enable) {
}
void RTCPSender::SetMaxRtpPacketSize(size_t max_packet_size) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
max_packet_size_ = max_packet_size;
}
void RTCPSender::SetTimestampOffset(uint32_t timestamp_offset) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
timestamp_offset_ = timestamp_offset;
}
void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
int64_t capture_time_ms,
int8_t payload_type) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
// For compatibility with clients who don't set payload type correctly on all
// calls.
if (payload_type != -1) {
@@ -327,12 +321,12 @@ void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
}
void RTCPSender::SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
rtp_clock_rates_khz_[payload_type] = rtp_clock_rate_hz / 1000;
}
void RTCPSender::SetRemoteSSRC(uint32_t ssrc) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
remote_ssrc_ = ssrc;
}
@@ -341,7 +335,7 @@ int32_t RTCPSender::SetCNAME(const char* c_name) {
return -1;
RTC_DCHECK_LT(strlen(c_name), RTCP_CNAME_SIZE);
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
cname_ = c_name;
return 0;
}
@@ -349,7 +343,7 @@ int32_t RTCPSender::SetCNAME(const char* c_name) {
int32_t RTCPSender::AddMixedCNAME(uint32_t SSRC, const char* c_name) {
RTC_DCHECK(c_name);
RTC_DCHECK_LT(strlen(c_name), RTCP_CNAME_SIZE);
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
// One spot is reserved for ssrc_/cname_.
// TODO(danilchap): Add support for more than 30 contributes by sending
// several sdes packets.
@@ -361,7 +355,7 @@ int32_t RTCPSender::AddMixedCNAME(uint32_t SSRC, const char* c_name) {
}
int32_t RTCPSender::RemoveMixedCNAME(uint32_t SSRC) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
auto it = csrc_cnames_.find(SSRC);
if (it == csrc_cnames_.end())
@@ -432,7 +426,7 @@ bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const {
int64_t now = clock_->TimeInMilliseconds();
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
if (method_ == RtcpMode::kOff)
return false;
@@ -538,13 +532,13 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildREMB(
}
void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
tmmbr_send_bps_ = target_bitrate;
}
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
const RtcpContext& ctx) {
- if (ctx.feedback_state_.module == nullptr)
+ if (ctx.feedback_state_.receiver == nullptr)
return nullptr;
// Before sending the TMMBR check the received TMMBN, only an owner is
// allowed to raise the bitrate:
@@ -554,11 +548,11 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
// get current bounding set from RTCP receiver
bool tmmbr_owner = false;
- // holding critical_section_rtcp_sender_ while calling RTCPreceiver which
+ // holding mutex_rtcp_sender_ while calling RTCPreceiver which
// will accuire criticalSectionRTCPReceiver_ is a potental deadlock but
// since RTCPreceiver is not doing the reverse we should be fine
std::vector<rtcp::TmmbItem> candidates =
- ctx.feedback_state_.module->BoundingSet(&tmmbr_owner);
+ ctx.feedback_state_.receiver->BoundingSet(&tmmbr_owner);
if (!candidates.empty()) {
for (const auto& candidate : candidates) {
@@ -614,9 +608,6 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBN(
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildAPP(const RtcpContext& ctx) {
rtcp::App* app = new rtcp::App();
app->SetSenderSsrc(ssrc_);
- app->SetSubType(app_sub_type_);
- app->SetName(app_name_);
- app->SetData(app_data_.get(), app_length_);
return std::unique_ptr<rtcp::RtcpPacket>(app);
}
@@ -711,53 +702,87 @@ int32_t RTCPSender::SendCompoundRTCP(
size_t max_packet_size;
{
- rtc::CritScope lock(&critical_section_rtcp_sender_);
- if (method_ == RtcpMode::kOff) {
- RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
- return -1;
+ MutexLock lock(&mutex_rtcp_sender_);
+ auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types,
+ nack_size, nack_list, &container);
+ if (result) {
+ return *result;
}
- // Add all flags as volatile. Non volatile entries will not be overwritten.
- // All new volatile flags added will be consumed by the end of this call.
- SetFlags(packet_types, true);
-
- // Prevent sending streams to send SR before any media has been sent.
- const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0);
- if (!can_calculate_rtp_timestamp) {
- bool consumed_sr_flag = ConsumeFlag(kRtcpSr);
- bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport);
- bool sender_report = consumed_report_flag || consumed_sr_flag;
- if (sender_report && AllVolatileFlagsConsumed()) {
- // This call was for Sender Report and nothing else.
- return 0;
- }
- if (sending_ && method_ == RtcpMode::kCompound) {
- // Not allowed to send any RTCP packet without sender report.
- return -1;
- }
+ max_packet_size = max_packet_size_;
+ }
+
+ size_t bytes_sent = container.SendPackets(max_packet_size);
+ return bytes_sent == 0 ? -1 : 0;
+}
+
+int32_t RTCPSender::SendCompoundRTCPLocked(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list) {
+ PacketContainer container(transport_, event_log_);
+ auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types,
+ nack_size, nack_list, &container);
+ if (result) {
+ return *result;
+ }
+ size_t bytes_sent = container.SendPackets(max_packet_size_);
+ return bytes_sent == 0 ? -1 : 0;
+}
+
+absl::optional<int32_t> RTCPSender::ComputeCompoundRTCPPacket(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list,
+ rtcp::CompoundPacket* out_packet) {
+ if (method_ == RtcpMode::kOff) {
+ RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
+ return -1;
+ }
+ // Add all flags as volatile. Non volatile entries will not be overwritten.
+ // All new volatile flags added will be consumed by the end of this call.
+ SetFlags(packet_types, true);
+
+ // Prevent sending streams to send SR before any media has been sent.
+ const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0);
+ if (!can_calculate_rtp_timestamp) {
+ bool consumed_sr_flag = ConsumeFlag(kRtcpSr);
+ bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport);
+ bool sender_report = consumed_report_flag || consumed_sr_flag;
+ if (sender_report && AllVolatileFlagsConsumed()) {
+ // This call was for Sender Report and nothing else.
+ return 0;
+ }
+ if (sending_ && method_ == RtcpMode::kCompound) {
+ // Not allowed to send any RTCP packet without sender report.
+ return -1;
}
+ }
- if (packet_type_counter_.first_packet_time_ms == -1)
- packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds();
+ if (packet_type_counter_.first_packet_time_ms == -1)
+ packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds();
- // We need to send our NTP even if we haven't received any reports.
- RtcpContext context(feedback_state, nack_size, nack_list,
- clock_->TimeInMicroseconds());
+ // We need to send our NTP even if we haven't received any reports.
+ RtcpContext context(feedback_state, nack_size, nack_list,
+ clock_->TimeInMicroseconds());
- PrepareReport(feedback_state);
+ PrepareReport(feedback_state);
- std::unique_ptr<rtcp::RtcpPacket> packet_bye;
+ std::unique_ptr<rtcp::RtcpPacket> packet_bye;
- auto it = report_flags_.begin();
- while (it != report_flags_.end()) {
- auto builder_it = builders_.find(it->type);
- RTC_DCHECK(builder_it != builders_.end())
- << "Could not find builder for packet type " << it->type;
- if (it->is_volatile) {
- report_flags_.erase(it++);
- } else {
- ++it;
- }
+ auto it = report_flags_.begin();
+ while (it != report_flags_.end()) {
+ auto builder_it = builders_.find(it->type);
+ if (it->is_volatile) {
+ report_flags_.erase(it++);
+ } else {
+ ++it;
+ }
+ if (builder_it == builders_.end()) {
+ RTC_NOTREACHED() << "Could not find builder for packet type " << it->type;
+ } else {
BuilderFunc func = builder_it->second;
std::unique_ptr<rtcp::RtcpPacket> packet = (this->*func)(context);
if (packet == nullptr)
@@ -767,26 +792,23 @@ int32_t RTCPSender::SendCompoundRTCP(
if (builder_it->first == kRtcpBye) {
packet_bye = std::move(packet);
} else {
- container.Append(packet.release());
+ out_packet->Append(packet.release());
}
}
+ }
- // Append the BYE now at the end
- if (packet_bye) {
- container.Append(packet_bye.release());
- }
-
- if (packet_type_counter_observer_ != nullptr) {
- packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
- remote_ssrc_, packet_type_counter_);
- }
+ // Append the BYE now at the end
+ if (packet_bye) {
+ out_packet->Append(packet_bye.release());
+ }
- RTC_DCHECK(AllVolatileFlagsConsumed());
- max_packet_size = max_packet_size_;
+ if (packet_type_counter_observer_ != nullptr) {
+ packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
+ remote_ssrc_, packet_type_counter_);
}
- size_t bytes_sent = container.SendPackets(max_packet_size);
- return bytes_sent == 0 ? -1 : 0;
+ RTC_DCHECK(AllVolatileFlagsConsumed());
+ return absl::nullopt;
}
void RTCPSender::PrepareReport(const FeedbackState& feedback_state) {
@@ -873,41 +895,22 @@ std::vector<rtcp::ReportBlock> RTCPSender::CreateReportBlocks(
void RTCPSender::SetCsrcs(const std::vector<uint32_t>& csrcs) {
RTC_DCHECK_LE(csrcs.size(), kRtpCsrcSize);
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
csrcs_ = csrcs;
}
-int32_t RTCPSender::SetApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length) {
- if (length % 4 != 0) {
- RTC_LOG(LS_ERROR) << "Failed to SetApplicationSpecificData.";
- return -1;
- }
- rtc::CritScope lock(&critical_section_rtcp_sender_);
-
- SetFlag(kRtcpApp, true);
- app_sub_type_ = subType;
- app_name_ = name;
- app_data_.reset(new uint8_t[length]);
- app_length_ = length;
- memcpy(app_data_.get(), data, length);
- return 0;
-}
-
void RTCPSender::SendRtcpXrReceiverReferenceTime(bool enable) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
xr_send_receiver_reference_time_enabled_ = enable;
}
bool RTCPSender::RtcpXrReceiverReferenceTime() const {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
return xr_send_receiver_reference_time_enabled_;
}
void RTCPSender::SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
tmmbn_to_send_ = std::move(bounding_set);
SetFlag(kRtcpTmmbn, true);
}
@@ -949,7 +952,7 @@ bool RTCPSender::AllVolatileFlagsConsumed() const {
void RTCPSender::SetVideoBitrateAllocation(
const VideoBitrateAllocation& bitrate) {
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
// Check if this allocation is first ever, or has a different set of
// spatial/temporal layers signaled and enabled, if so trigger an rtcp report
// as soon as possible.
@@ -997,7 +1000,7 @@ void RTCPSender::SendCombinedRtcpPacket(
size_t max_packet_size;
uint32_t ssrc;
{
- rtc::CritScope lock(&critical_section_rtcp_sender_);
+ MutexLock lock(&mutex_rtcp_sender_);
if (method_ == RtcpMode::kOff) {
RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
return;
diff --git a/modules/rtp_rtcp/source/rtcp_sender.h b/modules/rtp_rtcp/source/rtcp_sender.h
index 32c1e1dbc1..bcdf15edab 100644
--- a/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/modules/rtp_rtcp/source/rtcp_sender.h
@@ -23,24 +23,25 @@
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_nack_stats.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
-class ModuleRtpRtcpImpl;
+class RTCPReceiver;
class RtcEventLog;
-class RTCPSender {
+class RTCPSender final {
public:
struct FeedbackState {
FeedbackState();
@@ -60,162 +61,189 @@ class RTCPSender {
std::vector<rtcp::ReceiveTimeInfo> last_xr_rtis;
// Used when generating TMMBR.
- ModuleRtpRtcpImpl* module;
+ RTCPReceiver* receiver;
};
- explicit RTCPSender(const RtpRtcp::Configuration& config);
+ explicit RTCPSender(const RtpRtcpInterface::Configuration& config);
virtual ~RTCPSender();
- RtcpMode Status() const;
- void SetRTCPStatus(RtcpMode method);
+ RtcpMode Status() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
+ void SetRTCPStatus(RtcpMode method) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- bool Sending() const;
+ bool Sending() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
int32_t SetSendingStatus(const FeedbackState& feedback_state,
- bool enabled); // combine the functions
+ bool enabled)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); // combine the functions
- int32_t SetNackStatus(bool enable);
+ int32_t SetNackStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetTimestampOffset(uint32_t timestamp_offset);
+ void SetTimestampOffset(uint32_t timestamp_offset)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
// TODO(bugs.webrtc.org/6458): Remove default parameter value when all the
// depending projects are updated to correctly set payload type.
void SetLastRtpTime(uint32_t rtp_timestamp,
int64_t capture_time_ms,
- int8_t payload_type = -1);
+ int8_t payload_type = -1)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz);
+ void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
uint32_t SSRC() const { return ssrc_; }
- void SetRemoteSSRC(uint32_t ssrc);
+ void SetRemoteSSRC(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- int32_t SetCNAME(const char* cName);
+ int32_t SetCNAME(const char* cName) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name);
+ int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- int32_t RemoveMixedCNAME(uint32_t SSRC);
+ int32_t RemoveMixedCNAME(uint32_t SSRC)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const;
+ bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
int32_t SendRTCP(const FeedbackState& feedback_state,
RTCPPacketType packetType,
int32_t nackSize = 0,
- const uint16_t* nackList = 0);
+ const uint16_t* nackList = 0)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
int32_t SendCompoundRTCP(const FeedbackState& feedback_state,
const std::set<RTCPPacketType>& packetTypes,
int32_t nackSize = 0,
- const uint16_t* nackList = 0);
+ const uint16_t* nackList = nullptr)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
int32_t SendLossNotification(const FeedbackState& feedback_state,
uint16_t last_decoded_seq_num,
uint16_t last_received_seq_num,
bool decodability_flag,
- bool buffering_allowed);
+ bool buffering_allowed)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs);
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void UnsetRemb();
+ void UnsetRemb() RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- bool TMMBR() const;
+ bool TMMBR() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetTMMBRStatus(bool enable);
+ void SetTMMBRStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetMaxRtpPacketSize(size_t max_packet_size);
+ void SetMaxRtpPacketSize(size_t max_packet_size)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set);
+ void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- int32_t SetApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length);
+ void SendRtcpXrReceiverReferenceTime(bool enable)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SendRtcpXrReceiverReferenceTime(bool enable);
+ bool RtcpXrReceiverReferenceTime() const
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- bool RtcpXrReceiverReferenceTime() const;
+ void SetCsrcs(const std::vector<uint32_t>& csrcs)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- void SetCsrcs(const std::vector<uint32_t>& csrcs);
-
- void SetTargetBitrate(unsigned int target_bitrate);
- void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate);
+ void SetTargetBitrate(unsigned int target_bitrate)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
+ void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
void SendCombinedRtcpPacket(
- std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets);
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
private:
class RtcpContext;
+ int32_t SendCompoundRTCPLocked(const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
+
+ absl::optional<int32_t> ComputeCompoundRTCPPacket(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list,
+ rtcp::CompoundPacket* out_packet)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
+
// Determine which RTCP messages should be sent and setup flags.
void PrepareReport(const FeedbackState& feedback_state)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::vector<rtcp::ReportBlock> CreateReportBlocks(
const FeedbackState& feedback_state)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildSR(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildRR(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildSDES(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildPLI(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildREMB(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildTMMBR(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildTMMBN(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildAPP(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildLossNotification(
const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildExtendedReports(
const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildBYE(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildFIR(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
std::unique_ptr<rtcp::RtcpPacket> BuildNACK(const RtcpContext& context)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
private:
const bool audio_;
const uint32_t ssrc_;
Clock* const clock_;
- Random random_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- RtcpMode method_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ Random random_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ RtcpMode method_ RTC_GUARDED_BY(mutex_rtcp_sender_);
RtcEventLog* const event_log_;
Transport* const transport_;
const int report_interval_ms_;
- rtc::CriticalSection critical_section_rtcp_sender_;
- bool sending_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ mutable Mutex mutex_rtcp_sender_;
+ bool sending_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- int64_t next_time_to_send_rtcp_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ int64_t next_time_to_send_rtcp_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- uint32_t timestamp_offset_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- int64_t last_frame_capture_time_ms_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t timestamp_offset_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ int64_t last_frame_capture_time_ms_ RTC_GUARDED_BY(mutex_rtcp_sender_);
// SSRC that we receive on our RTP channel
- uint32_t remote_ssrc_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- std::string cname_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t remote_ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ std::string cname_ RTC_GUARDED_BY(mutex_rtcp_sender_);
ReceiveStatisticsProvider* receive_statistics_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
std::map<uint32_t, std::string> csrc_cnames_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
// send CSRCs
- std::vector<uint32_t> csrcs_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ std::vector<uint32_t> csrcs_ RTC_GUARDED_BY(mutex_rtcp_sender_);
// Full intra request
- uint8_t sequence_number_fir_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ uint8_t sequence_number_fir_ RTC_GUARDED_BY(mutex_rtcp_sender_);
// Loss Notification
struct LossNotificationState {
@@ -224,59 +252,47 @@ class RTCPSender {
bool decodability_flag;
};
LossNotificationState loss_notification_state_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
// REMB
- int64_t remb_bitrate_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- std::vector<uint32_t> remb_ssrcs_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
-
- std::vector<rtcp::TmmbItem> tmmbn_to_send_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t tmmbr_send_bps_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t packet_oh_send_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- size_t max_packet_size_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
-
- // APP
- uint8_t app_sub_type_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t app_name_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- std::unique_ptr<uint8_t[]> app_data_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint16_t app_length_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ int64_t remb_bitrate_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ std::vector<uint32_t> remb_ssrcs_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+
+ std::vector<rtcp::TmmbItem> tmmbn_to_send_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ uint32_t tmmbr_send_bps_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ uint32_t packet_oh_send_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ size_t max_packet_size_ RTC_GUARDED_BY(mutex_rtcp_sender_);
// True if sending of XR Receiver reference time report is enabled.
bool xr_send_receiver_reference_time_enabled_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
RtcpPacketTypeCounterObserver* const packet_type_counter_observer_;
- RtcpPacketTypeCounter packet_type_counter_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RtcpPacketTypeCounter packet_type_counter_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- RtcpNackStats nack_stats_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RtcpNackStats nack_stats_ RTC_GUARDED_BY(mutex_rtcp_sender_);
VideoBitrateAllocation video_bitrate_allocation_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
- bool send_video_bitrate_allocation_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ bool send_video_bitrate_allocation_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- std::map<int8_t, int> rtp_clock_rates_khz_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
- int8_t last_payload_type_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ std::map<int8_t, int> rtp_clock_rates_khz_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ int8_t last_payload_type_ RTC_GUARDED_BY(mutex_rtcp_sender_);
absl::optional<VideoBitrateAllocation> CheckAndUpdateLayerStructure(
const VideoBitrateAllocation& bitrate) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
void SetFlag(uint32_t type, bool is_volatile)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
void SetFlags(const std::set<RTCPPacketType>& types, bool is_volatile)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
bool IsFlagPresent(uint32_t type) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
bool ConsumeFlag(uint32_t type, bool forced = false)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
bool AllVolatileFlagsConsumed() const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
struct ReportFlag {
ReportFlag(uint32_t type, bool is_volatile)
: type(type), is_volatile(is_volatile) {}
@@ -286,8 +302,7 @@ class RTCPSender {
const bool is_volatile;
};
- std::set<ReportFlag> report_flags_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
+ std::set<ReportFlag> report_flags_ RTC_GUARDED_BY(mutex_rtcp_sender_);
typedef std::unique_ptr<rtcp::RtcpPacket> (RTCPSender::*BuilderFunc)(
const RtcpContext&);
diff --git a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index 754ad89327..4b6d4a3da9 100644
--- a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -18,7 +18,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/rate_limiter.h"
#include "test/gmock.h"
@@ -76,8 +76,8 @@ class RtcpSenderTest : public ::testing::Test {
: clock_(1335900000),
receive_statistics_(ReceiveStatistics::Create(&clock_)),
retransmission_rate_limiter_(&clock_, 1000) {
- RtpRtcp::Configuration configuration = GetDefaultConfig();
- rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl(configuration));
+ RtpRtcpInterface::Configuration configuration = GetDefaultConfig();
+ rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(configuration));
rtcp_sender_.reset(new RTCPSender(configuration));
rtcp_sender_->SetRemoteSSRC(kRemoteSsrc);
rtcp_sender_->SetTimestampOffset(kStartRtpTimestamp);
@@ -85,8 +85,8 @@ class RtcpSenderTest : public ::testing::Test {
/*payload_type=*/0);
}
- RtpRtcp::Configuration GetDefaultConfig() {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration GetDefaultConfig() {
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.clock = &clock_;
configuration.outgoing_transport = &test_transport_;
@@ -115,7 +115,7 @@ class RtcpSenderTest : public ::testing::Test {
SimulatedClock clock_;
TestTransport test_transport_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_impl_;
std::unique_ptr<RTCPSender> rtcp_sender_;
RateLimiter retransmission_rate_limiter_;
};
@@ -191,7 +191,7 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) {
}
TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -213,7 +213,7 @@ TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
}
TEST_F(RtcpSenderTest, DoNotSendCompundBeforeRtp) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -315,47 +315,6 @@ TEST_F(RtcpSenderTest, StopSendingTriggersBye) {
EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc());
}
-TEST_F(RtcpSenderTest, SendApp) {
- const uint8_t kSubType = 30;
- uint32_t name = 'n' << 24;
- name += 'a' << 16;
- name += 'm' << 8;
- name += 'e';
- const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
- EXPECT_EQ(0, rtcp_sender_->SetApplicationSpecificData(kSubType, name, kData,
- sizeof(kData)));
- rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
- EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp));
- EXPECT_EQ(1, parser()->app()->num_packets());
- EXPECT_EQ(kSubType, parser()->app()->sub_type());
- EXPECT_EQ(name, parser()->app()->name());
- EXPECT_EQ(sizeof(kData), parser()->app()->data_size());
- EXPECT_EQ(0, memcmp(kData, parser()->app()->data(), sizeof(kData)));
-}
-
-TEST_F(RtcpSenderTest, SendEmptyApp) {
- const uint8_t kSubType = 30;
- const uint32_t kName = 0x6E616D65;
-
- EXPECT_EQ(
- 0, rtcp_sender_->SetApplicationSpecificData(kSubType, kName, nullptr, 0));
-
- rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
- EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp));
- EXPECT_EQ(1, parser()->app()->num_packets());
- EXPECT_EQ(kSubType, parser()->app()->sub_type());
- EXPECT_EQ(kName, parser()->app()->name());
- EXPECT_EQ(0U, parser()->app()->data_size());
-}
-
-TEST_F(RtcpSenderTest, SetInvalidApplicationSpecificData) {
- const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't'};
- const uint16_t kInvalidDataLength = sizeof(kData) / sizeof(kData[0]);
- EXPECT_EQ(-1,
- rtcp_sender_->SetApplicationSpecificData(
- 0, 0, kData, kInvalidDataLength)); // Should by multiple of 4.
-}
-
TEST_F(RtcpSenderTest, SendFir) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpFir));
@@ -563,7 +522,7 @@ TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfNotEnabled) {
TEST_F(RtcpSenderTest, TestRegisterRtcpPacketTypeObserver) {
RtcpPacketTypeCounterObserverImpl observer;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -691,7 +650,7 @@ TEST_F(RtcpSenderTest, ByeMustBeLast) {
}));
// Re-configure rtcp_sender_ with mock_transport_
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &mock_transport;
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/modules/rtp_rtcp/source/rtcp_transceiver_config.h
index 2cbd1045d2..8a8fd6aed8 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_config.h
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_config.h
@@ -28,8 +28,8 @@ class MediaReceiverRtcpObserver {
public:
virtual ~MediaReceiverRtcpObserver() = default;
- // All message handlers have default empty implementation. This way user needs
- // to implement only those she is interested in.
+ // All message handlers have default empty implementation. This way users only
+ // need to implement the ones they are interested in.
virtual void OnSenderReport(uint32_t sender_ssrc,
NtpTime ntp_time,
uint32_t rtp_time) {}
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
index 727a9bca23..9c4c5adf79 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
@@ -55,15 +55,17 @@ using ::webrtc::test::RtcpPacketParser;
class MockReceiveStatisticsProvider : public webrtc::ReceiveStatisticsProvider {
public:
- MOCK_METHOD1(RtcpReportBlocks, std::vector<ReportBlock>(size_t));
+ MOCK_METHOD(std::vector<ReportBlock>, RtcpReportBlocks, (size_t), (override));
};
class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver {
public:
- MOCK_METHOD3(OnSenderReport, void(uint32_t, NtpTime, uint32_t));
- MOCK_METHOD1(OnBye, void(uint32_t));
- MOCK_METHOD2(OnBitrateAllocation,
- void(uint32_t, const VideoBitrateAllocation&));
+ MOCK_METHOD(void, OnSenderReport, (uint32_t, NtpTime, uint32_t), (override));
+ MOCK_METHOD(void, OnBye, (uint32_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocation,
+ (uint32_t, const VideoBitrateAllocation&),
+ (override));
};
// Since some tests will need to wait for this period, make it small to avoid
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
index 5fb2aa55eb..9c181c6526 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
@@ -42,7 +42,10 @@ using ::webrtc::test::RtcpPacketParser;
class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver {
public:
- MOCK_METHOD3(OnSenderReport, void(uint32_t, webrtc::NtpTime, uint32_t));
+ MOCK_METHOD(void,
+ OnSenderReport,
+ (uint32_t, webrtc::NtpTime, uint32_t),
+ (override));
};
constexpr int kTimeoutMs = 1000;
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
index 30dedb192f..3b09818576 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
@@ -10,6 +10,7 @@
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include <bitset>
#include <cstdint>
#include "api/array_view.h"
@@ -23,6 +24,7 @@ namespace webrtc {
constexpr RTPExtensionType RtpDependencyDescriptorExtension::kId;
constexpr char RtpDependencyDescriptorExtension::kUri[];
+constexpr std::bitset<32> RtpDependencyDescriptorExtension::kAllChainsAreActive;
bool RtpDependencyDescriptorExtension::Parse(
rtc::ArrayView<const uint8_t> data,
@@ -34,16 +36,20 @@ bool RtpDependencyDescriptorExtension::Parse(
size_t RtpDependencyDescriptorExtension::ValueSize(
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor) {
- RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, descriptor);
+ RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, active_chains,
+ descriptor);
return DivideRoundUp(writer.ValueSizeBits(), 8);
}
bool RtpDependencyDescriptorExtension::Write(
rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor) {
- RtpDependencyDescriptorWriter writer(data, structure, descriptor);
+ RtpDependencyDescriptorWriter writer(data, structure, active_chains,
+ descriptor);
return writer.Write();
}
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
index b99230c56b..de16eeab2a 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
@@ -10,6 +10,7 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_
+#include <bitset>
#include <cstdint>
#include "api/array_view.h"
@@ -34,10 +35,24 @@ class RtpDependencyDescriptorExtension {
DependencyDescriptor* descriptor);
static size_t ValueSize(const FrameDependencyStructure& structure,
+ const DependencyDescriptor& descriptor) {
+ return ValueSize(structure, kAllChainsAreActive, descriptor);
+ }
+ static size_t ValueSize(const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
static bool Write(rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ const DependencyDescriptor& descriptor) {
+ return Write(data, structure, kAllChainsAreActive, descriptor);
+ }
+ static bool Write(rtc::ArrayView<uint8_t> data,
+ const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
+
+ private:
+ static constexpr std::bitset<32> kAllChainsAreActive = ~uint32_t{0};
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc
new file mode 100644
index 0000000000..11d809693c
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+
+#include "api/array_view.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+
+#include "test/gmock.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Each;
+
+TEST(RtpDependencyDescriptorExtensionTest, Writer3BytesForPerfectTemplate) {
+ uint8_t buffer[3];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize(structure, descriptor),
+ 3u);
+ EXPECT_TRUE(
+ RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor));
+}
+
+TEST(RtpDependencyDescriptorExtensionTest, WriteZeroInUnusedBits) {
+ uint8_t buffer[32];
+ std::memset(buffer, 0xff, sizeof(buffer));
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({1, 1})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+ descriptor.frame_dependencies.frame_diffs = {2};
+
+ // To test unused bytes are zeroed, need a buffer large enough.
+ size_t value_size =
+ RtpDependencyDescriptorExtension::ValueSize(structure, descriptor);
+ ASSERT_LT(value_size, sizeof(buffer));
+
+ ASSERT_TRUE(
+ RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor));
+
+ const uint8_t* unused_bytes = buffer + value_size;
+ size_t num_unused_bytes = buffer + sizeof(buffer) - unused_bytes;
+ // Check remaining bytes are zeroed.
+ EXPECT_THAT(rtc::MakeArrayView(unused_bytes, num_unused_bytes), Each(0));
+}
+
+// In practice chain diff for inactive chain will grow uboundly because no
+// frames are produced for it, that shouldn't block writing the extension.
+TEST(RtpDependencyDescriptorExtensionTest,
+ TemplateMatchingSkipsInactiveChains) {
+ uint8_t buffer[3];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ // Set only 1st chain as active.
+ std::bitset<32> active_chains = 0b01;
+ descriptor.frame_dependencies.chain_diffs[1] = 1000;
+
+ // Expect perfect template match since the only difference is for an inactive
+ // chain. Pefect template match consumes 3 bytes.
+ EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize(
+ structure, active_chains, descriptor),
+ 3u);
+ EXPECT_TRUE(RtpDependencyDescriptorExtension::Write(
+ buffer, structure, active_chains, descriptor));
+}
+
+TEST(RtpDependencyDescriptorExtensionTest,
+ AcceptsInvalidChainDiffForInactiveChainWhenChainsAreCustom) {
+ uint8_t buffer[256];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ // Set only 1st chain as active.
+ std::bitset<32> active_chains = 0b01;
+ // Set chain_diff different to the template to make it custom.
+ descriptor.frame_dependencies.chain_diffs[0] = 1;
+ // Set chain diff for inactive chain beyound limit of 255 max chain diff.
+ descriptor.frame_dependencies.chain_diffs[1] = 1000;
+
+ // Because chains are custom, should use more than base 3 bytes.
+ EXPECT_GT(RtpDependencyDescriptorExtension::ValueSize(
+ structure, active_chains, descriptor),
+ 3u);
+ EXPECT_TRUE(RtpDependencyDescriptorExtension::Write(
+ buffer, structure, active_chains, descriptor));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
index 07b6a3b3c3..cba594dc6f 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
@@ -18,13 +18,6 @@
#include "rtc_base/checks.h"
namespace webrtc {
-namespace {
-
-constexpr int kMaxTemporalId = 7;
-constexpr int kMaxSpatialId = 3;
-constexpr int kMaxTemplates = 64;
-
-} // namespace
RtpDependencyDescriptorReader::RtpDependencyDescriptorReader(
rtc::ArrayView<const uint8_t> raw_data,
@@ -95,7 +88,7 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() {
int spatial_id = 0;
NextLayerIdc next_layer_idc;
do {
- if (templates.size() == kMaxTemplates) {
+ if (templates.size() == DependencyDescriptor::kMaxTemplates) {
parsing_failed_ = true;
break;
}
@@ -107,14 +100,14 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() {
next_layer_idc = static_cast<NextLayerIdc>(ReadBits(2));
if (next_layer_idc == kNextTemporalLayer) {
temporal_id++;
- if (temporal_id > kMaxTemporalId) {
+ if (temporal_id >= DependencyDescriptor::kMaxTemporalIds) {
parsing_failed_ = true;
break;
}
} else if (next_layer_idc == kNextSpatialLayer) {
temporal_id = 0;
spatial_id++;
- if (spatial_id > kMaxSpatialId) {
+ if (spatial_id >= DependencyDescriptor::kMaxSpatialIds) {
parsing_failed_ = true;
break;
}
@@ -153,7 +146,7 @@ void RtpDependencyDescriptorReader::ReadTemplateChains() {
if (structure->num_chains == 0)
return;
for (int i = 0; i < structure->num_decode_targets; ++i) {
- uint32_t protected_by_chain = ReadNonSymmetric(structure->num_chains + 1);
+ uint32_t protected_by_chain = ReadNonSymmetric(structure->num_chains);
structure->decode_target_protected_by_chain.push_back(protected_by_chain);
}
for (FrameDependencyTemplate& frame_template : structure->templates) {
@@ -198,9 +191,10 @@ void RtpDependencyDescriptorReader::ReadExtendedFields() {
}
void RtpDependencyDescriptorReader::ReadFrameDependencyDefinition() {
- size_t template_index = (frame_dependency_template_id_ + kMaxTemplates -
- structure_->structure_id) %
- kMaxTemplates;
+ size_t template_index =
+ (frame_dependency_template_id_ + DependencyDescriptor::kMaxTemplates -
+ structure_->structure_id) %
+ DependencyDescriptor::kMaxTemplates;
if (template_index >= structure_->templates.size()) {
parsing_failed_ = true;
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
index 9e1a425666..25d221253b 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
@@ -9,6 +9,7 @@
*/
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h"
+#include <bitset>
#include <cstddef>
#include <cstdint>
#include <iterator>
@@ -23,8 +24,6 @@
namespace webrtc {
namespace {
-constexpr int kMaxTemplates = 64;
-
enum class NextLayerIdc : uint64_t {
kSameLayer = 0,
kNextTemporal = 1,
@@ -35,12 +34,8 @@ enum class NextLayerIdc : uint64_t {
NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous,
const FrameDependencyTemplate& next) {
- // TODO(danilchap): Move these constants to header shared between reader and
- // writer.
- static constexpr int kMaxSpatialId = 3;
- static constexpr int kMaxTemporalId = 7;
- RTC_DCHECK_LE(next.spatial_id, kMaxSpatialId);
- RTC_DCHECK_LE(next.temporal_id, kMaxTemporalId);
+ RTC_DCHECK_LT(next.spatial_id, DependencyDescriptor::kMaxSpatialIds);
+ RTC_DCHECK_LT(next.temporal_id, DependencyDescriptor::kMaxTemporalIds);
if (next.spatial_id == previous.spatial_id &&
next.temporal_id == previous.temporal_id) {
@@ -61,9 +56,11 @@ NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous,
RtpDependencyDescriptorWriter::RtpDependencyDescriptorWriter(
rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor)
: descriptor_(descriptor),
structure_(structure),
+ active_chains_(active_chains),
bit_writer_(data.data(), data.size()) {
FindBestTemplate();
}
@@ -74,6 +71,14 @@ bool RtpDependencyDescriptorWriter::Write() {
WriteExtendedFields();
WriteFrameDependencyDefinition();
}
+ size_t remaining_bits = bit_writer_.RemainingBitCount();
+ // Zero remaining memory to avoid leaving it uninitialized.
+ if (remaining_bits % 64 != 0) {
+ WriteBits(/*val=*/0, remaining_bits % 64);
+ }
+ for (size_t i = 0; i < remaining_bits / 64; ++i) {
+ WriteBits(/*val=*/0, 64);
+ }
return !build_failed_;
}
@@ -106,8 +111,8 @@ int RtpDependencyDescriptorWriter::StructureSizeBits() const {
structure_.num_chains, structure_.num_decode_targets + 1);
if (structure_.num_chains > 0) {
for (int protected_by : structure_.decode_target_protected_by_chain) {
- bits += rtc::BitBufferWriter::SizeNonSymmetricBits(
- protected_by, structure_.num_chains + 1);
+ bits += rtc::BitBufferWriter::SizeNonSymmetricBits(protected_by,
+ structure_.num_chains);
}
bits += 4 * structure_.templates.size() * structure_.num_chains;
}
@@ -126,8 +131,14 @@ RtpDependencyDescriptorWriter::CalculateMatch(
result.need_custom_dtis =
descriptor_.frame_dependencies.decode_target_indications !=
frame_template->decode_target_indications;
- result.need_custom_chains =
- descriptor_.frame_dependencies.chain_diffs != frame_template->chain_diffs;
+ result.need_custom_chains = false;
+ for (int i = 0; i < structure_.num_chains; ++i) {
+ if (active_chains_[i] && descriptor_.frame_dependencies.chain_diffs[i] !=
+ frame_template->chain_diffs[i]) {
+ result.need_custom_chains = true;
+ break;
+ }
+ }
result.extra_size_bits = 0;
if (result.need_custom_fdiffs) {
@@ -193,7 +204,7 @@ bool RtpDependencyDescriptorWriter::HasExtendedFields() const {
uint64_t RtpDependencyDescriptorWriter::TemplateId() const {
return (best_template_.template_position - structure_.templates.begin() +
structure_.structure_id) %
- kMaxTemplates;
+ DependencyDescriptor::kMaxTemplates;
}
void RtpDependencyDescriptorWriter::WriteBits(uint64_t val, size_t bit_count) {
@@ -209,9 +220,10 @@ void RtpDependencyDescriptorWriter::WriteNonSymmetric(uint32_t value,
void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() {
RTC_DCHECK_GE(structure_.structure_id, 0);
- RTC_DCHECK_LT(structure_.structure_id, kMaxTemplates);
+ RTC_DCHECK_LT(structure_.structure_id, DependencyDescriptor::kMaxTemplates);
RTC_DCHECK_GT(structure_.num_decode_targets, 0);
- RTC_DCHECK_LE(structure_.num_decode_targets, 1 << 5);
+ RTC_DCHECK_LE(structure_.num_decode_targets,
+ DependencyDescriptor::kMaxDecodeTargets);
WriteBits(structure_.structure_id, 6);
WriteBits(structure_.num_decode_targets - 1, 5);
@@ -228,7 +240,7 @@ void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() {
void RtpDependencyDescriptorWriter::WriteTemplateLayers() {
const auto& templates = structure_.templates;
RTC_DCHECK(!templates.empty());
- RTC_DCHECK_LE(templates.size(), kMaxTemplates);
+ RTC_DCHECK_LE(templates.size(), DependencyDescriptor::kMaxTemplates);
RTC_DCHECK_EQ(templates[0].spatial_id, 0);
RTC_DCHECK_EQ(templates[0].temporal_id, 0);
@@ -276,8 +288,8 @@ void RtpDependencyDescriptorWriter::WriteTemplateChains() {
structure_.num_decode_targets);
for (int protected_by : structure_.decode_target_protected_by_chain) {
RTC_DCHECK_GE(protected_by, 0);
- RTC_DCHECK_LE(protected_by, structure_.num_chains);
- WriteNonSymmetric(protected_by, structure_.num_chains + 1);
+ RTC_DCHECK_LT(protected_by, structure_.num_chains);
+ WriteNonSymmetric(protected_by, structure_.num_chains);
}
for (const auto& frame_template : structure_.templates) {
RTC_DCHECK_EQ(frame_template.chain_diffs.size(), structure_.num_chains);
@@ -363,7 +375,9 @@ void RtpDependencyDescriptorWriter::WriteFrameFdiffs() {
void RtpDependencyDescriptorWriter::WriteFrameChains() {
RTC_DCHECK_EQ(descriptor_.frame_dependencies.chain_diffs.size(),
structure_.num_chains);
- for (int chain_diff : descriptor_.frame_dependencies.chain_diffs) {
+ for (int i = 0; i < structure_.num_chains; ++i) {
+ int chain_diff =
+ active_chains_[i] ? descriptor_.frame_dependencies.chain_diffs[i] : 0;
RTC_DCHECK_GE(chain_diff, 0);
RTC_DCHECK_LT(chain_diff, 1 << 8);
WriteBits(chain_diff, 8);
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
index 5a823b6e86..99fefecea6 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
@@ -10,6 +10,7 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_
+#include <bitset>
#include <cstddef>
#include <cstdint>
#include <vector>
@@ -25,6 +26,7 @@ class RtpDependencyDescriptorWriter {
// |descriptor| matches the |structure|.
RtpDependencyDescriptorWriter(rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
// Serializes DependencyDescriptor rtp header extension.
@@ -77,6 +79,7 @@ class RtpDependencyDescriptorWriter {
bool build_failed_ = false;
const DependencyDescriptor& descriptor_;
const FrameDependencyStructure& structure_;
+ std::bitset<32> active_chains_;
rtc::BitBufferWriter bit_writer_;
TemplateMatch best_template_;
};
diff --git a/modules/rtp_rtcp/source/rtp_format.cc b/modules/rtp_rtcp/source/rtp_format.cc
index 28f63f1109..7703a6bf0f 100644
--- a/modules/rtp_rtcp/source/rtp_format.cc
+++ b/modules/rtp_rtcp/source/rtp_format.cc
@@ -30,8 +30,7 @@ std::unique_ptr<RtpPacketizer> RtpPacketizer::Create(
rtc::ArrayView<const uint8_t> payload,
PayloadSizeLimits limits,
// Codec-specific details.
- const RTPVideoHeader& rtp_video_header,
- const RTPFragmentationHeader* fragmentation) {
+ const RTPVideoHeader& rtp_video_header) {
if (!type) {
// Use raw packetizer.
return std::make_unique<RtpPacketizerGeneric>(payload, limits);
@@ -39,11 +38,10 @@ std::unique_ptr<RtpPacketizer> RtpPacketizer::Create(
switch (*type) {
case kVideoCodecH264: {
- RTC_CHECK(fragmentation);
const auto& h264 =
absl::get<RTPVideoHeaderH264>(rtp_video_header.video_type_header);
- return std::make_unique<RtpPacketizerH264>(
- payload, limits, h264.packetization_mode, *fragmentation);
+ return std::make_unique<RtpPacketizerH264>(payload, limits,
+ h264.packetization_mode);
}
case kVideoCodecVP8: {
const auto& vp8 =
diff --git a/modules/rtp_rtcp/source/rtp_format.h b/modules/rtp_rtcp/source/rtp_format.h
index dca8285b62..b593f29b1d 100644
--- a/modules/rtp_rtcp/source/rtp_format.h
+++ b/modules/rtp_rtcp/source/rtp_format.h
@@ -18,7 +18,6 @@
#include "absl/types/optional.h"
#include "api/array_view.h"
-#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
namespace webrtc {
@@ -41,8 +40,7 @@ class RtpPacketizer {
rtc::ArrayView<const uint8_t> payload,
PayloadSizeLimits limits,
// Codec-specific details.
- const RTPVideoHeader& rtp_video_header,
- const RTPFragmentationHeader* fragmentation);
+ const RTPVideoHeader& rtp_video_header);
virtual ~RtpPacketizer() = default;
diff --git a/modules/rtp_rtcp/source/rtp_format_h264.cc b/modules/rtp_rtcp/source/rtp_format_h264.cc
index 6f19e38629..6c3966cb93 100644
--- a/modules/rtp_rtcp/source/rtp_format_h264.cc
+++ b/modules/rtp_rtcp/source/rtp_format_h264.cc
@@ -25,7 +25,6 @@
#include "common_video/h264/pps_parser.h"
#include "common_video/h264/sps_parser.h"
#include "common_video/h264/sps_vui_rewriter.h"
-#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/source/byte_io.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "rtc_base/checks.h"
@@ -46,19 +45,18 @@ enum FuDefs : uint8_t { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 };
} // namespace
-RtpPacketizerH264::RtpPacketizerH264(
- rtc::ArrayView<const uint8_t> payload,
- PayloadSizeLimits limits,
- H264PacketizationMode packetization_mode,
- const RTPFragmentationHeader& fragmentation)
+RtpPacketizerH264::RtpPacketizerH264(rtc::ArrayView<const uint8_t> payload,
+ PayloadSizeLimits limits,
+ H264PacketizationMode packetization_mode)
: limits_(limits), num_packets_left_(0) {
// Guard against uninitialized memory in packetization_mode.
RTC_CHECK(packetization_mode == H264PacketizationMode::NonInterleaved ||
packetization_mode == H264PacketizationMode::SingleNalUnit);
- for (size_t i = 0; i < fragmentation.fragmentationVectorSize; ++i) {
+ for (const auto& nalu :
+ H264::FindNaluIndices(payload.data(), payload.size())) {
input_fragments_.push_back(
- payload.subview(fragmentation.Offset(i), fragmentation.Length(i)));
+ payload.subview(nalu.payload_start_offset, nalu.payload_size));
}
if (!GeneratePackets(packetization_mode)) {
diff --git a/modules/rtp_rtcp/source/rtp_format_h264.h b/modules/rtp_rtcp/source/rtp_format_h264.h
index 4661dc2163..7c10dd5754 100644
--- a/modules/rtp_rtcp/source/rtp_format_h264.h
+++ b/modules/rtp_rtcp/source/rtp_format_h264.h
@@ -19,7 +19,6 @@
#include <queue>
#include "api/array_view.h"
-#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
@@ -34,8 +33,7 @@ class RtpPacketizerH264 : public RtpPacketizer {
// The payload_data must be exactly one encoded H264 frame.
RtpPacketizerH264(rtc::ArrayView<const uint8_t> payload,
PayloadSizeLimits limits,
- H264PacketizationMode packetization_mode,
- const RTPFragmentationHeader& fragmentation);
+ H264PacketizationMode packetization_mode);
~RtpPacketizerH264() override;
diff --git a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
index bf9771ab9f..9f660b7a74 100644
--- a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
@@ -13,9 +13,9 @@
#include <memory>
#include <vector>
+#include "absl/algorithm/container.h"
#include "api/array_view.h"
#include "common_video/h264/h264_common.h"
-#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "modules/rtp_rtcp/source/byte_io.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
@@ -56,45 +56,61 @@ enum NalDefs { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F };
// Bit masks for FU (A and B) headers.
enum FuDefs { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 };
-RTPFragmentationHeader CreateFragmentation(rtc::ArrayView<const size_t> sizes) {
- RTPFragmentationHeader fragmentation;
- fragmentation.VerifyAndAllocateFragmentationHeader(sizes.size());
- size_t offset = 0;
- for (size_t i = 0; i < sizes.size(); ++i) {
- fragmentation.fragmentationOffset[i] = offset;
- fragmentation.fragmentationLength[i] = sizes[i];
- offset += sizes[i];
+// Creates Buffer that looks like nal unit of given size.
+rtc::Buffer GenerateNalUnit(size_t size) {
+ RTC_CHECK_GT(size, 0);
+ rtc::Buffer buffer(size);
+ // Set some valid header.
+ buffer[0] = kSlice;
+ for (size_t i = 1; i < size; ++i) {
+ buffer[i] = static_cast<uint8_t>(i);
}
- return fragmentation;
-}
-
-// Create fragmentation with single fragment of same size as |frame|
-RTPFragmentationHeader NoFragmentation(rtc::ArrayView<const uint8_t> frame) {
- size_t frame_size[] = {frame.size()};
- return CreateFragmentation(frame_size);
+ // Last byte shouldn't be 0, or it may be counted as part of next 4-byte start
+ // sequence.
+ buffer[size - 1] |= 0x10;
+ return buffer;
}
-// Create frame of given size.
-rtc::Buffer CreateFrame(size_t frame_size) {
- rtc::Buffer frame(frame_size);
- // Set some valid header.
- frame[0] = 0x01;
- // Generate payload to detect when shifted payload was put into a packet.
- for (size_t i = 1; i < frame_size; ++i)
- frame[i] = static_cast<uint8_t>(i);
+// Create frame consisting of nalus of given size.
+rtc::Buffer CreateFrame(std::initializer_list<size_t> nalu_sizes) {
+ static constexpr int kStartCodeSize = 3;
+ rtc::Buffer frame(absl::c_accumulate(nalu_sizes, 0) +
+ kStartCodeSize * nalu_sizes.size());
+ size_t offset = 0;
+ for (size_t nalu_size : nalu_sizes) {
+ EXPECT_GE(nalu_size, 1u);
+ // Insert nalu start code
+ frame[offset] = 0;
+ frame[offset + 1] = 0;
+ frame[offset + 2] = 1;
+ // Set some valid header.
+ frame[offset + 3] = 1;
+ // Fill payload avoiding accidental start codes
+ if (nalu_size > 1) {
+ memset(frame.data() + offset + 4, 0x3f, nalu_size - 1);
+ }
+ offset += (kStartCodeSize + nalu_size);
+ }
return frame;
}
-// Create frame with size deduced from fragmentation.
-rtc::Buffer CreateFrame(const RTPFragmentationHeader& fragmentation) {
- size_t last_frame_index = fragmentation.fragmentationVectorSize - 1;
- size_t frame_size = fragmentation.fragmentationOffset[last_frame_index] +
- fragmentation.fragmentationLength[last_frame_index];
- rtc::Buffer frame = CreateFrame(frame_size);
- // Set some headers.
- // Tests can expect those are valid but shouln't rely on actual values.
- for (size_t i = 0; i <= last_frame_index; ++i) {
- frame[fragmentation.fragmentationOffset[i]] = i + 1;
+// Create frame consisting of given nalus.
+rtc::Buffer CreateFrame(rtc::ArrayView<const rtc::Buffer> nalus) {
+ static constexpr int kStartCodeSize = 3;
+ int frame_size = 0;
+ for (const rtc::Buffer& nalu : nalus) {
+ frame_size += (kStartCodeSize + nalu.size());
+ }
+ rtc::Buffer frame(frame_size);
+ size_t offset = 0;
+ for (const rtc::Buffer& nalu : nalus) {
+ // Insert nalu start code
+ frame[offset] = 0;
+ frame[offset + 1] = 0;
+ frame[offset + 2] = 1;
+ // Copy the nalu unit.
+ memcpy(frame.data() + offset + 3, nalu.data(), nalu.size());
+ offset += (kStartCodeSize + nalu.size());
}
return frame;
}
@@ -117,31 +133,28 @@ class RtpPacketizerH264ModeTest
: public ::testing::TestWithParam<H264PacketizationMode> {};
TEST_P(RtpPacketizerH264ModeTest, SingleNalu) {
- const uint8_t frame[2] = {kIdr, 0xFF};
+ const uint8_t frame[] = {0, 0, 1, kIdr, 0xFF};
- RtpPacketizerH264 packetizer(frame, kNoLimits, GetParam(),
- NoFragmentation(frame));
+ RtpPacketizerH264 packetizer(frame, kNoLimits, GetParam());
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(1));
- EXPECT_THAT(packets[0].payload(), ElementsAreArray(frame));
+ EXPECT_THAT(packets[0].payload(), ElementsAre(kIdr, 0xFF));
}
TEST_P(RtpPacketizerH264ModeTest, SingleNaluTwoPackets) {
RtpPacketizer::PayloadSizeLimits limits;
limits.max_payload_len = kMaxPayloadSize;
- const size_t fragment_sizes[] = {kMaxPayloadSize, 100};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragment_sizes);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(kMaxPayloadSize),
+ GenerateNalUnit(100)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits, GetParam());
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(2));
- EXPECT_THAT(packets[0].payload(),
- ElementsAreArray(frame.data(), kMaxPayloadSize));
- EXPECT_THAT(packets[1].payload(),
- ElementsAreArray(frame.data() + kMaxPayloadSize, 100));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
}
TEST_P(RtpPacketizerH264ModeTest,
@@ -149,21 +162,18 @@ TEST_P(RtpPacketizerH264ModeTest,
RtpPacketizer::PayloadSizeLimits limits;
limits.max_payload_len = 200;
limits.first_packet_reduction_len = 5;
- const size_t fragments[] = {195, 200, 200};
-
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/195),
+ GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/200)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits, GetParam());
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(3));
- const uint8_t* next_fragment = frame.data();
- EXPECT_THAT(packets[0].payload(), ElementsAreArray(next_fragment, 195));
- next_fragment += 195;
- EXPECT_THAT(packets[1].payload(), ElementsAreArray(next_fragment, 200));
- next_fragment += 200;
- EXPECT_THAT(packets[2].payload(), ElementsAreArray(next_fragment, 200));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
+ EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2]));
}
TEST_P(RtpPacketizerH264ModeTest,
@@ -171,21 +181,18 @@ TEST_P(RtpPacketizerH264ModeTest,
RtpPacketizer::PayloadSizeLimits limits;
limits.max_payload_len = 200;
limits.last_packet_reduction_len = 5;
- const size_t fragments[] = {200, 200, 195};
-
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/195)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits, GetParam());
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(3));
- const uint8_t* next_fragment = frame.data();
- EXPECT_THAT(packets[0].payload(), ElementsAreArray(next_fragment, 200));
- next_fragment += 200;
- EXPECT_THAT(packets[1].payload(), ElementsAreArray(next_fragment, 200));
- next_fragment += 200;
- EXPECT_THAT(packets[2].payload(), ElementsAreArray(next_fragment, 195));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
+ EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2]));
}
TEST_P(RtpPacketizerH264ModeTest,
@@ -194,10 +201,9 @@ TEST_P(RtpPacketizerH264ModeTest,
limits.max_payload_len = 200;
limits.first_packet_reduction_len = 20;
limits.last_packet_reduction_len = 30;
- rtc::Buffer frame = CreateFrame(150);
+ rtc::Buffer frame = CreateFrame({150});
- RtpPacketizerH264 packetizer(frame, limits, GetParam(),
- NoFragmentation(frame));
+ RtpPacketizerH264 packetizer(frame, limits, GetParam());
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
EXPECT_THAT(packets, SizeIs(1));
@@ -211,19 +217,19 @@ INSTANTIATE_TEST_SUITE_P(
// Aggregation tests.
TEST(RtpPacketizerH264Test, StapA) {
- size_t fragments[] = {2, 2, 0x123};
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/0x123)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
-
- RtpPacketizerH264 packetizer(
- frame, kNoLimits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, kNoLimits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(1));
auto payload = packets[0].payload();
EXPECT_EQ(payload.size(),
- kNalHeaderSize + 3 * kLengthFieldLength + frame.size());
+ kNalHeaderSize + 3 * kLengthFieldLength + 2 + 2 + 0x123);
EXPECT_EQ(payload[0], kStapA);
payload = payload.subview(kNalHeaderSize);
@@ -231,29 +237,26 @@ TEST(RtpPacketizerH264Test, StapA) {
EXPECT_THAT(payload.subview(0, kLengthFieldLength),
ElementsAre(0, 2)); // Size.
EXPECT_THAT(payload.subview(kLengthFieldLength, 2),
- ElementsAreArray(frame.data(), 2));
+ ElementsAreArray(nalus[0]));
payload = payload.subview(kLengthFieldLength + 2);
// 2nd fragment.
EXPECT_THAT(payload.subview(0, kLengthFieldLength),
ElementsAre(0, 2)); // Size.
EXPECT_THAT(payload.subview(kLengthFieldLength, 2),
- ElementsAreArray(frame.data() + 2, 2));
+ ElementsAreArray(nalus[1]));
payload = payload.subview(kLengthFieldLength + 2);
// 3rd fragment.
EXPECT_THAT(payload.subview(0, kLengthFieldLength),
ElementsAre(0x1, 0x23)); // Size.
- EXPECT_THAT(payload.subview(kLengthFieldLength),
- ElementsAreArray(frame.data() + 4, 0x123));
+ EXPECT_THAT(payload.subview(kLengthFieldLength), ElementsAreArray(nalus[2]));
}
TEST(RtpPacketizerH264Test, SingleNalUnitModeHasNoStapA) {
// This is the same setup as for the StapA test.
- size_t fragments[] = {2, 2, 0x123};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer frame = CreateFrame({2, 2, 0x123});
- RtpPacketizerH264 packetizer(
- frame, kNoLimits, H264PacketizationMode::SingleNalUnit, fragmentation);
+ RtpPacketizerH264 packetizer(frame, kNoLimits,
+ H264PacketizationMode::SingleNalUnit);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
// The three fragments should be returned as three packets.
@@ -269,23 +272,23 @@ TEST(RtpPacketizerH264Test, StapARespectsFirstPacketReduction) {
limits.first_packet_reduction_len = 100;
const size_t kFirstFragmentSize =
limits.max_payload_len - limits.first_packet_reduction_len;
- size_t fragments[] = {kFirstFragmentSize, 2, 2};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(2));
// Expect 1st packet is single nalu.
- EXPECT_THAT(packets[0].payload(),
- ElementsAreArray(frame.data(), kFirstFragmentSize));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
// Expect 2nd packet is aggregate of last two fragments.
- const uint8_t* tail = frame.data() + kFirstFragmentSize;
- EXPECT_THAT(packets[1].payload(), ElementsAre(kStapA, //
- 0, 2, tail[0], tail[1], //
- 0, 2, tail[2], tail[3]));
+ EXPECT_THAT(packets[1].payload(),
+ ElementsAre(kStapA, //
+ 0, 2, nalus[1][0], nalus[1][1], //
+ 0, 2, nalus[2][0], nalus[2][1]));
}
TEST(RtpPacketizerH264Test, StapARespectsLastPacketReduction) {
@@ -294,22 +297,23 @@ TEST(RtpPacketizerH264Test, StapARespectsLastPacketReduction) {
limits.last_packet_reduction_len = 100;
const size_t kLastFragmentSize =
limits.max_payload_len - limits.last_packet_reduction_len;
- size_t fragments[] = {2, 2, kLastFragmentSize};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/kLastFragmentSize)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(2));
// Expect 1st packet is aggregate of 1st two fragments.
- EXPECT_THAT(packets[0].payload(), ElementsAre(kStapA, //
- 0, 2, frame[0], frame[1], //
- 0, 2, frame[2], frame[3]));
+ EXPECT_THAT(packets[0].payload(),
+ ElementsAre(kStapA, //
+ 0, 2, nalus[0][0], nalus[0][1], //
+ 0, 2, nalus[1][0], nalus[1][1]));
// Expect 2nd packet is single nalu.
- EXPECT_THAT(packets[1].payload(),
- ElementsAreArray(frame.data() + 4, kLastFragmentSize));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2]));
}
TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) {
@@ -317,22 +321,23 @@ TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) {
limits.max_payload_len = 1000;
const size_t kLastFragmentSize =
limits.max_payload_len - 3 * kLengthFieldLength - 4;
- size_t fragments[] = {2, 2, kLastFragmentSize};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/kLastFragmentSize)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(2));
// Expect 1st packet is aggregate of 1st two fragments.
- EXPECT_THAT(packets[0].payload(), ElementsAre(kStapA, //
- 0, 2, frame[0], frame[1], //
- 0, 2, frame[2], frame[3]));
+ EXPECT_THAT(packets[0].payload(),
+ ElementsAre(kStapA, //
+ 0, 2, nalus[0][0], nalus[0][1], //
+ 0, 2, nalus[1][0], nalus[1][1]));
// Expect 2nd packet is single nalu.
- EXPECT_THAT(packets[1].payload(),
- ElementsAreArray(frame.data() + 4, kLastFragmentSize));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2]));
}
// Fragmentation + aggregation.
@@ -342,28 +347,29 @@ TEST(RtpPacketizerH264Test, MixedStapAFUA) {
const size_t kFuaPayloadSize = 70;
const size_t kFuaNaluSize = kNalHeaderSize + 2 * kFuaPayloadSize;
const size_t kStapANaluSize = 20;
- size_t fragments[] = {kFuaNaluSize, kStapANaluSize, kStapANaluSize};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer nalus[] = {GenerateNalUnit(kFuaNaluSize),
+ GenerateNalUnit(kStapANaluSize),
+ GenerateNalUnit(kStapANaluSize)};
+ rtc::Buffer frame = CreateFrame(nalus);
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
ASSERT_THAT(packets, SizeIs(3));
- const uint8_t* next_fragment = frame.data() + kNalHeaderSize;
// First expect two FU-A packets.
EXPECT_THAT(packets[0].payload().subview(0, kFuAHeaderSize),
- ElementsAre(kFuA, FuDefs::kSBit | frame[0]));
- EXPECT_THAT(packets[0].payload().subview(kFuAHeaderSize),
- ElementsAreArray(next_fragment, kFuaPayloadSize));
- next_fragment += kFuaPayloadSize;
+ ElementsAre(kFuA, FuDefs::kSBit | nalus[0][0]));
+ EXPECT_THAT(
+ packets[0].payload().subview(kFuAHeaderSize),
+ ElementsAreArray(nalus[0].data() + kNalHeaderSize, kFuaPayloadSize));
EXPECT_THAT(packets[1].payload().subview(0, kFuAHeaderSize),
- ElementsAre(kFuA, FuDefs::kEBit | frame[0]));
- EXPECT_THAT(packets[1].payload().subview(kFuAHeaderSize),
- ElementsAreArray(next_fragment, kFuaPayloadSize));
- next_fragment += kFuaPayloadSize;
+ ElementsAre(kFuA, FuDefs::kEBit | nalus[0][0]));
+ EXPECT_THAT(
+ packets[1].payload().subview(kFuAHeaderSize),
+ ElementsAreArray(nalus[0].data() + kNalHeaderSize + kFuaPayloadSize,
+ kFuaPayloadSize));
// Then expect one STAP-A packet with two nal units.
EXPECT_THAT(packets[2].payload()[0], kStapA);
@@ -371,13 +377,11 @@ TEST(RtpPacketizerH264Test, MixedStapAFUA) {
EXPECT_THAT(payload.subview(0, kLengthFieldLength),
ElementsAre(0, kStapANaluSize));
EXPECT_THAT(payload.subview(kLengthFieldLength, kStapANaluSize),
- ElementsAreArray(next_fragment, kStapANaluSize));
+ ElementsAreArray(nalus[1]));
payload = payload.subview(kLengthFieldLength + kStapANaluSize);
- next_fragment += kStapANaluSize;
EXPECT_THAT(payload.subview(0, kLengthFieldLength),
ElementsAre(0, kStapANaluSize));
- EXPECT_THAT(payload.subview(kLengthFieldLength),
- ElementsAreArray(next_fragment, kStapANaluSize));
+ EXPECT_THAT(payload.subview(kLengthFieldLength), ElementsAreArray(nalus[2]));
}
TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) {
@@ -387,12 +391,10 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) {
limits.last_packet_reduction_len = 20;
limits.single_packet_reduction_len = 20;
// Actual sizes, which triggered this bug.
- size_t fragments[] = {20, 8, 18, 1161};
- RTPFragmentationHeader fragmentation = CreateFragmentation(fragments);
- rtc::Buffer frame = CreateFrame(fragmentation);
+ rtc::Buffer frame = CreateFrame({20, 8, 18, 1161});
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::NonInterleaved, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
// Last packet has to be of correct size.
@@ -406,11 +408,11 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) {
// Returns sizes of the payloads excluding fua headers.
std::vector<int> TestFua(size_t frame_payload_size,
const RtpPacketizer::PayloadSizeLimits& limits) {
- rtc::Buffer frame = CreateFrame(kNalHeaderSize + frame_payload_size);
+ rtc::Buffer nalu[] = {GenerateNalUnit(kNalHeaderSize + frame_payload_size)};
+ rtc::Buffer frame = CreateFrame(nalu);
RtpPacketizerH264 packetizer(frame, limits,
- H264PacketizationMode::NonInterleaved,
- NoFragmentation(frame));
+ H264PacketizationMode::NonInterleaved);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
EXPECT_GE(packets.size(), 2u); // Single packet indicates it is not FuA.
@@ -429,7 +431,7 @@ std::vector<int> TestFua(size_t frame_payload_size,
// Clear S and E bits before testing all are duplicating same original header.
fua_header.front() &= ~FuDefs::kSBit;
fua_header.back() &= ~FuDefs::kEBit;
- EXPECT_THAT(fua_header, Each(Eq((kFuA << 8) | frame[0])));
+ EXPECT_THAT(fua_header, Each(Eq((kFuA << 8) | nalu[0][0])));
return payload_sizes;
}
@@ -488,11 +490,10 @@ TEST(RtpPacketizerH264Test, FUABig) {
TEST(RtpPacketizerH264Test, RejectsOverlongDataInPacketizationMode0) {
RtpPacketizer::PayloadSizeLimits limits;
- rtc::Buffer frame = CreateFrame(kMaxPayloadSize + 1);
- RTPFragmentationHeader fragmentation = NoFragmentation(frame);
+ rtc::Buffer frame = CreateFrame({kMaxPayloadSize + 1});
- RtpPacketizerH264 packetizer(
- frame, limits, H264PacketizationMode::SingleNalUnit, fragmentation);
+ RtpPacketizerH264 packetizer(frame, limits,
+ H264PacketizationMode::SingleNalUnit);
std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
EXPECT_THAT(packets, IsEmpty());
diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
index 03d4e58576..916d6577f1 100644
--- a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
+++ b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -10,10 +10,9 @@
// This file contains the class RtpFormatVp8TestHelper. The class is
// responsible for setting up a fake VP8 bitstream according to the
-// RTPVideoHeaderVP8 header, and partition information. After initialization,
-// an RTPFragmentationHeader is provided so that the tester can create a
-// packetizer. The packetizer can then be provided to this helper class, which
-// will then extract all packets and compare to the expected outcome.
+// RTPVideoHeaderVP8 header. The packetizer can then be provided to this helper
+// class, which will then extract all packets and compare to the expected
+// outcome.
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
diff --git a/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/modules/rtp_rtcp/source/rtp_header_extension_map.cc
index f59f9c4ebb..63562c5b89 100644
--- a/modules/rtp_rtcp/source/rtp_header_extension_map.cc
+++ b/modules/rtp_rtcp/source/rtp_header_extension_map.cc
@@ -41,7 +41,6 @@ constexpr ExtensionInfo kExtensions[] = {
CreateExtensionInfo<PlayoutDelayLimits>(),
CreateExtensionInfo<VideoContentTypeExtension>(),
CreateExtensionInfo<VideoTimingExtension>(),
- CreateExtensionInfo<FrameMarkingExtension>(),
CreateExtensionInfo<RtpStreamId>(),
CreateExtensionInfo<RepairedRtpStreamId>(),
CreateExtensionInfo<RtpMid>(),
diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc
index fefe6c618f..527874d785 100644
--- a/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -525,86 +525,6 @@ bool VideoTimingExtension::Write(rtc::ArrayView<uint8_t> data,
return true;
}
-// Frame Marking.
-//
-// Meta-information about an RTP stream outside the encrypted media payload,
-// useful for an RTP switch to do codec-agnostic selective forwarding
-// without decrypting the payload.
-//
-// For non-scalable streams:
-// 0 1
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | ID | L = 0 |S|E|I|D|0 0 0 0|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//
-// For scalable streams:
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | ID | L = 2 |S|E|I|D|B| TID | LID | TL0PICIDX |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-constexpr RTPExtensionType FrameMarkingExtension::kId;
-constexpr const char FrameMarkingExtension::kUri[];
-
-bool FrameMarkingExtension::IsScalable(uint8_t temporal_id, uint8_t layer_id) {
- return temporal_id != kNoTemporalIdx || layer_id != kNoSpatialIdx;
-}
-
-bool FrameMarkingExtension::Parse(rtc::ArrayView<const uint8_t> data,
- FrameMarking* frame_marking) {
- RTC_DCHECK(frame_marking);
-
- if (data.size() != 1 && data.size() != 3)
- return false;
-
- frame_marking->start_of_frame = (data[0] & 0x80) != 0;
- frame_marking->end_of_frame = (data[0] & 0x40) != 0;
- frame_marking->independent_frame = (data[0] & 0x20) != 0;
- frame_marking->discardable_frame = (data[0] & 0x10) != 0;
-
- if (data.size() == 3) {
- frame_marking->base_layer_sync = (data[0] & 0x08) != 0;
- frame_marking->temporal_id = data[0] & 0x7;
- frame_marking->layer_id = data[1];
- frame_marking->tl0_pic_idx = data[2];
- } else {
- // non-scalable
- frame_marking->base_layer_sync = false;
- frame_marking->temporal_id = kNoTemporalIdx;
- frame_marking->layer_id = kNoSpatialIdx;
- frame_marking->tl0_pic_idx = 0;
- }
- return true;
-}
-
-size_t FrameMarkingExtension::ValueSize(const FrameMarking& frame_marking) {
- if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id))
- return 3;
- else
- return 1;
-}
-
-bool FrameMarkingExtension::Write(rtc::ArrayView<uint8_t> data,
- const FrameMarking& frame_marking) {
- RTC_DCHECK_GE(data.size(), 1);
- RTC_CHECK_LE(frame_marking.temporal_id, 0x07);
- data[0] = frame_marking.start_of_frame ? 0x80 : 0x00;
- data[0] |= frame_marking.end_of_frame ? 0x40 : 0x00;
- data[0] |= frame_marking.independent_frame ? 0x20 : 0x00;
- data[0] |= frame_marking.discardable_frame ? 0x10 : 0x00;
-
- if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id)) {
- RTC_DCHECK_EQ(data.size(), 3);
- data[0] |= frame_marking.base_layer_sync ? 0x08 : 0x00;
- data[0] |= frame_marking.temporal_id & 0x07;
- data[1] = frame_marking.layer_id;
- data[2] = frame_marking.tl0_pic_idx;
- }
- return true;
-}
-
// Color space including HDR metadata as an optional field.
//
// RTP header extension to carry color space information and optionally HDR
diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h
index f4517bb513..8a81280f7b 100644
--- a/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -19,7 +19,6 @@
#include "api/rtp_headers.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -217,23 +216,6 @@ class VideoTimingExtension {
uint8_t offset);
};
-class FrameMarkingExtension {
- public:
- using value_type = FrameMarking;
- static constexpr RTPExtensionType kId = kRtpExtensionFrameMarking;
- static constexpr const char kUri[] =
- "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
-
- static bool Parse(rtc::ArrayView<const uint8_t> data,
- FrameMarking* frame_marking);
- static size_t ValueSize(const FrameMarking& frame_marking);
- static bool Write(rtc::ArrayView<uint8_t> data,
- const FrameMarking& frame_marking);
-
- private:
- static bool IsScalable(uint8_t temporal_id, uint8_t layer_id);
-};
-
class ColorSpaceExtension {
public:
using value_type = ColorSpace;
diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc
index e054bb8306..767c9a0688 100644
--- a/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/modules/rtp_rtcp/source/rtp_packet.cc
@@ -188,7 +188,6 @@ void RtpPacket::ZeroMutableExtensions() {
case RTPExtensionType::kRtpExtensionAudioLevel:
case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime:
case RTPExtensionType::kRtpExtensionColorSpace:
- case RTPExtensionType::kRtpExtensionFrameMarking:
case RTPExtensionType::kRtpExtensionGenericFrameDescriptor00:
case RTPExtensionType::kRtpExtensionGenericFrameDescriptor02:
case RTPExtensionType::kRtpExtensionMid:
diff --git a/modules/rtp_rtcp/source/rtp_packet_history.cc b/modules/rtp_rtcp/source/rtp_packet_history.cc
index 58e971ff1d..1fbfb7651d 100644
--- a/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -93,7 +93,7 @@ RtpPacketHistory::~RtpPacketHistory() {}
void RtpPacketHistory::SetStorePacketsStatus(StorageMode mode,
size_t number_to_store) {
RTC_DCHECK_LE(number_to_store, kMaxCapacity);
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode != StorageMode::kDisabled && mode_ != StorageMode::kDisabled) {
RTC_LOG(LS_WARNING) << "Purging packet history in order to re-set status.";
}
@@ -103,12 +103,12 @@ void RtpPacketHistory::SetStorePacketsStatus(StorageMode mode,
}
RtpPacketHistory::StorageMode RtpPacketHistory::GetStorageMode() const {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return mode_;
}
void RtpPacketHistory::SetRtt(int64_t rtt_ms) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK_GE(rtt_ms, 0);
rtt_ms_ = rtt_ms;
// If storage is not disabled, packets will be removed after a timeout
@@ -122,7 +122,7 @@ void RtpPacketHistory::SetRtt(int64_t rtt_ms) {
void RtpPacketHistory::PutRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
absl::optional<int64_t> send_time_ms) {
RTC_DCHECK(packet);
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
int64_t now_ms = clock_->TimeInMilliseconds();
if (mode_ == StorageMode::kDisabled) {
return;
@@ -170,7 +170,7 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr<RtpPacketToSend> packet,
std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPacketAndSetSendTime(
uint16_t sequence_number) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return nullptr;
}
@@ -210,7 +210,7 @@ std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPacketAndMarkAsPending(
uint16_t sequence_number,
rtc::FunctionView<std::unique_ptr<RtpPacketToSend>(const RtpPacketToSend&)>
encapsulate) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return nullptr;
}
@@ -241,7 +241,7 @@ std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPacketAndMarkAsPending(
}
void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return;
}
@@ -263,7 +263,7 @@ void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) {
absl::optional<RtpPacketHistory::PacketState> RtpPacketHistory::GetPacketState(
uint16_t sequence_number) const {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return absl::nullopt;
}
@@ -311,7 +311,7 @@ std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPayloadPaddingPacket() {
std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPayloadPaddingPacket(
rtc::FunctionView<std::unique_ptr<RtpPacketToSend>(const RtpPacketToSend&)>
encapsulate) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return nullptr;
}
@@ -357,7 +357,7 @@ std::unique_ptr<RtpPacketToSend> RtpPacketHistory::GetPayloadPaddingPacket(
void RtpPacketHistory::CullAcknowledgedPackets(
rtc::ArrayView<const uint16_t> sequence_numbers) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
for (uint16_t sequence_number : sequence_numbers) {
int packet_index = GetPacketIndex(sequence_number);
if (packet_index < 0 ||
@@ -369,7 +369,7 @@ void RtpPacketHistory::CullAcknowledgedPackets(
}
bool RtpPacketHistory::SetPendingTransmission(uint16_t sequence_number) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mode_ == StorageMode::kDisabled) {
return false;
}
@@ -384,7 +384,7 @@ bool RtpPacketHistory::SetPendingTransmission(uint16_t sequence_number) {
}
void RtpPacketHistory::Clear() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
Reset();
}
diff --git a/modules/rtp_rtcp/source/rtp_packet_history.h b/modules/rtp_rtcp/source/rtp_packet_history.h
index db25b17a17..f249116b7a 100644
--- a/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -20,7 +20,7 @@
#include "api/function_view.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -193,7 +193,7 @@ class RtpPacketHistory {
Clock* const clock_;
const bool enable_padding_prio_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
size_t number_to_store_ RTC_GUARDED_BY(lock_);
StorageMode mode_ RTC_GUARDED_BY(lock_);
int64_t rtt_ms_ RTC_GUARDED_BY(lock_);
diff --git a/modules/rtp_rtcp/source/rtp_packet_received.cc b/modules/rtp_rtcp/source/rtp_packet_received.cc
index 56aea8eb5e..feadee1db1 100644
--- a/modules/rtp_rtcp/source/rtp_packet_received.cc
+++ b/modules/rtp_rtcp/source/rtp_packet_received.cc
@@ -69,8 +69,6 @@ void RtpPacketReceived::GetHeader(RTPHeader* header) const {
&header->extension.videoContentType);
header->extension.has_video_timing =
GetExtension<VideoTimingExtension>(&header->extension.video_timing);
- header->extension.has_frame_marking =
- GetExtension<FrameMarkingExtension>(&header->extension.frame_marking);
GetExtension<RtpStreamId>(&header->extension.stream_id);
GetExtension<RepairedRtpStreamId>(&header->extension.repaired_stream_id);
GetExtension<RtpMid>(&header->extension.mid);
diff --git a/modules/rtp_rtcp/source/rtp_packet_to_send.h b/modules/rtp_rtcp/source/rtp_packet_to_send.h
index 8997bce0d2..9aaf9a52e6 100644
--- a/modules/rtp_rtcp/source/rtp_packet_to_send.h
+++ b/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -108,6 +108,15 @@ class RtpPacketToSend : public RtpPacket {
void set_is_key_frame(bool is_key_frame) { is_key_frame_ = is_key_frame; }
bool is_key_frame() const { return is_key_frame_; }
+ // Indicates if packets should be protected by FEC (Forward Error Correction).
+ void set_fec_protect_packet(bool protect) { fec_protect_packet_ = protect; }
+ bool fec_protect_packet() const { return fec_protect_packet_; }
+
+ // Indicates if packet is using RED encapsulation, in accordance with
+ // https://tools.ietf.org/html/rfc2198
+ void set_is_red(bool is_red) { is_red_ = is_red; }
+ bool is_red() const { return is_red_; }
+
private:
int64_t capture_time_ms_ = 0;
absl::optional<RtpPacketMediaType> packet_type_;
@@ -116,6 +125,8 @@ class RtpPacketToSend : public RtpPacket {
std::vector<uint8_t> application_data_;
bool is_first_packet_of_frame_ = false;
bool is_key_frame_ = false;
+ bool fec_protect_packet_ = false;
+ bool is_red_ = false;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index fb6f8a3f8f..92abd9be6f 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -39,7 +39,7 @@ const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
} // namespace
ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext(
- const RtpRtcp::Configuration& config)
+ const RtpRtcpInterface::Configuration& config)
: packet_history(config.clock, config.enable_rtx_padding_prioritization),
packet_sender(config, &packet_history),
non_paced_sender(&packet_sender),
@@ -48,11 +48,11 @@ ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext(
&packet_history,
config.paced_sender ? config.paced_sender : &non_paced_sender) {}
-RtpRtcp::Configuration::Configuration() = default;
-RtpRtcp::Configuration::Configuration(Configuration&& rhs) = default;
-
-std::unique_ptr<RtpRtcp> RtpRtcp::Create(const Configuration& configuration) {
+std::unique_ptr<RtpRtcp> RtpRtcp::DEPRECATED_Create(
+ const Configuration& configuration) {
RTC_DCHECK(configuration.clock);
+ RTC_LOG(LS_ERROR)
+ << "*********** USING WebRTC INTERNAL IMPLEMENTATION DETAILS ***********";
return std::make_unique<ModuleRtpRtcpImpl>(configuration);
}
@@ -96,23 +96,34 @@ int64_t ModuleRtpRtcpImpl::TimeUntilNextProcess() {
// Process any pending tasks such as timeouts (non time critical events).
void ModuleRtpRtcpImpl::Process() {
const int64_t now = clock_->TimeInMilliseconds();
+ // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200
+ // times a second.
next_process_time_ = now + kRtpRtcpMaxIdleTimeProcessMs;
if (rtp_sender_) {
if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) {
rtp_sender_->packet_sender.ProcessBitrateAndNotifyObservers();
last_bitrate_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 10ms). Seems like this is a no-op?
next_process_time_ =
std::min(next_process_time_, now + kRtpRtcpBitrateProcessTimeMs);
}
}
+ // TODO(bugs.webrtc.org/11581): We update the RTT once a second, whereas other
+ // things that run in this method are updated much more frequently. Move the
+ // RTT checking over to the worker thread, which matches better with where the
+ // stats are maintained.
bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs;
if (rtcp_sender_.Sending()) {
// Process RTT if we have received a report block and we haven't
// processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds.
- if (rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_ &&
- process_rtt) {
+ // Note that LastReceivedReportBlockMs() grabs a lock, so check
+ // |process_rtt| first.
+ if (process_rtt &&
+ rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_) {
std::vector<RTCPReportBlock> receive_blocks;
rtcp_receiver_.StatisticsReceived(&receive_blocks);
int64_t max_rtt = 0;
@@ -129,6 +140,12 @@ void ModuleRtpRtcpImpl::Process() {
// Verify receiver reports are delivered and the reported sequence number
// is increasing.
+ // TODO(bugs.webrtc.org/11581): The timeout value needs to be checked every
+ // few seconds (see internals of RtcpRrTimeout). Here, we may be polling it
+ // a couple of hundred times a second, which isn't great since it grabs a
+ // lock. Note also that LastReceivedReportBlockMs() (called above) and
+ // RtcpRrTimeout() both grab the same lock and check the same timer, so
+ // it should be possible to consolidate that work somehow.
if (rtcp_receiver_.RtcpRrTimeout()) {
RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received.";
} else if (rtcp_receiver_.RtcpRrSequenceNumberTimeout()) {
@@ -159,6 +176,9 @@ void ModuleRtpRtcpImpl::Process() {
// Get processed rtt.
if (process_rtt) {
last_rtt_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 1000ms). Seems like this is a no-op?
next_process_time_ = std::min(
next_process_time_, last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs);
if (rtt_stats_) {
@@ -238,7 +258,6 @@ void ModuleRtpRtcpImpl::SetSequenceNumber(const uint16_t seq_num) {
void ModuleRtpRtcpImpl::SetRtpState(const RtpState& rtp_state) {
rtp_sender_->packet_generator.SetRtpState(rtp_state);
- rtp_sender_->packet_sender.SetMediaHasBeenSent(rtp_state.media_has_been_sent);
rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp);
}
@@ -248,7 +267,6 @@ void ModuleRtpRtcpImpl::SetRtxState(const RtpState& rtp_state) {
RtpState ModuleRtpRtcpImpl::GetRtpState() const {
RtpState state = rtp_sender_->packet_generator.GetRtpState();
- state.media_has_been_sent = rtp_sender_->packet_sender.MediaHasBeenSent();
return state;
}
@@ -292,7 +310,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() {
state.send_bitrate =
rtp_sender_->packet_sender.GetSendRates().Sum().bps<uint32_t>();
}
- state.module = this;
+ state.receiver = &rtcp_receiver_;
LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac,
&state.remote_sr);
@@ -371,6 +389,17 @@ bool ModuleRtpRtcpImpl::TrySendPacket(RtpPacketToSend* packet,
return true;
}
+void ModuleRtpRtcpImpl::SetFecProtectionParams(const FecProtectionParams&,
+ const FecProtectionParams&) {
+ // Deferred FEC not supported in deprecated RTP module.
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>>
+ModuleRtpRtcpImpl::FetchFecPackets() {
+ // Deferred FEC not supported in deprecated RTP module.
+ return {};
+}
+
void ModuleRtpRtcpImpl::OnPacketsAcknowledged(
rtc::ArrayView<const uint16_t> sequence_numbers) {
RTC_DCHECK(rtp_sender_);
@@ -503,7 +532,8 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
const uint32_t name,
const uint8_t* data,
const uint16_t length) {
- return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length);
+ RTC_NOTREACHED() << "Not implemented";
+ return -1;
}
void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) {
@@ -697,7 +727,6 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
rtcp_receiver_.SetRemoteSSRC(ssrc);
}
-// TODO(nisse): Delete video_rate amd fec_rate arguments.
void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
@@ -773,14 +802,9 @@ bool ModuleRtpRtcpImpl::LastReceivedNTP(
return true;
}
-// Called from RTCPsender.
-std::vector<rtcp::TmmbItem> ModuleRtpRtcpImpl::BoundingSet(bool* tmmbr_owner) {
- return rtcp_receiver_.BoundingSet(tmmbr_owner);
-}
-
void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
{
- rtc::CritScope cs(&critical_section_rtt_);
+ MutexLock lock(&mutex_rtt_);
rtt_ms_ = rtt_ms;
}
if (rtp_sender_) {
@@ -789,7 +813,7 @@ void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
}
int64_t ModuleRtpRtcpImpl::rtt_ms() const {
- rtc::CritScope cs(&critical_section_rtt_);
+ MutexLock lock(&mutex_rtt_);
return rtt_ms_;
}
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index debb433297..c413efe13a 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -26,15 +26,15 @@
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType
+#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "modules/rtp_rtcp/source/rtcp_receiver.h"
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
-#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/gtest_prod_util.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -42,9 +42,11 @@ class Clock;
struct PacedPacketInfo;
struct RTPVideoHeader;
+// DEPRECATED.
class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
public:
- explicit ModuleRtpRtcpImpl(const RtpRtcp::Configuration& configuration);
+ explicit ModuleRtpRtcpImpl(
+ const RtpRtcpInterface::Configuration& configuration);
~ModuleRtpRtcpImpl() override;
// Returns the number of milliseconds until the module want a worker thread to
@@ -137,6 +139,11 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
bool TrySendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) override;
+ void SetFecProtectionParams(const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params) override;
+
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFecPackets() override;
+
void OnPacketsAcknowledged(
rtc::ArrayView<const uint16_t> sequence_numbers) override;
@@ -257,8 +264,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
uint32_t* NTPfrac,
uint32_t* remote_sr) const;
- std::vector<rtcp::TmmbItem> BoundingSet(bool* tmmbr_owner);
-
void BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
@@ -294,6 +299,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
RTCPReceiver* rtcp_receiver() { return &rtcp_receiver_; }
const RTCPReceiver* rtcp_receiver() const { return &rtcp_receiver_; }
+ void SetMediaHasBeenSent(bool media_has_been_sent) {
+ rtp_sender_->packet_sender.SetMediaHasBeenSent(media_has_been_sent);
+ }
+
Clock* clock() const { return clock_; }
// TODO(sprang): Remove when usage is gone.
@@ -305,14 +314,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly);
struct RtpSenderContext {
- explicit RtpSenderContext(const RtpRtcp::Configuration& config);
+ explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config);
// Storage of packets, for retransmissions and padding, if applicable.
RtpPacketHistory packet_history;
// Handles final time timestamping/stats/etc and handover to Transport.
- RtpSenderEgress packet_sender;
+ DEPRECATED_RtpSenderEgress packet_sender;
// If no paced sender configured, this class will be used to pass packets
// from |packet_generator_| to |packet_sender_|.
- RtpSenderEgress::NonPacedPacketSender non_paced_sender;
+ DEPRECATED_RtpSenderEgress::NonPacedPacketSender non_paced_sender;
// Handles creation of RTP packets to be sent.
RTPSender packet_generator;
};
@@ -343,7 +352,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
RtcpRttStats* const rtt_stats_;
// The processed RTT from RtcpRttStats.
- rtc::CriticalSection critical_section_rtt_;
+ mutable Mutex mutex_rtt_;
int64_t rtt_ms_;
};
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
new file mode 100644
index 0000000000..31e4854839
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
@@ -0,0 +1,760 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <cstdint>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "api/transport/field_trial_based_config.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+#ifdef _WIN32
+// Disable warning C4355: 'this' : used in base member initializer list.
+#pragma warning(disable : 4355)
+#endif
+
+namespace webrtc {
+namespace {
+const int64_t kRtpRtcpMaxIdleTimeProcessMs = 5;
+const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
+
+constexpr TimeDelta kRttUpdateInterval = TimeDelta::Millis(1000);
+} // namespace
+
+ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext(
+ const RtpRtcpInterface::Configuration& config)
+ : packet_history(config.clock, config.enable_rtx_padding_prioritization),
+ packet_sender(config, &packet_history),
+ non_paced_sender(&packet_sender, this),
+ packet_generator(
+ config,
+ &packet_history,
+ config.paced_sender ? config.paced_sender : &non_paced_sender) {}
+void ModuleRtpRtcpImpl2::RtpSenderContext::AssignSequenceNumber(
+ RtpPacketToSend* packet) {
+ packet_generator.AssignSequenceNumber(packet);
+}
+
+ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration)
+ : worker_queue_(TaskQueueBase::Current()),
+ rtcp_sender_(configuration),
+ rtcp_receiver_(configuration, this),
+ clock_(configuration.clock),
+ last_rtt_process_time_(clock_->TimeInMilliseconds()),
+ next_process_time_(clock_->TimeInMilliseconds() +
+ kRtpRtcpMaxIdleTimeProcessMs),
+ packet_overhead_(28), // IPV4 UDP.
+ nack_last_time_sent_full_ms_(0),
+ nack_last_seq_number_sent_(0),
+ remote_bitrate_(configuration.remote_bitrate_estimator),
+ rtt_stats_(configuration.rtt_stats),
+ rtt_ms_(0) {
+ RTC_DCHECK(worker_queue_);
+ process_thread_checker_.Detach();
+ if (!configuration.receiver_only) {
+ rtp_sender_ = std::make_unique<RtpSenderContext>(configuration);
+ // Make sure rtcp sender use same timestamp offset as rtp sender.
+ rtcp_sender_.SetTimestampOffset(
+ rtp_sender_->packet_generator.TimestampOffset());
+ }
+
+ // Set default packet size limit.
+ // TODO(nisse): Kind-of duplicates
+ // webrtc::VideoSendStream::Config::Rtp::kDefaultMaxPacketSize.
+ const size_t kTcpOverIpv4HeaderSize = 40;
+ SetMaxRtpPacketSize(IP_PACKET_SIZE - kTcpOverIpv4HeaderSize);
+
+ if (rtt_stats_) {
+ rtt_update_task_ = RepeatingTaskHandle::DelayedStart(
+ worker_queue_, kRttUpdateInterval, [this]() {
+ PeriodicUpdate();
+ return kRttUpdateInterval;
+ });
+ }
+}
+
+ModuleRtpRtcpImpl2::~ModuleRtpRtcpImpl2() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ rtt_update_task_.Stop();
+}
+
+// static
+std::unique_ptr<ModuleRtpRtcpImpl2> ModuleRtpRtcpImpl2::Create(
+ const Configuration& configuration) {
+ RTC_DCHECK(configuration.clock);
+ RTC_DCHECK(TaskQueueBase::Current());
+ return std::make_unique<ModuleRtpRtcpImpl2>(configuration);
+}
+
+// Returns the number of milliseconds until the module want a worker thread
+// to call Process.
+int64_t ModuleRtpRtcpImpl2::TimeUntilNextProcess() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ return std::max<int64_t>(0,
+ next_process_time_ - clock_->TimeInMilliseconds());
+}
+
+// Process any pending tasks such as timeouts (non time critical events).
+void ModuleRtpRtcpImpl2::Process() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+
+ const Timestamp now = clock_->CurrentTime();
+
+ // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200
+ // times a second.
+ next_process_time_ = now.ms() + kRtpRtcpMaxIdleTimeProcessMs;
+
+ // TODO(bugs.webrtc.org/11581): once we don't use Process() to trigger
+ // calls to SendRTCP(), the only remaining timer will require remote_bitrate_
+ // to be not null. In that case, we can disable the timer when it is null.
+ if (remote_bitrate_ && rtcp_sender_.Sending() && rtcp_sender_.TMMBR()) {
+ unsigned int target_bitrate = 0;
+ std::vector<unsigned int> ssrcs;
+ if (remote_bitrate_->LatestEstimate(&ssrcs, &target_bitrate)) {
+ if (!ssrcs.empty()) {
+ target_bitrate = target_bitrate / ssrcs.size();
+ }
+ rtcp_sender_.SetTargetBitrate(target_bitrate);
+ }
+ }
+
+ // TODO(bugs.webrtc.org/11581): Run this on a separate set of delayed tasks
+ // based off of next_time_to_send_rtcp_ in RTCPSender.
+ if (rtcp_sender_.TimeToSendRTCPReport())
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+}
+
+void ModuleRtpRtcpImpl2::SetRtxSendStatus(int mode) {
+ rtp_sender_->packet_generator.SetRtxStatus(mode);
+}
+
+int ModuleRtpRtcpImpl2::RtxSendStatus() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.RtxStatus() : kRtxOff;
+}
+
+void ModuleRtpRtcpImpl2::SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) {
+ rtp_sender_->packet_generator.SetRtxPayloadType(payload_type,
+ associated_payload_type);
+}
+
+absl::optional<uint32_t> ModuleRtpRtcpImpl2::RtxSsrc() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.RtxSsrc() : absl::nullopt;
+}
+
+absl::optional<uint32_t> ModuleRtpRtcpImpl2::FlexfecSsrc() const {
+ if (rtp_sender_) {
+ return rtp_sender_->packet_generator.FlexfecSsrc();
+ }
+ return absl::nullopt;
+}
+
+void ModuleRtpRtcpImpl2::IncomingRtcpPacket(const uint8_t* rtcp_packet,
+ const size_t length) {
+ rtcp_receiver_.IncomingPacket(rtcp_packet, length);
+}
+
+void ModuleRtpRtcpImpl2::RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) {
+ rtcp_sender_.SetRtpClockRate(payload_type, payload_frequency);
+}
+
+int32_t ModuleRtpRtcpImpl2::DeRegisterSendPayload(const int8_t payload_type) {
+ return 0;
+}
+
+uint32_t ModuleRtpRtcpImpl2::StartTimestamp() const {
+ return rtp_sender_->packet_generator.TimestampOffset();
+}
+
+// Configure start timestamp, default is a random number.
+void ModuleRtpRtcpImpl2::SetStartTimestamp(const uint32_t timestamp) {
+ rtcp_sender_.SetTimestampOffset(timestamp);
+ rtp_sender_->packet_generator.SetTimestampOffset(timestamp);
+ rtp_sender_->packet_sender.SetTimestampOffset(timestamp);
+}
+
+uint16_t ModuleRtpRtcpImpl2::SequenceNumber() const {
+ return rtp_sender_->packet_generator.SequenceNumber();
+}
+
+// Set SequenceNumber, default is a random number.
+void ModuleRtpRtcpImpl2::SetSequenceNumber(const uint16_t seq_num) {
+ rtp_sender_->packet_generator.SetSequenceNumber(seq_num);
+}
+
+void ModuleRtpRtcpImpl2::SetRtpState(const RtpState& rtp_state) {
+ rtp_sender_->packet_generator.SetRtpState(rtp_state);
+ rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp);
+}
+
+void ModuleRtpRtcpImpl2::SetRtxState(const RtpState& rtp_state) {
+ rtp_sender_->packet_generator.SetRtxRtpState(rtp_state);
+}
+
+RtpState ModuleRtpRtcpImpl2::GetRtpState() const {
+ RtpState state = rtp_sender_->packet_generator.GetRtpState();
+ return state;
+}
+
+RtpState ModuleRtpRtcpImpl2::GetRtxState() const {
+ return rtp_sender_->packet_generator.GetRtxRtpState();
+}
+
+void ModuleRtpRtcpImpl2::SetRid(const std::string& rid) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetRid(rid);
+ }
+}
+
+void ModuleRtpRtcpImpl2::SetMid(const std::string& mid) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetMid(mid);
+ }
+ // TODO(bugs.webrtc.org/4050): If we end up supporting the MID SDES item for
+ // RTCP, this will need to be passed down to the RTCPSender also.
+}
+
+void ModuleRtpRtcpImpl2::SetCsrcs(const std::vector<uint32_t>& csrcs) {
+ rtcp_sender_.SetCsrcs(csrcs);
+ rtp_sender_->packet_generator.SetCsrcs(csrcs);
+}
+
+// TODO(pbos): Handle media and RTX streams separately (separate RTCP
+// feedbacks).
+RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() {
+ // TODO(bugs.webrtc.org/11581): Called by potentially multiple threads.
+ // Mostly "Send*" methods. Make sure it's only called on the
+ // construction thread.
+
+ RTCPSender::FeedbackState state;
+ // This is called also when receiver_only is true. Hence below
+ // checks that rtp_sender_ exists.
+ if (rtp_sender_) {
+ StreamDataCounters rtp_stats;
+ StreamDataCounters rtx_stats;
+ rtp_sender_->packet_sender.GetDataCounters(&rtp_stats, &rtx_stats);
+ state.packets_sent =
+ rtp_stats.transmitted.packets + rtx_stats.transmitted.packets;
+ state.media_bytes_sent = rtp_stats.transmitted.payload_bytes +
+ rtx_stats.transmitted.payload_bytes;
+ state.send_bitrate =
+ rtp_sender_->packet_sender.GetSendRates().Sum().bps<uint32_t>();
+ }
+ state.receiver = &rtcp_receiver_;
+
+ LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac,
+ &state.remote_sr);
+
+ state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo();
+
+ return state;
+}
+
+// TODO(nisse): This method shouldn't be called for a receive-only
+// stream. Delete rtp_sender_ check as soon as all applications are
+// updated.
+int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) {
+ if (rtcp_sender_.Sending() != sending) {
+ // Sends RTCP BYE when going from true to false
+ if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) {
+ RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
+ }
+ }
+ return 0;
+}
+
+bool ModuleRtpRtcpImpl2::Sending() const {
+ return rtcp_sender_.Sending();
+}
+
+// TODO(nisse): This method shouldn't be called for a receive-only
+// stream. Delete rtp_sender_ check as soon as all applications are
+// updated.
+void ModuleRtpRtcpImpl2::SetSendingMediaStatus(const bool sending) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetSendingMediaStatus(sending);
+ } else {
+ RTC_DCHECK(!sending);
+ }
+}
+
+bool ModuleRtpRtcpImpl2::SendingMedia() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.SendingMedia() : false;
+}
+
+bool ModuleRtpRtcpImpl2::IsAudioConfigured() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.IsAudioConfigured()
+ : false;
+}
+
+void ModuleRtpRtcpImpl2::SetAsPartOfAllocation(bool part_of_allocation) {
+ RTC_CHECK(rtp_sender_);
+ rtp_sender_->packet_sender.ForceIncludeSendPacketsInAllocation(
+ part_of_allocation);
+}
+
+bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) {
+ if (!Sending())
+ return false;
+
+ rtcp_sender_.SetLastRtpTime(timestamp, capture_time_ms, payload_type);
+ // Make sure an RTCP report isn't queued behind a key frame.
+ if (rtcp_sender_.TimeToSendRTCPReport(force_sender_report))
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+
+ return true;
+}
+
+bool ModuleRtpRtcpImpl2::TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) {
+ RTC_DCHECK(rtp_sender_);
+ // TODO(sprang): Consider if we can remove this check.
+ if (!rtp_sender_->packet_generator.SendingMedia()) {
+ return false;
+ }
+ rtp_sender_->packet_sender.SendPacket(packet, pacing_info);
+ return true;
+}
+
+void ModuleRtpRtcpImpl2::SetFecProtectionParams(
+ const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params) {
+ RTC_DCHECK(rtp_sender_);
+ rtp_sender_->packet_sender.SetFecProtectionParameters(delta_params,
+ key_params);
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>>
+ModuleRtpRtcpImpl2::FetchFecPackets() {
+ RTC_DCHECK(rtp_sender_);
+ auto fec_packets = rtp_sender_->packet_sender.FetchFecPackets();
+ if (!fec_packets.empty()) {
+ // Don't assign sequence numbers for FlexFEC packets.
+ const bool generate_sequence_numbers =
+ !rtp_sender_->packet_sender.FlexFecSsrc().has_value();
+ if (generate_sequence_numbers) {
+ for (auto& fec_packet : fec_packets) {
+ rtp_sender_->packet_generator.AssignSequenceNumber(fec_packet.get());
+ }
+ }
+ }
+ return fec_packets;
+}
+
+void ModuleRtpRtcpImpl2::OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) {
+ RTC_DCHECK(rtp_sender_);
+ rtp_sender_->packet_history.CullAcknowledgedPackets(sequence_numbers);
+}
+
+bool ModuleRtpRtcpImpl2::SupportsPadding() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.SupportsPadding();
+}
+
+bool ModuleRtpRtcpImpl2::SupportsRtxPayloadPadding() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.SupportsRtxPayloadPadding();
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>>
+ModuleRtpRtcpImpl2::GeneratePadding(size_t target_size_bytes) {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.GeneratePadding(
+ target_size_bytes, rtp_sender_->packet_sender.MediaHasBeenSent());
+}
+
+std::vector<RtpSequenceNumberMap::Info>
+ModuleRtpRtcpImpl2::GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers);
+}
+
+size_t ModuleRtpRtcpImpl2::ExpectedPerPacketOverhead() const {
+ if (!rtp_sender_) {
+ return 0;
+ }
+ return rtp_sender_->packet_generator.ExpectedPerPacketOverhead();
+}
+
+size_t ModuleRtpRtcpImpl2::MaxRtpPacketSize() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.MaxRtpPacketSize();
+}
+
+void ModuleRtpRtcpImpl2::SetMaxRtpPacketSize(size_t rtp_packet_size) {
+ RTC_DCHECK_LE(rtp_packet_size, IP_PACKET_SIZE)
+ << "rtp packet size too large: " << rtp_packet_size;
+ RTC_DCHECK_GT(rtp_packet_size, packet_overhead_)
+ << "rtp packet size too small: " << rtp_packet_size;
+
+ rtcp_sender_.SetMaxRtpPacketSize(rtp_packet_size);
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetMaxRtpPacketSize(rtp_packet_size);
+ }
+}
+
+RtcpMode ModuleRtpRtcpImpl2::RTCP() const {
+ return rtcp_sender_.Status();
+}
+
+// Configure RTCP status i.e on/off.
+void ModuleRtpRtcpImpl2::SetRTCPStatus(const RtcpMode method) {
+ rtcp_sender_.SetRTCPStatus(method);
+}
+
+int32_t ModuleRtpRtcpImpl2::SetCNAME(const char* c_name) {
+ return rtcp_sender_.SetCNAME(c_name);
+}
+
+int32_t ModuleRtpRtcpImpl2::RemoteNTP(uint32_t* received_ntpsecs,
+ uint32_t* received_ntpfrac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const {
+ return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac,
+ rtcp_arrival_time_secs, rtcp_arrival_time_frac,
+ rtcp_timestamp)
+ ? 0
+ : -1;
+}
+
+// TODO(tommi): Check if |avg_rtt_ms|, |min_rtt_ms|, |max_rtt_ms| params are
+// actually used in practice (some callers ask for it but don't use it). It
+// could be that only |rtt| is needed and if so, then the fast path could be to
+// just call rtt_ms() and rely on the calculation being done periodically.
+int32_t ModuleRtpRtcpImpl2::RTT(const uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const {
+ int32_t ret = rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ if (rtt && *rtt == 0) {
+ // Try to get RTT from RtcpRttStats class.
+ *rtt = rtt_ms();
+ }
+ return ret;
+}
+
+int64_t ModuleRtpRtcpImpl2::ExpectedRetransmissionTimeMs() const {
+ int64_t expected_retransmission_time_ms = rtt_ms();
+ if (expected_retransmission_time_ms > 0) {
+ return expected_retransmission_time_ms;
+ }
+ // No rtt available (|kRttUpdateInterval| not yet passed?), so try to
+ // poll avg_rtt_ms directly from rtcp receiver.
+ if (rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), nullptr,
+ &expected_retransmission_time_ms, nullptr,
+ nullptr) == 0) {
+ return expected_retransmission_time_ms;
+ }
+ return kDefaultExpectedRetransmissionTimeMs;
+}
+
+// Force a send of an RTCP packet.
+// Normal SR and RR are triggered via the process function.
+int32_t ModuleRtpRtcpImpl2::SendRTCP(RTCPPacketType packet_type) {
+ return rtcp_sender_.SendRTCP(GetFeedbackState(), packet_type);
+}
+
+void ModuleRtpRtcpImpl2::SetRtcpXrRrtrStatus(bool enable) {
+ rtcp_receiver_.SetRtcpXrRrtrStatus(enable);
+ rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable);
+}
+
+bool ModuleRtpRtcpImpl2::RtcpXrRrtrStatus() const {
+ return rtcp_sender_.RtcpXrReceiverReferenceTime();
+}
+
+void ModuleRtpRtcpImpl2::GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ rtp_sender_->packet_sender.GetDataCounters(rtp_counters, rtx_counters);
+}
+
+// Received RTCP report.
+int32_t ModuleRtpRtcpImpl2::RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const {
+ return rtcp_receiver_.StatisticsReceived(receive_blocks);
+}
+
+std::vector<ReportBlockData> ModuleRtpRtcpImpl2::GetLatestReportBlockData()
+ const {
+ return rtcp_receiver_.GetLatestReportBlockData();
+}
+
+// (REMB) Receiver Estimated Max Bitrate.
+void ModuleRtpRtcpImpl2::SetRemb(int64_t bitrate_bps,
+ std::vector<uint32_t> ssrcs) {
+ rtcp_sender_.SetRemb(bitrate_bps, std::move(ssrcs));
+}
+
+void ModuleRtpRtcpImpl2::UnsetRemb() {
+ rtcp_sender_.UnsetRemb();
+}
+
+void ModuleRtpRtcpImpl2::SetExtmapAllowMixed(bool extmap_allow_mixed) {
+ rtp_sender_->packet_generator.SetExtmapAllowMixed(extmap_allow_mixed);
+}
+
+void ModuleRtpRtcpImpl2::RegisterRtpHeaderExtension(absl::string_view uri,
+ int id) {
+ bool registered =
+ rtp_sender_->packet_generator.RegisterRtpHeaderExtension(uri, id);
+ RTC_CHECK(registered);
+}
+
+int32_t ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension(
+ const RTPExtensionType type) {
+ return rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(type);
+}
+void ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension(
+ absl::string_view uri) {
+ rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(uri);
+}
+
+void ModuleRtpRtcpImpl2::SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set) {
+ rtcp_sender_.SetTmmbn(std::move(bounding_set));
+}
+
+// Send a Negative acknowledgment packet.
+int32_t ModuleRtpRtcpImpl2::SendNACK(const uint16_t* nack_list,
+ const uint16_t size) {
+ uint16_t nack_length = size;
+ uint16_t start_id = 0;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (TimeToSendFullNackList(now_ms)) {
+ nack_last_time_sent_full_ms_ = now_ms;
+ } else {
+ // Only send extended list.
+ if (nack_last_seq_number_sent_ == nack_list[size - 1]) {
+ // Last sequence number is the same, do not send list.
+ return 0;
+ }
+ // Send new sequence numbers.
+ for (int i = 0; i < size; ++i) {
+ if (nack_last_seq_number_sent_ == nack_list[i]) {
+ start_id = i + 1;
+ break;
+ }
+ }
+ nack_length = size - start_id;
+ }
+
+ // Our RTCP NACK implementation is limited to kRtcpMaxNackFields sequence
+ // numbers per RTCP packet.
+ if (nack_length > kRtcpMaxNackFields) {
+ nack_length = kRtcpMaxNackFields;
+ }
+ nack_last_seq_number_sent_ = nack_list[start_id + nack_length - 1];
+
+ return rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, nack_length,
+ &nack_list[start_id]);
+}
+
+void ModuleRtpRtcpImpl2::SendNack(
+ const std::vector<uint16_t>& sequence_numbers) {
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, sequence_numbers.size(),
+ sequence_numbers.data());
+}
+
+bool ModuleRtpRtcpImpl2::TimeToSendFullNackList(int64_t now) const {
+ // Use RTT from RtcpRttStats class if provided.
+ int64_t rtt = rtt_ms();
+ if (rtt == 0) {
+ rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL);
+ }
+
+ const int64_t kStartUpRttMs = 100;
+ int64_t wait_time = 5 + ((rtt * 3) >> 1); // 5 + RTT * 1.5.
+ if (rtt == 0) {
+ wait_time = kStartUpRttMs;
+ }
+
+ // Send a full NACK list once within every |wait_time|.
+ return now - nack_last_time_sent_full_ms_ > wait_time;
+}
+
+// Store the sent packets, needed to answer to Negative acknowledgment requests.
+void ModuleRtpRtcpImpl2::SetStorePacketsStatus(const bool enable,
+ const uint16_t number_to_store) {
+ rtp_sender_->packet_history.SetStorePacketsStatus(
+ enable ? RtpPacketHistory::StorageMode::kStoreAndCull
+ : RtpPacketHistory::StorageMode::kDisabled,
+ number_to_store);
+}
+
+bool ModuleRtpRtcpImpl2::StorePackets() const {
+ return rtp_sender_->packet_history.GetStorageMode() !=
+ RtpPacketHistory::StorageMode::kDisabled;
+}
+
+void ModuleRtpRtcpImpl2::SendCombinedRtcpPacket(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets) {
+ rtcp_sender_.SendCombinedRtcpPacket(std::move(rtcp_packets));
+}
+
+int32_t ModuleRtpRtcpImpl2::SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ return rtcp_sender_.SendLossNotification(
+ GetFeedbackState(), last_decoded_seq_num, last_received_seq_num,
+ decodability_flag, buffering_allowed);
+}
+
+void ModuleRtpRtcpImpl2::SetRemoteSSRC(const uint32_t ssrc) {
+ // Inform about the incoming SSRC.
+ rtcp_sender_.SetRemoteSSRC(ssrc);
+ rtcp_receiver_.SetRemoteSSRC(ssrc);
+}
+
+// TODO(nisse): Delete video_rate amd fec_rate arguments.
+void ModuleRtpRtcpImpl2::BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nack_rate) const {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ RtpSendRates send_rates = rtp_sender_->packet_sender.GetSendRates();
+ *total_rate = send_rates.Sum().bps<uint32_t>();
+ if (video_rate)
+ *video_rate = 0;
+ if (fec_rate)
+ *fec_rate = 0;
+ *nack_rate = send_rates[RtpPacketMediaType::kRetransmission].bps<uint32_t>();
+}
+
+RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ return rtp_sender_->packet_sender.GetSendRates();
+}
+
+void ModuleRtpRtcpImpl2::OnRequestSendReport() {
+ SendRTCP(kRtcpSr);
+}
+
+void ModuleRtpRtcpImpl2::OnReceivedNack(
+ const std::vector<uint16_t>& nack_sequence_numbers) {
+ if (!rtp_sender_)
+ return;
+
+ if (!StorePackets() || nack_sequence_numbers.empty()) {
+ return;
+ }
+ // Use RTT from RtcpRttStats class if provided.
+ int64_t rtt = rtt_ms();
+ if (rtt == 0) {
+ rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL);
+ }
+ rtp_sender_->packet_generator.OnReceivedNack(nack_sequence_numbers, rtt);
+}
+
+void ModuleRtpRtcpImpl2::OnReceivedRtcpReportBlocks(
+ const ReportBlockList& report_blocks) {
+ if (rtp_sender_) {
+ uint32_t ssrc = SSRC();
+ absl::optional<uint32_t> rtx_ssrc;
+ if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) {
+ rtx_ssrc = rtp_sender_->packet_generator.RtxSsrc();
+ }
+
+ for (const RTCPReportBlock& report_block : report_blocks) {
+ if (ssrc == report_block.source_ssrc) {
+ rtp_sender_->packet_generator.OnReceivedAckOnSsrc(
+ report_block.extended_highest_sequence_number);
+ } else if (rtx_ssrc && *rtx_ssrc == report_block.source_ssrc) {
+ rtp_sender_->packet_generator.OnReceivedAckOnRtxSsrc(
+ report_block.extended_highest_sequence_number);
+ }
+ }
+ }
+}
+
+bool ModuleRtpRtcpImpl2::LastReceivedNTP(
+ uint32_t* rtcp_arrival_time_secs, // When we got the last report.
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* remote_sr) const {
+ // Remote SR: NTP inside the last received (mid 16 bits from sec and frac).
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+
+ if (!rtcp_receiver_.NTP(&ntp_secs, &ntp_frac, rtcp_arrival_time_secs,
+ rtcp_arrival_time_frac, NULL)) {
+ return false;
+ }
+ *remote_sr =
+ ((ntp_secs & 0x0000ffff) << 16) + ((ntp_frac & 0xffff0000) >> 16);
+ return true;
+}
+
+void ModuleRtpRtcpImpl2::set_rtt_ms(int64_t rtt_ms) {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ {
+ MutexLock lock(&mutex_rtt_);
+ rtt_ms_ = rtt_ms;
+ }
+ if (rtp_sender_) {
+ rtp_sender_->packet_history.SetRtt(rtt_ms);
+ }
+}
+
+int64_t ModuleRtpRtcpImpl2::rtt_ms() const {
+ MutexLock lock(&mutex_rtt_);
+ return rtt_ms_;
+}
+
+void ModuleRtpRtcpImpl2::SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) {
+ rtcp_sender_.SetVideoBitrateAllocation(bitrate);
+}
+
+RTPSender* ModuleRtpRtcpImpl2::RtpSender() {
+ return rtp_sender_ ? &rtp_sender_->packet_generator : nullptr;
+}
+
+const RTPSender* ModuleRtpRtcpImpl2::RtpSender() const {
+ return rtp_sender_ ? &rtp_sender_->packet_generator : nullptr;
+}
+
+void ModuleRtpRtcpImpl2::PeriodicUpdate() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+
+ Timestamp check_since = clock_->CurrentTime() - kRttUpdateInterval;
+ absl::optional<TimeDelta> rtt =
+ rtcp_receiver_.OnPeriodicRttUpdate(check_since, rtcp_sender_.Sending());
+ if (rtt) {
+ rtt_stats_->OnRttUpdate(rtt->ms());
+ set_rtt_ms(rtt->ms());
+ }
+
+ // kTmmbrTimeoutIntervalMs is 25 seconds, so an order of seconds.
+ // Instead of this polling approach, consider having an optional timer in the
+ // RTCPReceiver class that is started/stopped based on the state of
+ // rtcp_sender_.TMMBR().
+ if (rtcp_sender_.TMMBR() && rtcp_receiver_.UpdateTmmbrTimers())
+ rtcp_receiver_.NotifyTmmbrUpdated();
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
new file mode 100644
index 0000000000..9611ace910
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
@@ -0,0 +1,329 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
+#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/rtp_headers.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "modules/include/module_fec_types.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_packet_history.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
+#include "rtc_base/gtest_prod_util.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
+
+namespace webrtc {
+
+class Clock;
+struct PacedPacketInfo;
+struct RTPVideoHeader;
+
+class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
+ public Module,
+ public RTCPReceiver::ModuleRtpRtcp {
+ public:
+ explicit ModuleRtpRtcpImpl2(
+ const RtpRtcpInterface::Configuration& configuration);
+ ~ModuleRtpRtcpImpl2() override;
+
+ // This method is provided to easy with migrating away from the
+ // RtpRtcp::Create factory method. Since this is an internal implementation
+ // detail though, creating an instance of ModuleRtpRtcpImpl2 directly should
+ // be fine.
+ static std::unique_ptr<ModuleRtpRtcpImpl2> Create(
+ const Configuration& configuration);
+
+ // Returns the number of milliseconds until the module want a worker thread to
+ // call Process.
+ int64_t TimeUntilNextProcess() override;
+
+ // Process any pending tasks such as timeouts.
+ void Process() override;
+
+ // Receiver part.
+
+ // Called when we receive an RTCP packet.
+ void IncomingRtcpPacket(const uint8_t* incoming_packet,
+ size_t incoming_packet_length) override;
+
+ void SetRemoteSSRC(uint32_t ssrc) override;
+
+ // Sender part.
+ void RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) override;
+
+ int32_t DeRegisterSendPayload(int8_t payload_type) override;
+
+ void SetExtmapAllowMixed(bool extmap_allow_mixed) override;
+
+ void RegisterRtpHeaderExtension(absl::string_view uri, int id) override;
+ int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) override;
+ void DeregisterSendRtpHeaderExtension(absl::string_view uri) override;
+
+ bool SupportsPadding() const override;
+ bool SupportsRtxPayloadPadding() const override;
+
+ // Get start timestamp.
+ uint32_t StartTimestamp() const override;
+
+ // Configure start timestamp, default is a random number.
+ void SetStartTimestamp(uint32_t timestamp) override;
+
+ uint16_t SequenceNumber() const override;
+
+ // Set SequenceNumber, default is a random number.
+ void SetSequenceNumber(uint16_t seq) override;
+
+ void SetRtpState(const RtpState& rtp_state) override;
+ void SetRtxState(const RtpState& rtp_state) override;
+ RtpState GetRtpState() const override;
+ RtpState GetRtxState() const override;
+
+ uint32_t SSRC() const override { return rtcp_sender_.SSRC(); }
+
+ void SetRid(const std::string& rid) override;
+
+ void SetMid(const std::string& mid) override;
+
+ void SetCsrcs(const std::vector<uint32_t>& csrcs) override;
+
+ RTCPSender::FeedbackState GetFeedbackState();
+
+ void SetRtxSendStatus(int mode) override;
+ int RtxSendStatus() const override;
+ absl::optional<uint32_t> RtxSsrc() const override;
+
+ void SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) override;
+
+ absl::optional<uint32_t> FlexfecSsrc() const override;
+
+ // Sends kRtcpByeCode when going from true to false.
+ int32_t SetSendingStatus(bool sending) override;
+
+ bool Sending() const override;
+
+ // Drops or relays media packets.
+ void SetSendingMediaStatus(bool sending) override;
+
+ bool SendingMedia() const override;
+
+ bool IsAudioConfigured() const override;
+
+ void SetAsPartOfAllocation(bool part_of_allocation) override;
+
+ bool OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) override;
+
+ bool TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) override;
+
+ void SetFecProtectionParams(const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params) override;
+
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFecPackets() override;
+
+ void OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) override;
+
+ std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
+ size_t target_size_bytes) override;
+
+ std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const override;
+
+ size_t ExpectedPerPacketOverhead() const override;
+
+ // RTCP part.
+
+ // Get RTCP status.
+ RtcpMode RTCP() const override;
+
+ // Configure RTCP status i.e on/off.
+ void SetRTCPStatus(RtcpMode method) override;
+
+ // Set RTCP CName.
+ int32_t SetCNAME(const char* c_name) override;
+
+ // Get remote NTP.
+ int32_t RemoteNTP(uint32_t* received_ntp_secs,
+ uint32_t* received_ntp_frac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const override;
+
+ // Get RoundTripTime.
+ int32_t RTT(uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const override;
+
+ int64_t ExpectedRetransmissionTimeMs() const override;
+
+ // Force a send of an RTCP packet.
+ // Normal SR and RR are triggered via the process function.
+ int32_t SendRTCP(RTCPPacketType rtcpPacketType) override;
+
+ void GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const override;
+
+ // Get received RTCP report, report block.
+ int32_t RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const override;
+ // A snapshot of the most recent Report Block with additional data of
+ // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats.
+ // Within this list, the ReportBlockData::RTCPReportBlock::source_ssrc(),
+ // which is the SSRC of the corresponding outbound RTP stream, is unique.
+ std::vector<ReportBlockData> GetLatestReportBlockData() const override;
+
+ // (REMB) Receiver Estimated Max Bitrate.
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override;
+ void UnsetRemb() override;
+
+ void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set) override;
+
+ size_t MaxRtpPacketSize() const override;
+
+ void SetMaxRtpPacketSize(size_t max_packet_size) override;
+
+ // (NACK) Negative acknowledgment part.
+
+ // Send a Negative acknowledgment packet.
+ // TODO(philipel): Deprecate SendNACK and use SendNack instead.
+ int32_t SendNACK(const uint16_t* nack_list, uint16_t size) override;
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers) override;
+
+ // Store the sent packets, needed to answer to a negative acknowledgment
+ // requests.
+ void SetStorePacketsStatus(bool enable, uint16_t number_to_store) override;
+
+ bool StorePackets() const override;
+
+ void SendCombinedRtcpPacket(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets) override;
+
+ // (XR) Receiver reference time report.
+ void SetRtcpXrRrtrStatus(bool enable) override;
+
+ bool RtcpXrRrtrStatus() const override;
+
+ // Video part.
+ int32_t SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ bool LastReceivedNTP(uint32_t* NTPsecs,
+ uint32_t* NTPfrac,
+ uint32_t* remote_sr) const;
+
+ void BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nackRate) const override;
+
+ RtpSendRates GetSendRates() const override;
+
+ void OnReceivedNack(
+ const std::vector<uint16_t>& nack_sequence_numbers) override;
+ void OnReceivedRtcpReportBlocks(
+ const ReportBlockList& report_blocks) override;
+ void OnRequestSendReport() override;
+
+ void SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) override;
+
+ RTPSender* RtpSender() override;
+ const RTPSender* RtpSender() const override;
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, Rtt);
+ FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, RttForReceiverOnly);
+
+ struct RtpSenderContext : public SequenceNumberAssigner {
+ explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config);
+ void AssignSequenceNumber(RtpPacketToSend* packet) override;
+ // Storage of packets, for retransmissions and padding, if applicable.
+ RtpPacketHistory packet_history;
+ // Handles final time timestamping/stats/etc and handover to Transport.
+ RtpSenderEgress packet_sender;
+ // If no paced sender configured, this class will be used to pass packets
+ // from |packet_generator_| to |packet_sender_|.
+ RtpSenderEgress::NonPacedPacketSender non_paced_sender;
+ // Handles creation of RTP packets to be sent.
+ RTPSender packet_generator;
+ };
+
+ void set_rtt_ms(int64_t rtt_ms);
+ int64_t rtt_ms() const;
+
+ bool TimeToSendFullNackList(int64_t now) const;
+
+ // Called on a timer, once a second, on the worker_queue_, to update the RTT,
+ // check if we need to send RTCP report, send TMMBR updates and fire events.
+ void PeriodicUpdate();
+
+ TaskQueueBase* const worker_queue_;
+ SequenceChecker process_thread_checker_;
+
+ std::unique_ptr<RtpSenderContext> rtp_sender_;
+
+ RTCPSender rtcp_sender_;
+ RTCPReceiver rtcp_receiver_;
+
+ Clock* const clock_;
+
+ int64_t last_rtt_process_time_;
+ int64_t next_process_time_;
+ uint16_t packet_overhead_;
+
+ // Send side
+ int64_t nack_last_time_sent_full_ms_;
+ uint16_t nack_last_seq_number_sent_;
+
+ RemoteBitrateEstimator* const remote_bitrate_;
+
+ RtcpRttStats* const rtt_stats_;
+ RepeatingTaskHandle rtt_update_task_ RTC_GUARDED_BY(worker_queue_);
+
+ // The processed RTT from RtcpRttStats.
+ mutable Mutex mutex_rtt_;
+ int64_t rtt_ms_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
new file mode 100644
index 0000000000..81c71aa58e
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
@@ -0,0 +1,645 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+
+#include <map>
+#include <memory>
+#include <set>
+
+#include "api/transport/field_trial_based_config.h"
+#include "api/video_codecs/video_codec.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtcp_packet.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/nack.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_sender_video.h"
+#include "rtc_base/rate_limiter.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+#include "test/rtp_header_parser.h"
+#include "test/run_loop.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+using ::testing::ElementsAre;
+
+namespace webrtc {
+namespace {
+const uint32_t kSenderSsrc = 0x12345;
+const uint32_t kReceiverSsrc = 0x23456;
+const int64_t kOneWayNetworkDelayMs = 100;
+const uint8_t kBaseLayerTid = 0;
+const uint8_t kHigherLayerTid = 1;
+const uint16_t kSequenceNumber = 100;
+
+class RtcpRttStatsTestImpl : public RtcpRttStats {
+ public:
+ RtcpRttStatsTestImpl() : rtt_ms_(0) {}
+ ~RtcpRttStatsTestImpl() override = default;
+
+ void OnRttUpdate(int64_t rtt_ms) override { rtt_ms_ = rtt_ms; }
+ int64_t LastProcessedRtt() const override { return rtt_ms_; }
+ int64_t rtt_ms_;
+};
+
+class SendTransport : public Transport {
+ public:
+ SendTransport()
+ : receiver_(nullptr),
+ time_controller_(nullptr),
+ delay_ms_(0),
+ rtp_packets_sent_(0),
+ rtcp_packets_sent_(0) {}
+
+ void SetRtpRtcpModule(ModuleRtpRtcpImpl2* receiver) { receiver_ = receiver; }
+ void SimulateNetworkDelay(int64_t delay_ms, TimeController* time_controller) {
+ time_controller_ = time_controller;
+ delay_ms_ = delay_ms;
+ }
+ bool SendRtp(const uint8_t* data,
+ size_t len,
+ const PacketOptions& options) override {
+ RTPHeader header;
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::CreateForTest());
+ EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
+ ++rtp_packets_sent_;
+ last_rtp_header_ = header;
+ return true;
+ }
+ bool SendRtcp(const uint8_t* data, size_t len) override {
+ test::RtcpPacketParser parser;
+ parser.Parse(data, len);
+ last_nack_list_ = parser.nack()->packet_ids();
+
+ if (time_controller_) {
+ time_controller_->AdvanceTime(TimeDelta::Millis(delay_ms_));
+ }
+ EXPECT_TRUE(receiver_);
+ receiver_->IncomingRtcpPacket(data, len);
+ ++rtcp_packets_sent_;
+ return true;
+ }
+
+ size_t NumRtcpSent() { return rtcp_packets_sent_; }
+
+ ModuleRtpRtcpImpl2* receiver_;
+ TimeController* time_controller_;
+ int64_t delay_ms_;
+ int rtp_packets_sent_;
+ size_t rtcp_packets_sent_;
+ RTPHeader last_rtp_header_;
+ std::vector<uint16_t> last_nack_list_;
+};
+
+class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
+ public:
+ RtpRtcpModule(TimeController* time_controller, bool is_sender)
+ : is_sender_(is_sender),
+ receive_statistics_(
+ ReceiveStatistics::Create(time_controller->GetClock())),
+ time_controller_(time_controller) {
+ CreateModuleImpl();
+ transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, time_controller);
+ }
+
+ const bool is_sender_;
+ RtcpPacketTypeCounter packets_sent_;
+ RtcpPacketTypeCounter packets_received_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
+ SendTransport transport_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> impl_;
+ int rtcp_report_interval_ms_ = 0;
+
+ void RtcpPacketTypesCounterUpdated(
+ uint32_t ssrc,
+ const RtcpPacketTypeCounter& packet_counter) override {
+ counter_map_[ssrc] = packet_counter;
+ }
+
+ RtcpPacketTypeCounter RtcpSent() {
+ // RTCP counters for remote SSRC.
+ return counter_map_[is_sender_ ? kReceiverSsrc : kSenderSsrc];
+ }
+
+ RtcpPacketTypeCounter RtcpReceived() {
+ // Received RTCP stats for (own) local SSRC.
+ return counter_map_[impl_->SSRC()];
+ }
+ int RtpSent() { return transport_.rtp_packets_sent_; }
+ uint16_t LastRtpSequenceNumber() {
+ return transport_.last_rtp_header_.sequenceNumber;
+ }
+ std::vector<uint16_t> LastNackListSent() {
+ return transport_.last_nack_list_;
+ }
+ void SetRtcpReportIntervalAndReset(int rtcp_report_interval_ms) {
+ rtcp_report_interval_ms_ = rtcp_report_interval_ms;
+ CreateModuleImpl();
+ }
+
+ private:
+ void CreateModuleImpl() {
+ RtpRtcpInterface::Configuration config;
+ config.audio = false;
+ config.clock = time_controller_->GetClock();
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtcp_packet_type_counter_observer = this;
+ config.rtt_stats = &rtt_stats_;
+ config.rtcp_report_interval_ms = rtcp_report_interval_ms_;
+ config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc;
+ config.need_rtp_packet_infos = true;
+
+ impl_.reset(new ModuleRtpRtcpImpl2(config));
+ impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc);
+ impl_->SetRTCPStatus(RtcpMode::kCompound);
+ }
+
+ TimeController* const time_controller_;
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map_;
+};
+} // namespace
+
+class RtpRtcpImpl2Test : public ::testing::Test {
+ protected:
+ RtpRtcpImpl2Test()
+ : time_controller_(Timestamp::Micros(133590000000000)),
+ sender_(&time_controller_, /*is_sender=*/true),
+ receiver_(&time_controller_, /*is_sender=*/false) {}
+
+ void SetUp() override {
+ // Send module.
+ EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true));
+ sender_.impl_->SetSendingMediaStatus(true);
+ sender_.impl_->SetSequenceNumber(kSequenceNumber);
+ sender_.impl_->SetStorePacketsStatus(true, 100);
+
+ FieldTrialBasedConfig field_trials;
+ RTPSenderVideo::Config video_config;
+ video_config.clock = time_controller_.GetClock();
+ video_config.rtp_sender = sender_.impl_->RtpSender();
+ video_config.field_trials = &field_trials;
+ sender_video_ = std::make_unique<RTPSenderVideo>(video_config);
+
+ memset(&codec_, 0, sizeof(VideoCodec));
+ codec_.plType = 100;
+ codec_.width = 320;
+ codec_.height = 180;
+
+ // Receive module.
+ EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false));
+ receiver_.impl_->SetSendingMediaStatus(false);
+ // Transport settings.
+ sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get());
+ receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
+ }
+
+ void AdvanceTimeMs(int64_t milliseconds) {
+ time_controller_.AdvanceTime(TimeDelta::Millis(milliseconds));
+ }
+
+ GlobalSimulatedTimeController time_controller_;
+ // test::RunLoop loop_;
+ // SimulatedClock clock_;
+ RtpRtcpModule sender_;
+ std::unique_ptr<RTPSenderVideo> sender_video_;
+ RtpRtcpModule receiver_;
+ VideoCodec codec_;
+
+ void SendFrame(const RtpRtcpModule* module,
+ RTPSenderVideo* sender,
+ uint8_t tid) {
+ RTPVideoHeaderVP8 vp8_header = {};
+ vp8_header.temporalIdx = tid;
+ RTPVideoHeader rtp_video_header;
+ rtp_video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ rtp_video_header.width = codec_.width;
+ rtp_video_header.height = codec_.height;
+ rtp_video_header.rotation = kVideoRotation_0;
+ rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
+ rtp_video_header.playout_delay = {-1, -1};
+ rtp_video_header.is_first_packet_in_frame = true;
+ rtp_video_header.simulcastIdx = 0;
+ rtp_video_header.codec = kVideoCodecVP8;
+ rtp_video_header.video_type_header = vp8_header;
+ rtp_video_header.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
+
+ const uint8_t payload[100] = {0};
+ EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, codec_.plType, true));
+ EXPECT_TRUE(sender->SendVideo(codec_.plType, VideoCodecType::kVideoCodecVP8,
+ 0, 0, payload, rtp_video_header, 0));
+ }
+
+ void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) {
+ bool sender = module->impl_->SSRC() == kSenderSsrc;
+ rtcp::Nack nack;
+ uint16_t list[1];
+ list[0] = sequence_number;
+ const uint16_t kListLength = sizeof(list) / sizeof(list[0]);
+ nack.SetSenderSsrc(sender ? kReceiverSsrc : kSenderSsrc);
+ nack.SetMediaSsrc(sender ? kSenderSsrc : kReceiverSsrc);
+ nack.SetPacketIds(list, kListLength);
+ rtc::Buffer packet = nack.Build();
+ module->impl_->IncomingRtcpPacket(packet.data(), packet.size());
+ }
+};
+
+TEST_F(RtpRtcpImpl2Test, RetransmitsAllLayers) {
+ // Send frames.
+ EXPECT_EQ(0, sender_.RtpSent());
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); // kSequenceNumber
+ SendFrame(&sender_, sender_video_.get(),
+ kHigherLayerTid); // kSequenceNumber + 1
+ SendFrame(&sender_, sender_video_.get(),
+ kNoTemporalIdx); // kSequenceNumber + 2
+ EXPECT_EQ(3, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber());
+
+ // Min required delay until retransmit = 5 + RTT ms (RTT = 0).
+ AdvanceTimeMs(5);
+
+ // Frame with kBaseLayerTid re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber);
+ EXPECT_EQ(4, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber, sender_.LastRtpSequenceNumber());
+ // Frame with kHigherLayerTid re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber + 1);
+ EXPECT_EQ(5, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 1, sender_.LastRtpSequenceNumber());
+ // Frame with kNoTemporalIdx re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber + 2);
+ EXPECT_EQ(6, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber());
+}
+
+TEST_F(RtpRtcpImpl2Test, Rtt) {
+ RtpPacketReceived packet;
+ packet.SetTimestamp(1);
+ packet.SetSequenceNumber(123);
+ packet.SetSsrc(kSenderSsrc);
+ packet.AllocatePayload(100 - 12);
+ receiver_.receive_statistics_->OnRtpPacket(packet);
+
+ // Send Frame before sending an SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ // Sender module should send an SR.
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
+
+ // Receiver module should send a RR with a response to the last received SR.
+ AdvanceTimeMs(1000);
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
+
+ // Verify RTT.
+ int64_t rtt;
+ int64_t avg_rtt;
+ int64_t min_rtt;
+ int64_t max_rtt;
+ EXPECT_EQ(
+ 0, sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, avg_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, min_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, max_rtt, 1);
+
+ // No RTT from other ssrc.
+ EXPECT_EQ(-1, sender_.impl_->RTT(kReceiverSsrc + 1, &rtt, &avg_rtt, &min_rtt,
+ &max_rtt));
+
+ // Verify RTT from rtt_stats config.
+ EXPECT_EQ(0, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0, sender_.impl_->rtt_ms());
+ AdvanceTimeMs(1000);
+
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt(),
+ 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms(), 1);
+}
+
+TEST_F(RtpRtcpImpl2Test, SetRtcpXrRrtrStatus) {
+ EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus());
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+ EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus());
+}
+
+TEST_F(RtpRtcpImpl2Test, RttForReceiverOnly) {
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+
+ // Receiver module should send a Receiver time reference report (RTRR).
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
+
+ // Sender module should send a response to the last received RTRR (DLRR).
+ AdvanceTimeMs(1000);
+ // Send Frame before sending a SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
+
+ // Verify RTT.
+ EXPECT_EQ(0, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0, receiver_.impl_->rtt_ms());
+ AdvanceTimeMs(1000);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs,
+ receiver_.rtt_stats_.LastProcessedRtt(), 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms(), 1);
+}
+
+TEST_F(RtpRtcpImpl2Test, NoSrBeforeMedia) {
+ // Ignore fake transport delays in this test.
+ sender_.transport_.SimulateNetworkDelay(0, &time_controller_);
+ receiver_.transport_.SimulateNetworkDelay(0, &time_controller_);
+
+ sender_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+
+ // Verify no SR is sent before media has been sent, RR should still be sent
+ // from the receiving module though.
+ AdvanceTimeMs(2000);
+ int64_t current_time = time_controller_.GetClock()->TimeInMilliseconds();
+ sender_.impl_->Process();
+ receiver_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+ EXPECT_EQ(receiver_.RtcpSent().first_packet_time_ms, current_time);
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, current_time);
+}
+
+TEST_F(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) {
+ EXPECT_EQ(-1, receiver_.RtcpSent().first_packet_time_ms);
+ EXPECT_EQ(-1, sender_.RtcpReceived().first_packet_time_ms);
+ EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+
+ // Receive module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+ EXPECT_GT(receiver_.RtcpSent().first_packet_time_ms, -1);
+
+ // Send module receives the NACK.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+ EXPECT_GT(sender_.RtcpReceived().first_packet_time_ms, -1);
+}
+
+TEST_F(RtpRtcpImpl2Test, AddStreamDataCounters) {
+ StreamDataCounters rtp;
+ const int64_t kStartTimeMs = 1;
+ rtp.first_packet_time_ms = kStartTimeMs;
+ rtp.transmitted.packets = 1;
+ rtp.transmitted.payload_bytes = 1;
+ rtp.transmitted.header_bytes = 2;
+ rtp.transmitted.padding_bytes = 3;
+ EXPECT_EQ(rtp.transmitted.TotalBytes(), rtp.transmitted.payload_bytes +
+ rtp.transmitted.header_bytes +
+ rtp.transmitted.padding_bytes);
+
+ StreamDataCounters rtp2;
+ rtp2.first_packet_time_ms = -1;
+ rtp2.transmitted.packets = 10;
+ rtp2.transmitted.payload_bytes = 10;
+ rtp2.retransmitted.header_bytes = 4;
+ rtp2.retransmitted.payload_bytes = 5;
+ rtp2.retransmitted.padding_bytes = 6;
+ rtp2.retransmitted.packets = 7;
+ rtp2.fec.packets = 8;
+
+ StreamDataCounters sum = rtp;
+ sum.Add(rtp2);
+ EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms);
+ EXPECT_EQ(11U, sum.transmitted.packets);
+ EXPECT_EQ(11U, sum.transmitted.payload_bytes);
+ EXPECT_EQ(2U, sum.transmitted.header_bytes);
+ EXPECT_EQ(3U, sum.transmitted.padding_bytes);
+ EXPECT_EQ(4U, sum.retransmitted.header_bytes);
+ EXPECT_EQ(5U, sum.retransmitted.payload_bytes);
+ EXPECT_EQ(6U, sum.retransmitted.padding_bytes);
+ EXPECT_EQ(7U, sum.retransmitted.packets);
+ EXPECT_EQ(8U, sum.fec.packets);
+ EXPECT_EQ(sum.transmitted.TotalBytes(),
+ rtp.transmitted.TotalBytes() + rtp2.transmitted.TotalBytes());
+
+ StreamDataCounters rtp3;
+ rtp3.first_packet_time_ms = kStartTimeMs + 10;
+ sum.Add(rtp3);
+ EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); // Holds oldest time.
+}
+
+TEST_F(RtpRtcpImpl2Test, SendsInitialNackList) {
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+}
+
+TEST_F(RtpRtcpImpl2Test, SendsExtendedNackList) {
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+
+ // Same list not re-send.
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+
+ // Only extended list sent.
+ const uint16_t kNackExtLength = 2;
+ uint16_t nack_list_ext[kNackExtLength] = {123, 124};
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list_ext, kNackExtLength));
+ EXPECT_EQ(2U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(124));
+}
+
+TEST_F(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) {
+ sender_.transport_.SimulateNetworkDelay(0, &time_controller_);
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 2;
+ uint16_t nack_list[kNackLength] = {123, 125};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
+
+ // Same list not re-send, rtt interval has not passed.
+ const int kStartupRttMs = 100;
+ AdvanceTimeMs(kStartupRttMs);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+
+ // Rtt interval passed, full list sent.
+ AdvanceTimeMs(1);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(2U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
+}
+
+TEST_F(RtpRtcpImpl2Test, UniqueNackRequests) {
+ receiver_.transport_.SimulateNetworkDelay(0, &time_controller_);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(0U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_EQ(0, receiver_.RtcpSent().UniqueNackRequestsInPercent());
+
+ // Receive module sends NACK request.
+ const uint16_t kNackLength = 4;
+ uint16_t nack_list[kNackLength] = {10, 11, 13, 18};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(4U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(4U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(10, 11, 13, 18));
+
+ // Send module receives the request.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(4U, sender_.RtcpReceived().nack_requests);
+ EXPECT_EQ(4U, sender_.RtcpReceived().unique_nack_requests);
+ EXPECT_EQ(100, sender_.RtcpReceived().UniqueNackRequestsInPercent());
+
+ // Receive module sends new request with duplicated packets.
+ const int kStartupRttMs = 100;
+ AdvanceTimeMs(kStartupRttMs + 1);
+ const uint16_t kNackLength2 = 4;
+ uint16_t nack_list2[kNackLength2] = {11, 18, 20, 21};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list2, kNackLength2));
+ EXPECT_EQ(2U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(8U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(6U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(11, 18, 20, 21));
+
+ // Send module receives the request.
+ EXPECT_EQ(2U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(8U, sender_.RtcpReceived().nack_requests);
+ EXPECT_EQ(6U, sender_.RtcpReceived().unique_nack_requests);
+ EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent());
+}
+
+TEST_F(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) {
+ const int kVideoReportInterval = 3000;
+
+ // Recreate sender impl with new configuration, and redo setup.
+ sender_.SetRtcpReportIntervalAndReset(kVideoReportInterval);
+ SetUp();
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+
+ // Initial state
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(0u, sender_.transport_.NumRtcpSent());
+
+ // Move ahead to the last ms before a rtcp is expected, no action.
+ AdvanceTimeMs(kVideoReportInterval / 2 - 1);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 0u);
+
+ // Move ahead to the first rtcp. Send RTCP.
+ AdvanceTimeMs(1);
+ sender_.impl_->Process();
+ EXPECT_GT(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+
+ // Move ahead to the last possible second before second rtcp is expected.
+ AdvanceTimeMs(kVideoReportInterval * 1 / 2 - 1);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
+
+ // Move ahead into the range of second rtcp, the second rtcp may be sent.
+ AdvanceTimeMs(1);
+ sender_.impl_->Process();
+ EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
+
+ AdvanceTimeMs(kVideoReportInterval / 2);
+ sender_.impl_->Process();
+ EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
+
+ // Move out the range of second rtcp, the second rtcp must have been sent.
+ AdvanceTimeMs(kVideoReportInterval / 2);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u);
+}
+
+TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) {
+ const uint32_t kStartTimestamp = 1u;
+ SetUp();
+ sender_.impl_->SetStartTimestamp(kStartTimestamp);
+
+ PacedPacketInfo pacing_info;
+ RtpPacketToSend packet(nullptr);
+ packet.set_packet_type(RtpPacketToSend::Type::kVideo);
+ packet.SetSsrc(kSenderSsrc);
+
+ // Single-packet frame.
+ packet.SetTimestamp(1);
+ packet.SetSequenceNumber(1);
+ packet.set_first_packet_of_frame(true);
+ packet.SetMarker(true);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+ AdvanceTimeMs(1);
+
+ std::vector<RtpSequenceNumberMap::Info> seqno_info =
+ sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{1});
+
+ EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info(
+ /*timestamp=*/1 - kStartTimestamp,
+ /*is_first=*/1,
+ /*is_last=*/1)));
+
+ // Three-packet frame.
+ packet.SetTimestamp(2);
+ packet.SetSequenceNumber(2);
+ packet.set_first_packet_of_frame(true);
+ packet.SetMarker(false);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ packet.SetSequenceNumber(3);
+ packet.set_first_packet_of_frame(false);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ packet.SetSequenceNumber(4);
+ packet.SetMarker(true);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ AdvanceTimeMs(1);
+
+ seqno_info =
+ sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{2, 3, 4});
+
+ EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/1,
+ /*is_last=*/0),
+ RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/0,
+ /*is_last=*/0),
+ RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/0,
+ /*is_last=*/1)));
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index e2595664f6..aefa91e23e 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -143,7 +143,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
private:
void CreateModuleImpl() {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.audio = false;
config.clock = clock_;
config.outgoing_transport = &transport_;
@@ -225,7 +225,7 @@ class RtpRtcpImplTest : public ::testing::Test {
const uint8_t payload[100] = {0};
EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, codec_.plType, true));
EXPECT_TRUE(sender->SendVideo(codec_.plType, VideoCodecType::kVideoCodecVP8,
- 0, 0, payload, nullptr, rtp_video_header, 0));
+ 0, 0, payload, rtp_video_header, 0));
}
void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) {
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h
new file mode 100644
index 0000000000..f763da244c
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h
@@ -0,0 +1,435 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
+#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/frame_transformer_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/webrtc_key_value_config.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/report_block_data.h"
+#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
+#include "modules/rtp_rtcp/source/video_fec_generator.h"
+#include "rtc_base/constructor_magic.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class FrameEncryptorInterface;
+class RateLimiter;
+class RemoteBitrateEstimator;
+class RtcEventLog;
+class RTPSender;
+class Transport;
+class VideoBitrateAllocationObserver;
+
+class RtpRtcpInterface : public RtcpFeedbackSenderInterface {
+ public:
+ struct Configuration {
+ Configuration() = default;
+ Configuration(Configuration&& rhs) = default;
+
+ // True for a audio version of the RTP/RTCP module object false will create
+ // a video version.
+ bool audio = false;
+ bool receiver_only = false;
+
+ // The clock to use to read time. If nullptr then system clock will be used.
+ Clock* clock = nullptr;
+
+ ReceiveStatisticsProvider* receive_statistics = nullptr;
+
+ // Transport object that will be called when packets are ready to be sent
+ // out on the network.
+ Transport* outgoing_transport = nullptr;
+
+ // Called when the receiver requests an intra frame.
+ RtcpIntraFrameObserver* intra_frame_callback = nullptr;
+
+ // Called when the receiver sends a loss notification.
+ RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr;
+
+ // Called when we receive a changed estimate from the receiver of out
+ // stream.
+ RtcpBandwidthObserver* bandwidth_callback = nullptr;
+
+ NetworkStateEstimateObserver* network_state_estimate_observer = nullptr;
+ TransportFeedbackObserver* transport_feedback_callback = nullptr;
+ VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr;
+ RtcpRttStats* rtt_stats = nullptr;
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
+ // Called on receipt of RTCP report block from remote side.
+ // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in
+ // favor of ReportBlockDataObserver.
+ // TODO(bugs.webrtc.org/10679): Consider whether we want to use
+ // only getters or only callbacks. If we decide on getters, the
+ // ReportBlockDataObserver should also be removed in favor of
+ // GetLatestReportBlockData().
+ RtcpStatisticsCallback* rtcp_statistics_callback = nullptr;
+ RtcpCnameCallback* rtcp_cname_callback = nullptr;
+ ReportBlockDataObserver* report_block_data_observer = nullptr;
+
+ // Estimates the bandwidth available for a set of streams from the same
+ // client.
+ RemoteBitrateEstimator* remote_bitrate_estimator = nullptr;
+
+ // Spread any bursts of packets into smaller bursts to minimize packet loss.
+ RtpPacketSender* paced_sender = nullptr;
+
+ // Generates FEC packets.
+ // TODO(sprang): Wire up to RtpSenderEgress.
+ VideoFecGenerator* fec_generator = nullptr;
+
+ BitrateStatisticsObserver* send_bitrate_observer = nullptr;
+ SendSideDelayObserver* send_side_delay_observer = nullptr;
+ RtcEventLog* event_log = nullptr;
+ SendPacketObserver* send_packet_observer = nullptr;
+ RateLimiter* retransmission_rate_limiter = nullptr;
+ StreamDataCountersCallback* rtp_stats_callback = nullptr;
+
+ int rtcp_report_interval_ms = 0;
+
+ // Update network2 instead of pacer_exit field of video timing extension.
+ bool populate_network2_timestamp = false;
+
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
+
+ // E2EE Custom Video Frame Encryption
+ FrameEncryptorInterface* frame_encryptor = nullptr;
+ // Require all outgoing frames to be encrypted with a FrameEncryptor.
+ bool require_frame_encryption = false;
+
+ // Corresponds to extmap-allow-mixed in SDP negotiation.
+ bool extmap_allow_mixed = false;
+
+ // If true, the RTP sender will always annotate outgoing packets with
+ // MID and RID header extensions, if provided and negotiated.
+ // If false, the RTP sender will stop sending MID and RID header extensions,
+ // when it knows that the receiver is ready to demux based on SSRC. This is
+ // done by RTCP RR acking.
+ bool always_send_mid_and_rid = false;
+
+ // If set, field trials are read from |field_trials|, otherwise
+ // defaults to webrtc::FieldTrialBasedConfig.
+ const WebRtcKeyValueConfig* field_trials = nullptr;
+
+ // SSRCs for media and retransmission, respectively.
+ // FlexFec SSRC is fetched from |flexfec_sender|.
+ uint32_t local_media_ssrc = 0;
+ absl::optional<uint32_t> rtx_send_ssrc;
+
+ bool need_rtp_packet_infos = false;
+
+ // If true, the RTP packet history will select RTX packets based on
+ // heuristics such as send time, retransmission count etc, in order to
+ // make padding potentially more useful.
+ // If false, the last packet will always be picked. This may reduce CPU
+ // overhead.
+ bool enable_rtx_padding_prioritization = true;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
+ };
+
+ // **************************************************************************
+ // Receiver functions
+ // **************************************************************************
+
+ virtual void IncomingRtcpPacket(const uint8_t* incoming_packet,
+ size_t incoming_packet_length) = 0;
+
+ virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
+
+ // **************************************************************************
+ // Sender
+ // **************************************************************************
+
+ // Sets the maximum size of an RTP packet, including RTP headers.
+ virtual void SetMaxRtpPacketSize(size_t size) = 0;
+
+ // Returns max RTP packet size. Takes into account RTP headers and
+ // FEC/ULP/RED overhead (when FEC is enabled).
+ virtual size_t MaxRtpPacketSize() const = 0;
+
+ virtual void RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) = 0;
+
+ // Unregisters a send payload.
+ // |payload_type| - payload type of codec
+ // Returns -1 on failure else 0.
+ virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0;
+
+ virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0;
+
+ // Register extension by uri, triggers CHECK on falure.
+ virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0;
+
+ virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
+ virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0;
+
+ // Returns true if RTP module is send media, and any of the extensions
+ // required for bandwidth estimation is registered.
+ virtual bool SupportsPadding() const = 0;
+ // Same as SupportsPadding(), but additionally requires that
+ // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option
+ // enabled.
+ virtual bool SupportsRtxPayloadPadding() const = 0;
+
+ // Returns start timestamp.
+ virtual uint32_t StartTimestamp() const = 0;
+
+ // Sets start timestamp. Start timestamp is set to a random value if this
+ // function is never called.
+ virtual void SetStartTimestamp(uint32_t timestamp) = 0;
+
+ // Returns SequenceNumber.
+ virtual uint16_t SequenceNumber() const = 0;
+
+ // Sets SequenceNumber, default is a random number.
+ virtual void SetSequenceNumber(uint16_t seq) = 0;
+
+ virtual void SetRtpState(const RtpState& rtp_state) = 0;
+ virtual void SetRtxState(const RtpState& rtp_state) = 0;
+ virtual RtpState GetRtpState() const = 0;
+ virtual RtpState GetRtxState() const = 0;
+
+ // Returns SSRC.
+ virtual uint32_t SSRC() const = 0;
+
+ // Sets the value for sending in the RID (and Repaired) RTP header extension.
+ // RIDs are used to identify an RTP stream if SSRCs are not negotiated.
+ // If the RID and Repaired RID extensions are not registered, the RID will
+ // not be sent.
+ virtual void SetRid(const std::string& rid) = 0;
+
+ // Sets the value for sending in the MID RTP header extension.
+ // The MID RTP header extension should be registered for this to do anything.
+ // Once set, this value can not be changed or removed.
+ virtual void SetMid(const std::string& mid) = 0;
+
+ // Sets CSRC.
+ // |csrcs| - vector of CSRCs
+ virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
+
+ // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination
+ // of values of the enumerator RtxMode.
+ virtual void SetRtxSendStatus(int modes) = 0;
+
+ // Returns status of sending RTX (RFC 4588). The returned value can be
+ // a combination of values of the enumerator RtxMode.
+ virtual int RtxSendStatus() const = 0;
+
+ // Returns the SSRC used for RTX if set, otherwise a nullopt.
+ virtual absl::optional<uint32_t> RtxSsrc() const = 0;
+
+ // Sets the payload type to use when sending RTX packets. Note that this
+ // doesn't enable RTX, only the payload type is set.
+ virtual void SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) = 0;
+
+ // Returns the FlexFEC SSRC, if there is one.
+ virtual absl::optional<uint32_t> FlexfecSsrc() const = 0;
+
+ // Sets sending status. Sends kRtcpByeCode when going from true to false.
+ // Returns -1 on failure else 0.
+ virtual int32_t SetSendingStatus(bool sending) = 0;
+
+ // Returns current sending status.
+ virtual bool Sending() const = 0;
+
+ // Starts/Stops media packets. On by default.
+ virtual void SetSendingMediaStatus(bool sending) = 0;
+
+ // Returns current media sending status.
+ virtual bool SendingMedia() const = 0;
+
+ // Returns whether audio is configured (i.e. Configuration::audio = true).
+ virtual bool IsAudioConfigured() const = 0;
+
+ // Indicate that the packets sent by this module should be counted towards the
+ // bitrate estimate since the stream participates in the bitrate allocation.
+ virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0;
+
+ // TODO(sprang): Remove when all call sites have been moved to
+ // GetSendRates(). Fetches the current send bitrates in bits/s.
+ virtual void BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nack_rate) const = 0;
+
+ // Returns bitrate sent (post-pacing) per packet type.
+ virtual RtpSendRates GetSendRates() const = 0;
+
+ virtual RTPSender* RtpSender() = 0;
+ virtual const RTPSender* RtpSender() const = 0;
+
+ // Record that a frame is about to be sent. Returns true on success, and false
+ // if the module isn't ready to send.
+ virtual bool OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) = 0;
+
+ // Try to send the provided packet. Returns true iff packet matches any of
+ // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the
+ // transport.
+ virtual bool TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) = 0;
+
+ // Update the FEC protection parameters to use for delta- and key-frames.
+ // Only used when deferred FEC is active.
+ virtual void SetFecProtectionParams(
+ const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params) = 0;
+
+ // If deferred FEC generation is enabled, this method should be called after
+ // calling TrySendPacket(). Any generated FEC packets will be removed and
+ // returned from the FEC generator.
+ virtual std::vector<std::unique_ptr<RtpPacketToSend>> FetchFecPackets() = 0;
+
+ virtual void OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) = 0;
+
+ virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
+ size_t target_size_bytes) = 0;
+
+ virtual std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const = 0;
+
+ // Returns an expected per packet overhead representing the main RTP header,
+ // any CSRCs, and the registered header extensions that are expected on all
+ // packets (i.e. disregarding things like abs capture time which is only
+ // populated on a subset of packets, but counting MID/RID type extensions
+ // when we expect to send them).
+ virtual size_t ExpectedPerPacketOverhead() const = 0;
+
+ // **************************************************************************
+ // RTCP
+ // **************************************************************************
+
+ // Returns RTCP status.
+ virtual RtcpMode RTCP() const = 0;
+
+ // Sets RTCP status i.e on(compound or non-compound)/off.
+ // |method| - RTCP method to use.
+ virtual void SetRTCPStatus(RtcpMode method) = 0;
+
+ // Sets RTCP CName (i.e unique identifier).
+ // Returns -1 on failure else 0.
+ virtual int32_t SetCNAME(const char* cname) = 0;
+
+ // Returns remote NTP.
+ // Returns -1 on failure else 0.
+ virtual int32_t RemoteNTP(uint32_t* received_ntp_secs,
+ uint32_t* received_ntp_frac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const = 0;
+
+ // Returns current RTT (round-trip time) estimate.
+ // Returns -1 on failure else 0.
+ virtual int32_t RTT(uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const = 0;
+
+ // Returns the estimated RTT, with fallback to a default value.
+ virtual int64_t ExpectedRetransmissionTimeMs() const = 0;
+
+ // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the
+ // process function.
+ // Returns -1 on failure else 0.
+ virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0;
+
+ // Returns send statistics for the RTP and RTX stream.
+ virtual void GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const = 0;
+
+ // Returns received RTCP report block.
+ // Returns -1 on failure else 0.
+ // TODO(https://crbug.com/webrtc/10678): Remove this in favor of
+ // GetLatestReportBlockData().
+ virtual int32_t RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const = 0;
+ // A snapshot of Report Blocks with additional data of interest to statistics.
+ // Within this list, the sender-source SSRC pair is unique and per-pair the
+ // ReportBlockData represents the latest Report Block that was received for
+ // that pair.
+ virtual std::vector<ReportBlockData> GetLatestReportBlockData() const = 0;
+
+ // (XR) Sets Receiver Reference Time Report (RTTR) status.
+ virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
+
+ // Returns current Receiver Reference Time Report (RTTR) status.
+ virtual bool RtcpXrRrtrStatus() const = 0;
+
+ // (REMB) Receiver Estimated Max Bitrate.
+ // Schedules sending REMB on next and following sender/receiver reports.
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override = 0;
+ // Stops sending REMB on next and following sender/receiver reports.
+ void UnsetRemb() override = 0;
+
+ // (NACK)
+
+ // Sends a Negative acknowledgement packet.
+ // Returns -1 on failure else 0.
+ // TODO(philipel): Deprecate this and start using SendNack instead, mostly
+ // because we want a function that actually send NACK for the specified
+ // packets.
+ virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0;
+
+ // Sends NACK for the packets specified.
+ // Note: This assumes the caller keeps track of timing and doesn't rely on
+ // the RTP module to do this.
+ virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
+
+ // Store the sent packets, needed to answer to a Negative acknowledgment
+ // requests.
+ virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
+
+ // Returns true if the module is configured to store packets.
+ virtual bool StorePackets() const = 0;
+
+ virtual void SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) = 0;
+
+ // **************************************************************************
+ // Video
+ // **************************************************************************
+
+ // Requests new key frame.
+ // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1
+ void SendPictureLossIndication() { SendRTCP(kRtcpPli); }
+ // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2
+ void SendFullIntraRequest() { SendRTCP(kRtcpFir); }
+
+ // Sends a LossNotification RTCP message.
+ // Returns -1 on failure else 0.
+ virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc
index 3d60552e9b..1193068057 100644
--- a/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/modules/rtp_rtcp/source/rtp_sender.cc
@@ -108,9 +108,7 @@ bool IsNonVolatile(RTPExtensionType type) {
case kRtpExtensionAbsoluteSendTime:
case kRtpExtensionTransportSequenceNumber:
case kRtpExtensionTransportSequenceNumber02:
- case kRtpExtensionFrameMarking:
case kRtpExtensionRtpStreamId:
- case kRtpExtensionRepairedRtpStreamId:
case kRtpExtensionMid:
case kRtpExtensionGenericFrameDescriptor00:
case kRtpExtensionGenericFrameDescriptor02:
@@ -121,6 +119,7 @@ bool IsNonVolatile(RTPExtensionType type) {
case kRtpExtensionPlayoutDelay:
case kRtpExtensionVideoContentType:
case kRtpExtensionVideoTiming:
+ case kRtpExtensionRepairedRtpStreamId:
case kRtpExtensionColorSpace:
return false;
case kRtpExtensionNone:
@@ -154,7 +153,7 @@ double GetMaxPaddingSizeFactor(const WebRtcKeyValueConfig* field_trials) {
} // namespace
-RTPSender::RTPSender(const RtpRtcp::Configuration& config,
+RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history,
RtpPacketSender* packet_sender)
: clock_(config.clock),
@@ -224,13 +223,13 @@ rtc::ArrayView<const RtpExtensionSize> RTPSender::AudioExtensionSizes() {
}
void RTPSender::SetExtmapAllowMixed(bool extmap_allow_mixed) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
rtp_header_extension_map_.SetExtmapAllowMixed(extmap_allow_mixed);
}
int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type,
uint8_t id) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
bool registered = rtp_header_extension_map_.RegisterByType(id, type);
supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_);
UpdateHeaderSizes();
@@ -238,7 +237,7 @@ int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type,
}
bool RTPSender::RegisterRtpHeaderExtension(absl::string_view uri, int id) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
bool registered = rtp_header_extension_map_.RegisterByUri(id, uri);
supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_);
UpdateHeaderSizes();
@@ -246,12 +245,12 @@ bool RTPSender::RegisterRtpHeaderExtension(absl::string_view uri, int id) {
}
bool RTPSender::IsRtpHeaderExtensionRegistered(RTPExtensionType type) const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return rtp_header_extension_map_.IsRegistered(type);
}
int32_t RTPSender::DeregisterRtpHeaderExtension(RTPExtensionType type) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
rtp_header_extension_map_.Deregister(type);
supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_);
UpdateHeaderSizes();
@@ -259,7 +258,7 @@ int32_t RTPSender::DeregisterRtpHeaderExtension(RTPExtensionType type) {
}
void RTPSender::DeregisterRtpHeaderExtension(absl::string_view uri) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
rtp_header_extension_map_.Deregister(uri);
supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_);
UpdateHeaderSizes();
@@ -268,7 +267,7 @@ void RTPSender::DeregisterRtpHeaderExtension(absl::string_view uri) {
void RTPSender::SetMaxRtpPacketSize(size_t max_packet_size) {
RTC_DCHECK_GE(max_packet_size, 100);
RTC_DCHECK_LE(max_packet_size, IP_PACKET_SIZE);
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
max_packet_size_ = max_packet_size;
}
@@ -277,18 +276,18 @@ size_t RTPSender::MaxRtpPacketSize() const {
}
void RTPSender::SetRtxStatus(int mode) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
rtx_ = mode;
}
int RTPSender::RtxStatus() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return rtx_;
}
void RTPSender::SetRtxPayloadType(int payload_type,
int associated_payload_type) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
RTC_DCHECK_LE(payload_type, 127);
RTC_DCHECK_LE(associated_payload_type, 127);
if (payload_type < 0) {
@@ -347,7 +346,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) {
}
void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
bool update_required = !ssrc_has_acked_;
ssrc_has_acked_ = true;
if (update_required) {
@@ -357,7 +356,7 @@ void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) {
void RTPSender::OnReceivedAckOnRtxSsrc(
int64_t extended_highest_sequence_number) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
rtx_ssrc_has_acked_ = true;
}
@@ -377,12 +376,12 @@ void RTPSender::OnReceivedNack(
}
bool RTPSender::SupportsPadding() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return sending_media_ && supports_bwe_extension_;
}
bool RTPSender::SupportsRtxPayloadPadding() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return sending_media_ && supports_bwe_extension_ &&
(rtx_ & kRtxRedundantPayloads);
}
@@ -424,14 +423,14 @@ std::vector<std::unique_ptr<RtpPacketToSend>> RTPSender::GeneratePadding(
}
}
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
if (!sending_media_) {
return {};
}
size_t padding_bytes_in_packet;
const size_t max_payload_size =
- max_packet_size_ - FecOrPaddingPacketMaxRtpHeaderLength();
+ max_packet_size_ - max_padding_fec_packet_header_;
if (audio_configured_) {
// Allow smaller padding packets for audio.
padding_bytes_in_packet = rtc::SafeClamp<size_t>(
@@ -485,8 +484,11 @@ std::vector<std::unique_ptr<RtpPacketToSend>> RTPSender::GeneratePadding(
padding_packet->SetTimestamp(padding_packet->Timestamp() +
(now_ms - last_timestamp_time_ms_) *
kTimestampTicksPerMs);
- padding_packet->set_capture_time_ms(padding_packet->capture_time_ms() +
- (now_ms - last_timestamp_time_ms_));
+ if (padding_packet->capture_time_ms() > 0) {
+ padding_packet->set_capture_time_ms(
+ padding_packet->capture_time_ms() +
+ (now_ms - last_timestamp_time_ms_));
+ }
}
RTC_DCHECK(rtx_ssrc_);
padding_packet->SetSsrc(*rtx_ssrc_);
@@ -547,24 +549,24 @@ void RTPSender::EnqueuePackets(
}
size_t RTPSender::FecOrPaddingPacketMaxRtpHeaderLength() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return max_padding_fec_packet_header_;
}
size_t RTPSender::ExpectedPerPacketOverhead() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return max_media_packet_header_;
}
uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
uint16_t first_allocated_sequence_number = sequence_number_;
sequence_number_ += packets_to_send;
return first_allocated_sequence_number;
}
std::unique_ptr<RtpPacketToSend> RTPSender::AllocatePacket() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
// TODO(danilchap): Find better motivator and value for extra capacity.
// RtpPacketizer might slightly miscalulate needed size,
// SRTP may benefit from extra space in the buffer and do encryption in place
@@ -606,7 +608,7 @@ std::unique_ptr<RtpPacketToSend> RTPSender::AllocatePacket() const {
}
bool RTPSender::AssignSequenceNumber(RtpPacketToSend* packet) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
if (!sending_media_)
return false;
RTC_DCHECK(packet->Ssrc() == ssrc_);
@@ -625,12 +627,12 @@ bool RTPSender::AssignSequenceNumber(RtpPacketToSend* packet) {
}
void RTPSender::SetSendingMediaStatus(bool enabled) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
sending_media_ = enabled;
}
bool RTPSender::SendingMedia() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return sending_media_;
}
@@ -639,18 +641,18 @@ bool RTPSender::IsAudioConfigured() const {
}
void RTPSender::SetTimestampOffset(uint32_t timestamp) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
timestamp_offset_ = timestamp;
}
uint32_t RTPSender::TimestampOffset() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return timestamp_offset_;
}
void RTPSender::SetRid(const std::string& rid) {
// RID is used in simulcast scenario when multiple layers share the same mid.
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
RTC_DCHECK_LE(rid.length(), RtpStreamId::kMaxValueSizeBytes);
rid_ = rid;
UpdateHeaderSizes();
@@ -658,7 +660,7 @@ void RTPSender::SetRid(const std::string& rid) {
void RTPSender::SetMid(const std::string& mid) {
// This is configured via the API.
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
RTC_DCHECK_LE(mid.length(), RtpMid::kMaxValueSizeBytes);
mid_ = mid;
UpdateHeaderSizes();
@@ -666,7 +668,7 @@ void RTPSender::SetMid(const std::string& mid) {
void RTPSender::SetCsrcs(const std::vector<uint32_t>& csrcs) {
RTC_DCHECK_LE(csrcs.size(), kRtpCsrcSize);
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
csrcs_ = csrcs;
UpdateHeaderSizes();
}
@@ -674,7 +676,7 @@ void RTPSender::SetCsrcs(const std::vector<uint32_t>& csrcs) {
void RTPSender::SetSequenceNumber(uint16_t seq) {
bool updated_sequence_number = false;
{
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
sequence_number_forced_ = true;
if (sequence_number_ != seq) {
updated_sequence_number = true;
@@ -690,7 +692,7 @@ void RTPSender::SetSequenceNumber(uint16_t seq) {
}
uint16_t RTPSender::SequenceNumber() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return sequence_number_;
}
@@ -748,7 +750,7 @@ std::unique_ptr<RtpPacketToSend> RTPSender::BuildRtxPacket(
// Add original RTP header.
{
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
if (!sending_media_)
return nullptr;
@@ -814,7 +816,7 @@ std::unique_ptr<RtpPacketToSend> RTPSender::BuildRtxPacket(
}
void RTPSender::SetRtpState(const RtpState& rtp_state) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
sequence_number_ = rtp_state.sequence_number;
sequence_number_forced_ = true;
timestamp_offset_ = rtp_state.start_timestamp;
@@ -826,7 +828,7 @@ void RTPSender::SetRtpState(const RtpState& rtp_state) {
}
RtpState RTPSender::GetRtpState() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
RtpState state;
state.sequence_number = sequence_number_;
@@ -839,13 +841,13 @@ RtpState RTPSender::GetRtpState() const {
}
void RTPSender::SetRtxRtpState(const RtpState& rtp_state) {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
sequence_number_rtx_ = rtp_state.sequence_number;
rtx_ssrc_has_acked_ = rtp_state.ssrc_has_acked;
}
RtpState RTPSender::GetRtxRtpState() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
RtpState state;
state.sequence_number = sequence_number_rtx_;
@@ -856,7 +858,7 @@ RtpState RTPSender::GetRtxRtpState() const {
}
int64_t RTPSender::LastTimestampTimeMs() const {
- rtc::CritScope lock(&send_critsect_);
+ MutexLock lock(&send_mutex_);
return last_timestamp_time_ms_;
}
diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h
index a14c3ae1a8..fd1a8da75a 100644
--- a/modules/rtp_rtcp/source/rtp_sender.h
+++ b/modules/rtp_rtcp/source/rtp_sender.h
@@ -25,15 +25,15 @@
#include "modules/rtp_rtcp/include/flexfec_sender.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/deprecation.h"
#include "rtc_base/random.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -45,105 +45,130 @@ class RtpPacketToSend;
class RTPSender {
public:
- RTPSender(const RtpRtcp::Configuration& config,
+ RTPSender(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history,
RtpPacketSender* packet_sender);
~RTPSender();
- void SetSendingMediaStatus(bool enabled);
- bool SendingMedia() const;
- bool IsAudioConfigured() const;
+ void SetSendingMediaStatus(bool enabled) RTC_LOCKS_EXCLUDED(send_mutex_);
+ bool SendingMedia() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ bool IsAudioConfigured() const RTC_LOCKS_EXCLUDED(send_mutex_);
- uint32_t TimestampOffset() const;
- void SetTimestampOffset(uint32_t timestamp);
+ uint32_t TimestampOffset() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetRid(const std::string& rid);
+ void SetRid(const std::string& rid) RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetMid(const std::string& mid);
+ void SetMid(const std::string& mid) RTC_LOCKS_EXCLUDED(send_mutex_);
- uint16_t SequenceNumber() const;
- void SetSequenceNumber(uint16_t seq);
+ uint16_t SequenceNumber() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ void SetSequenceNumber(uint16_t seq) RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetCsrcs(const std::vector<uint32_t>& csrcs);
+ void SetCsrcs(const std::vector<uint32_t>& csrcs)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetMaxRtpPacketSize(size_t max_packet_size);
+ void SetMaxRtpPacketSize(size_t max_packet_size)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetExtmapAllowMixed(bool extmap_allow_mixed);
+ void SetExtmapAllowMixed(bool extmap_allow_mixed)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// RTP header extension
- int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id);
- bool RegisterRtpHeaderExtension(absl::string_view uri, int id);
- bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const;
- int32_t DeregisterRtpHeaderExtension(RTPExtensionType type);
- void DeregisterRtpHeaderExtension(absl::string_view uri);
-
- bool SupportsPadding() const;
- bool SupportsRtxPayloadPadding() const;
+ int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ bool RegisterRtpHeaderExtension(absl::string_view uri, int id)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ int32_t DeregisterRtpHeaderExtension(RTPExtensionType type)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ void DeregisterRtpHeaderExtension(absl::string_view uri)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+
+ bool SupportsPadding() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ bool SupportsRtxPayloadPadding() const RTC_LOCKS_EXCLUDED(send_mutex_);
std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
size_t target_size_bytes,
- bool media_has_been_sent);
+ bool media_has_been_sent) RTC_LOCKS_EXCLUDED(send_mutex_);
// NACK.
void OnReceivedNack(const std::vector<uint16_t>& nack_sequence_numbers,
- int64_t avg_rtt);
+ int64_t avg_rtt) RTC_LOCKS_EXCLUDED(send_mutex_);
- int32_t ReSendPacket(uint16_t packet_id);
+ int32_t ReSendPacket(uint16_t packet_id) RTC_LOCKS_EXCLUDED(send_mutex_);
// ACK.
- void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number);
- void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number);
+ void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// RTX.
- void SetRtxStatus(int mode);
- int RtxStatus() const;
- absl::optional<uint32_t> RtxSsrc() const { return rtx_ssrc_; }
+ void SetRtxStatus(int mode) RTC_LOCKS_EXCLUDED(send_mutex_);
+ int RtxStatus() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ absl::optional<uint32_t> RtxSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) {
+ return rtx_ssrc_;
+ }
- void SetRtxPayloadType(int payload_type, int associated_payload_type);
+ void SetRtxPayloadType(int payload_type, int associated_payload_type)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Size info for header extensions used by FEC packets.
- static rtc::ArrayView<const RtpExtensionSize> FecExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> FecExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Size info for header extensions used by video packets.
- static rtc::ArrayView<const RtpExtensionSize> VideoExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> VideoExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Size info for header extensions used by audio packets.
- static rtc::ArrayView<const RtpExtensionSize> AudioExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> AudioExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Create empty packet, fills ssrc, csrcs and reserve place for header
// extensions RtpSender updates before sending.
- std::unique_ptr<RtpPacketToSend> AllocatePacket() const;
+ std::unique_ptr<RtpPacketToSend> AllocatePacket() const
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Allocate sequence number for provided packet.
// Save packet's fields to generate padding that doesn't break media stream.
// Return false if sending was turned off.
- bool AssignSequenceNumber(RtpPacketToSend* packet);
+ bool AssignSequenceNumber(RtpPacketToSend* packet)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Maximum header overhead per fec/padding packet.
- size_t FecOrPaddingPacketMaxRtpHeaderLength() const;
+ size_t FecOrPaddingPacketMaxRtpHeaderLength() const
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Expected header overhead per media packet.
- size_t ExpectedPerPacketOverhead() const;
- uint16_t AllocateSequenceNumber(uint16_t packets_to_send);
+ size_t ExpectedPerPacketOverhead() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ uint16_t AllocateSequenceNumber(uint16_t packets_to_send)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Including RTP headers.
- size_t MaxRtpPacketSize() const;
+ size_t MaxRtpPacketSize() const RTC_LOCKS_EXCLUDED(send_mutex_);
- uint32_t SSRC() const { return ssrc_; }
+ uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_mutex_) { return ssrc_; }
- absl::optional<uint32_t> FlexfecSsrc() const { return flexfec_ssrc_; }
+ absl::optional<uint32_t> FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) {
+ return flexfec_ssrc_;
+ }
// Sends packet to |transport_| or to the pacer, depending on configuration.
// TODO(bugs.webrtc.org/XXX): Remove in favor of EnqueuePackets().
- bool SendToNetwork(std::unique_ptr<RtpPacketToSend> packet);
+ bool SendToNetwork(std::unique_ptr<RtpPacketToSend> packet)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
// Pass a set of packets to RtpPacketSender instance, for paced or immediate
// sending to the network.
- void EnqueuePackets(std::vector<std::unique_ptr<RtpPacketToSend>> packets);
+ void EnqueuePackets(std::vector<std::unique_ptr<RtpPacketToSend>> packets)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
- void SetRtpState(const RtpState& rtp_state);
- RtpState GetRtpState() const;
- void SetRtxRtpState(const RtpState& rtp_state);
- RtpState GetRtxRtpState() const;
+ void SetRtpState(const RtpState& rtp_state) RTC_LOCKS_EXCLUDED(send_mutex_);
+ RtpState GetRtpState() const RTC_LOCKS_EXCLUDED(send_mutex_);
+ void SetRtxRtpState(const RtpState& rtp_state)
+ RTC_LOCKS_EXCLUDED(send_mutex_);
+ RtpState GetRtxRtpState() const RTC_LOCKS_EXCLUDED(send_mutex_);
- int64_t LastTimestampTimeMs() const;
+ int64_t LastTimestampTimeMs() const RTC_LOCKS_EXCLUDED(send_mutex_);
private:
std::unique_ptr<RtpPacketToSend> BuildRtxPacket(
@@ -151,10 +176,10 @@ class RTPSender {
bool IsFecPacket(const RtpPacketToSend& packet) const;
- void UpdateHeaderSizes() RTC_EXCLUSIVE_LOCKS_REQUIRED(send_critsect_);
+ void UpdateHeaderSizes() RTC_EXCLUSIVE_LOCKS_REQUIRED(send_mutex_);
Clock* const clock_;
- Random random_ RTC_GUARDED_BY(send_critsect_);
+ Random random_ RTC_GUARDED_BY(send_mutex_);
const bool audio_configured_;
@@ -168,42 +193,41 @@ class RTPSender {
RtpPacketHistory* const packet_history_;
RtpPacketSender* const paced_sender_;
- rtc::CriticalSection send_critsect_;
+ mutable Mutex send_mutex_;
- bool sending_media_ RTC_GUARDED_BY(send_critsect_);
+ bool sending_media_ RTC_GUARDED_BY(send_mutex_);
size_t max_packet_size_;
- int8_t last_payload_type_ RTC_GUARDED_BY(send_critsect_);
+ int8_t last_payload_type_ RTC_GUARDED_BY(send_mutex_);
- RtpHeaderExtensionMap rtp_header_extension_map_
- RTC_GUARDED_BY(send_critsect_);
- size_t max_media_packet_header_ RTC_GUARDED_BY(send_critsect_);
- size_t max_padding_fec_packet_header_ RTC_GUARDED_BY(send_critsect_);
+ RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(send_mutex_);
+ size_t max_media_packet_header_ RTC_GUARDED_BY(send_mutex_);
+ size_t max_padding_fec_packet_header_ RTC_GUARDED_BY(send_mutex_);
// RTP variables
- uint32_t timestamp_offset_ RTC_GUARDED_BY(send_critsect_);
- bool sequence_number_forced_ RTC_GUARDED_BY(send_critsect_);
- uint16_t sequence_number_ RTC_GUARDED_BY(send_critsect_);
- uint16_t sequence_number_rtx_ RTC_GUARDED_BY(send_critsect_);
+ uint32_t timestamp_offset_ RTC_GUARDED_BY(send_mutex_);
+ bool sequence_number_forced_ RTC_GUARDED_BY(send_mutex_);
+ uint16_t sequence_number_ RTC_GUARDED_BY(send_mutex_);
+ uint16_t sequence_number_rtx_ RTC_GUARDED_BY(send_mutex_);
// RID value to send in the RID or RepairedRID header extension.
- std::string rid_ RTC_GUARDED_BY(send_critsect_);
+ std::string rid_ RTC_GUARDED_BY(send_mutex_);
// MID value to send in the MID header extension.
- std::string mid_ RTC_GUARDED_BY(send_critsect_);
+ std::string mid_ RTC_GUARDED_BY(send_mutex_);
// Should we send MID/RID even when ACKed? (see below).
const bool always_send_mid_and_rid_;
// Track if any ACK has been received on the SSRC and RTX SSRC to indicate
// when to stop sending the MID and RID header extensions.
- bool ssrc_has_acked_ RTC_GUARDED_BY(send_critsect_);
- bool rtx_ssrc_has_acked_ RTC_GUARDED_BY(send_critsect_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(send_critsect_);
- int64_t capture_time_ms_ RTC_GUARDED_BY(send_critsect_);
- int64_t last_timestamp_time_ms_ RTC_GUARDED_BY(send_critsect_);
- bool last_packet_marker_bit_ RTC_GUARDED_BY(send_critsect_);
- std::vector<uint32_t> csrcs_ RTC_GUARDED_BY(send_critsect_);
- int rtx_ RTC_GUARDED_BY(send_critsect_);
+ bool ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_);
+ bool rtx_ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(send_mutex_);
+ int64_t capture_time_ms_ RTC_GUARDED_BY(send_mutex_);
+ int64_t last_timestamp_time_ms_ RTC_GUARDED_BY(send_mutex_);
+ bool last_packet_marker_bit_ RTC_GUARDED_BY(send_mutex_);
+ std::vector<uint32_t> csrcs_ RTC_GUARDED_BY(send_mutex_);
+ int rtx_ RTC_GUARDED_BY(send_mutex_);
// Mapping rtx_payload_type_map_[associated] = rtx.
- std::map<int8_t, int8_t> rtx_payload_type_map_ RTC_GUARDED_BY(send_critsect_);
- bool supports_bwe_extension_ RTC_GUARDED_BY(send_critsect_);
+ std::map<int8_t, int8_t> rtx_payload_type_map_ RTC_GUARDED_BY(send_mutex_);
+ bool supports_bwe_extension_ RTC_GUARDED_BY(send_mutex_);
RateLimiter* const retransmission_rate_limiter_;
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc
index c8d83db297..d15e7458ac 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -66,7 +66,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name,
const size_t channels,
const uint32_t rate) {
if (absl::EqualsIgnoreCase(payload_name, "cn")) {
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
// we can have multiple CNG payload types
switch (frequency) {
case 8000:
@@ -85,14 +85,14 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name,
return -1;
}
} else if (absl::EqualsIgnoreCase(payload_name, "telephone-event")) {
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
// Don't add it to the list
// we dont want to allow send with a DTMF payloadtype
dtmf_payload_type_ = payload_type;
dtmf_payload_freq_ = frequency;
return 0;
} else if (payload_name == "audio") {
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
encoder_rtp_timestamp_frequency_ = frequency;
return 0;
}
@@ -100,7 +100,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name,
}
bool RTPSenderAudio::MarkerBit(AudioFrameType frame_type, int8_t payload_type) {
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
// for audio true for first packet in a speech burst
bool marker_bit = false;
if (last_payload_type_ != payload_type) {
@@ -174,7 +174,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
uint32_t dtmf_payload_freq = 0;
absl::optional<uint32_t> encoder_rtp_timestamp_frequency;
{
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
audio_level_dbov = audio_level_dbov_;
dtmf_payload_freq = dtmf_payload_freq_;
encoder_rtp_timestamp_frequency = encoder_rtp_timestamp_frequency_;
@@ -296,7 +296,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
return false;
{
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
last_payload_type_ = payload_type;
}
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", rtp_timestamp, "timestamp",
@@ -316,7 +316,7 @@ int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dbov) {
if (level_dbov > 127) {
return -1;
}
- rtc::CritScope cs(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
audio_level_dbov_ = level_dbov;
return 0;
}
@@ -327,7 +327,7 @@ int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key,
uint8_t level) {
DtmfQueue::Event event;
{
- rtc::CritScope lock(&send_audio_critsect_);
+ MutexLock lock(&send_audio_mutex_);
if (dtmf_payload_type_ < 0) {
// TelephoneEvent payloadtype not configured
return -1;
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h b/modules/rtp_rtcp/source/rtp_sender_audio.h
index c2d8074a60..3d3ca52c87 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -22,8 +22,8 @@
#include "modules/rtp_rtcp/source/dtmf_queue.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/one_time_event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -74,13 +74,13 @@ class RTPSenderAudio {
Clock* const clock_ = nullptr;
RTPSender* const rtp_sender_ = nullptr;
- rtc::CriticalSection send_audio_critsect_;
+ Mutex send_audio_mutex_;
// DTMF.
bool dtmf_event_is_on_ = false;
bool dtmf_event_first_packet_sent_ = false;
- int8_t dtmf_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
- uint32_t dtmf_payload_freq_ RTC_GUARDED_BY(send_audio_critsect_) = 8000;
+ int8_t dtmf_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
+ uint32_t dtmf_payload_freq_ RTC_GUARDED_BY(send_audio_mutex_) = 8000;
uint32_t dtmf_timestamp_ = 0;
uint32_t dtmf_length_samples_ = 0;
int64_t dtmf_time_last_sent_ = 0;
@@ -89,20 +89,20 @@ class RTPSenderAudio {
DtmfQueue dtmf_queue_;
// VAD detection, used for marker bit.
- bool inband_vad_active_ RTC_GUARDED_BY(send_audio_critsect_) = false;
- int8_t cngnb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
- int8_t cngwb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
- int8_t cngswb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
- int8_t cngfb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
- int8_t last_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1;
+ bool inband_vad_active_ RTC_GUARDED_BY(send_audio_mutex_) = false;
+ int8_t cngnb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
+ int8_t cngwb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
+ int8_t cngswb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
+ int8_t cngfb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
+ int8_t last_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1;
// Audio level indication.
// (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
- uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_critsect_) = 0;
+ uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_mutex_) = 0;
OneTimeEvent first_packet_sent_;
absl::optional<uint32_t> encoder_rtp_timestamp_frequency_
- RTC_GUARDED_BY(send_audio_critsect_);
+ RTC_GUARDED_BY(send_audio_mutex_);
AbsoluteCaptureTimeSender absolute_capture_time_sender_;
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
index 3e35f42bff..1583ab04c0 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
@@ -18,6 +18,7 @@
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -67,8 +68,8 @@ class RtpSenderAudioTest : public ::testing::Test {
public:
RtpSenderAudioTest()
: fake_clock_(kStartTime),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.audio = true;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
@@ -81,7 +82,7 @@ class RtpSenderAudioTest : public ::testing::Test {
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
RTPSenderAudio rtp_sender_audio_;
};
diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.cc b/modules/rtp_rtcp/source/rtp_sender_egress.cc
index 77803deda9..c89b9e42ce 100644
--- a/modules/rtp_rtcp/source/rtp_sender_egress.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_egress.cc
@@ -10,6 +10,7 @@
#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
+#include <algorithm>
#include <limits>
#include <memory>
#include <utility>
@@ -17,8 +18,8 @@
#include "absl/strings/match.h"
#include "api/transport/field_trial_based_config.h"
#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h"
-#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/to_queued_task.h"
namespace webrtc {
namespace {
@@ -26,6 +27,8 @@ constexpr uint32_t kTimestampTicksPerMs = 90;
constexpr int kSendSideDelayWindowMs = 1000;
constexpr int kBitrateStatisticsWindowMs = 1000;
constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13;
+constexpr TimeDelta kUpdateInterval =
+ TimeDelta::Millis(kBitrateStatisticsWindowMs);
bool IsEnabled(absl::string_view name,
const WebRtcKeyValueConfig* field_trials) {
@@ -36,26 +39,51 @@ bool IsEnabled(absl::string_view name,
} // namespace
RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender(
- RtpSenderEgress* sender)
- : transport_sequence_number_(0), sender_(sender) {}
+ RtpSenderEgress* sender,
+ SequenceNumberAssigner* sequence_number_assigner)
+ : transport_sequence_number_(0),
+ sender_(sender),
+ sequence_number_assigner_(sequence_number_assigner) {
+ RTC_DCHECK(sequence_number_assigner_);
+}
RtpSenderEgress::NonPacedPacketSender::~NonPacedPacketSender() = default;
void RtpSenderEgress::NonPacedPacketSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
for (auto& packet : packets) {
- if (!packet->SetExtension<TransportSequenceNumber>(
- ++transport_sequence_number_)) {
- --transport_sequence_number_;
- }
- packet->ReserveExtension<TransmissionOffset>();
- packet->ReserveExtension<AbsoluteSendTime>();
+ PrepareForSend(packet.get());
sender_->SendPacket(packet.get(), PacedPacketInfo());
}
+ auto fec_packets = sender_->FetchFecPackets();
+ if (!fec_packets.empty()) {
+ // Don't generate sequence numbers for flexfec, they are already running on
+ // an internally maintained sequence.
+ const bool generate_sequence_numbers = !sender_->FlexFecSsrc().has_value();
+
+ for (auto& packet : fec_packets) {
+ if (generate_sequence_numbers) {
+ sequence_number_assigner_->AssignSequenceNumber(packet.get());
+ }
+ PrepareForSend(packet.get());
+ }
+ EnqueuePackets(std::move(fec_packets));
+ }
}
-RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config,
+void RtpSenderEgress::NonPacedPacketSender::PrepareForSend(
+ RtpPacketToSend* packet) {
+ if (!packet->SetExtension<TransportSequenceNumber>(
+ ++transport_sequence_number_)) {
+ --transport_sequence_number_;
+ }
+ packet->ReserveExtension<TransmissionOffset>();
+ packet->ReserveExtension<AbsoluteSendTime>();
+}
+
+RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history)
- : ssrc_(config.local_media_ssrc),
+ : worker_queue_(TaskQueueBase::Current()),
+ ssrc_(config.local_media_ssrc),
rtx_ssrc_(config.rtx_send_ssrc),
flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc()
: absl::nullopt),
@@ -66,8 +94,14 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config,
packet_history_(packet_history),
transport_(config.outgoing_transport),
event_log_(config.event_log),
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
is_audio_(config.audio),
+#endif
need_rtp_packet_infos_(config.need_rtp_packet_infos),
+ fec_generator_(
+ IsEnabled("WebRTC-DeferredFecGeneration", config.field_trials)
+ ? config.fec_generator
+ : nullptr),
transport_feedback_observer_(config.transport_feedback_callback),
send_side_delay_observer_(config.send_side_delay_observer),
send_packet_observer_(config.send_packet_observer),
@@ -84,54 +118,91 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config,
rtp_sequence_number_map_(need_rtp_packet_infos_
? std::make_unique<RtpSequenceNumberMap>(
kRtpSequenceNumberMapMaxEntries)
- : nullptr) {}
+ : nullptr) {
+ RTC_DCHECK(worker_queue_);
+ pacer_checker_.Detach();
+ if (bitrate_callback_) {
+ update_task_ = RepeatingTaskHandle::DelayedStart(worker_queue_,
+ kUpdateInterval, [this]() {
+ PeriodicUpdate();
+ return kUpdateInterval;
+ });
+ }
+}
+
+RtpSenderEgress::~RtpSenderEgress() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ update_task_.Stop();
+}
void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) {
+ RTC_DCHECK_RUN_ON(&pacer_checker_);
RTC_DCHECK(packet);
- const uint32_t packet_ssrc = packet->Ssrc();
RTC_DCHECK(packet->packet_type().has_value());
RTC_DCHECK(HasCorrectSsrc(*packet));
- int64_t now_ms = clock_->TimeInMilliseconds();
- if (is_audio_) {
-#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms,
- GetSendRates().Sum().kbps(), packet_ssrc);
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(
- 1, "AudioNackBitrate_kbps", now_ms,
- GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
- packet_ssrc);
-#endif
- } else {
+ const uint32_t packet_ssrc = packet->Ssrc();
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+
#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms,
- GetSendRates().Sum().kbps(), packet_ssrc);
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(
- 1, "VideoNackBitrate_kbps", now_ms,
- GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
- packet_ssrc);
+ worker_queue_->PostTask(
+ ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc]() {
+ BweTestLoggingPlot(now_ms, packet_ssrc);
+ }));
#endif
+
+ if (need_rtp_packet_infos_ &&
+ packet->packet_type() == RtpPacketToSend::Type::kVideo) {
+ worker_queue_->PostTask(ToQueuedTask(
+ task_safety_,
+ [this, packet_timestamp = packet->Timestamp(),
+ is_first_packet_of_frame = packet->is_first_packet_of_frame(),
+ is_last_packet_of_frame = packet->Marker(),
+ sequence_number = packet->SequenceNumber()]() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ // Last packet of a frame, add it to sequence number info map.
+ const uint32_t timestamp = packet_timestamp - timestamp_offset_;
+ rtp_sequence_number_map_->InsertPacket(
+ sequence_number,
+ RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame,
+ is_last_packet_of_frame));
+ }));
}
- PacketOptions options;
- {
- rtc::CritScope lock(&lock_);
- options.included_in_allocation = force_part_of_allocation_;
+ if (fec_generator_ && packet->fec_protect_packet()) {
+ // Deferred fec generation is used, add packet to generator.
+ RTC_DCHECK(fec_generator_);
+ RTC_DCHECK(packet->packet_type() == RtpPacketMediaType::kVideo);
+ absl::optional<std::pair<FecProtectionParams, FecProtectionParams>>
+ new_fec_params;
+ {
+ MutexLock lock(&lock_);
+ new_fec_params.swap(pending_fec_params_);
+ }
+ if (new_fec_params) {
+ fec_generator_->SetProtectionParameters(new_fec_params->first,
+ new_fec_params->second);
+ }
+ if (packet->is_red()) {
+ RtpPacketToSend unpacked_packet(*packet);
+
+ const rtc::CopyOnWriteBuffer buffer = packet->Buffer();
+ // Grab media payload type from RED header.
+ const size_t headers_size = packet->headers_size();
+ unpacked_packet.SetPayloadType(buffer[headers_size]);
- if (need_rtp_packet_infos_ &&
- packet->packet_type() == RtpPacketToSend::Type::kVideo) {
- RTC_DCHECK(rtp_sequence_number_map_);
- // Last packet of a frame, add it to sequence number info map.
- const uint32_t timestamp = packet->Timestamp() - timestamp_offset_;
- bool is_first_packet_of_frame = packet->is_first_packet_of_frame();
- bool is_last_packet_of_frame = packet->Marker();
-
- rtp_sequence_number_map_->InsertPacket(
- packet->SequenceNumber(),
- RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame,
- is_last_packet_of_frame));
+ // Copy the media payload into the unpacked buffer.
+ uint8_t* payload_buffer =
+ unpacked_packet.SetPayloadSize(packet->payload_size() - 1);
+ std::copy(&packet->payload()[0] + 1,
+ &packet->payload()[0] + packet->payload_size(), payload_buffer);
+
+ fec_generator_->AddPacketAndGenerateFec(unpacked_packet);
+ } else {
+ // If not RED encapsulated - we can just insert packet directly.
+ fec_generator_->AddPacketAndGenerateFec(*packet);
}
}
@@ -163,6 +234,12 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio ||
packet->packet_type() == RtpPacketMediaType::kVideo;
+ PacketOptions options;
+ {
+ MutexLock lock(&lock_);
+ options.included_in_allocation = force_part_of_allocation_;
+ }
+
// Downstream code actually uses this flag to distinguish between media and
// everything else.
options.is_retransmit = !is_media;
@@ -195,26 +272,35 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
}
if (send_success) {
- rtc::CritScope lock(&lock_);
- UpdateRtpStats(*packet);
+ // |media_has_been_sent_| is used by RTPSender to figure out if it can send
+ // padding in the absence of transport-cc or abs-send-time.
+ // In those cases media must be sent first to set a reference timestamp.
media_has_been_sent_ = true;
- }
-}
-
-void RtpSenderEgress::ProcessBitrateAndNotifyObservers() {
- if (!bitrate_callback_)
- return;
- rtc::CritScope lock(&lock_);
- RtpSendRates send_rates = GetSendRates();
- bitrate_callback_->Notify(
- send_rates.Sum().bps(),
- send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_);
+ // TODO(sprang): Add support for FEC protecting all header extensions, add
+ // media packet to generator here instead.
+
+ RTC_DCHECK(packet->packet_type().has_value());
+ RtpPacketMediaType packet_type = *packet->packet_type();
+ RtpPacketCounter counter(*packet);
+ size_t size = packet->size();
+ worker_queue_->PostTask(
+ ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc, packet_type,
+ counter = std::move(counter), size]() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ UpdateRtpStats(now_ms, packet_ssrc, packet_type, std::move(counter),
+ size);
+ }));
+ }
}
RtpSendRates RtpSenderEgress::GetSendRates() const {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
const int64_t now_ms = clock_->TimeInMilliseconds();
+ return GetSendRatesLocked(now_ms);
+}
+
+RtpSendRates RtpSenderEgress::GetSendRatesLocked(int64_t now_ms) const {
RtpSendRates current_rates;
for (size_t i = 0; i < kNumMediaTypes; ++i) {
RtpPacketMediaType type = static_cast<RtpPacketMediaType>(i);
@@ -226,34 +312,37 @@ RtpSendRates RtpSenderEgress::GetSendRates() const {
void RtpSenderEgress::GetDataCounters(StreamDataCounters* rtp_stats,
StreamDataCounters* rtx_stats) const {
- rtc::CritScope lock(&lock_);
+ // TODO(bugs.webrtc.org/11581): make sure rtx_rtp_stats_ and rtp_stats_ are
+ // only touched on the worker thread.
+ MutexLock lock(&lock_);
*rtp_stats = rtp_stats_;
*rtx_stats = rtx_rtp_stats_;
}
void RtpSenderEgress::ForceIncludeSendPacketsInAllocation(
bool part_of_allocation) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
force_part_of_allocation_ = part_of_allocation;
}
bool RtpSenderEgress::MediaHasBeenSent() const {
- rtc::CritScope lock(&lock_);
+ RTC_DCHECK_RUN_ON(&pacer_checker_);
return media_has_been_sent_;
}
void RtpSenderEgress::SetMediaHasBeenSent(bool media_sent) {
- rtc::CritScope lock(&lock_);
+ RTC_DCHECK_RUN_ON(&pacer_checker_);
media_has_been_sent_ = media_sent;
}
void RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) {
- rtc::CritScope lock(&lock_);
+ RTC_DCHECK_RUN_ON(worker_queue_);
timestamp_offset_ = timestamp;
}
std::vector<RtpSequenceNumberMap::Info> RtpSenderEgress::GetSentRtpPacketInfos(
rtc::ArrayView<const uint16_t> sequence_numbers) const {
+ RTC_DCHECK_RUN_ON(worker_queue_);
RTC_DCHECK(!sequence_numbers.empty());
if (!need_rtp_packet_infos_) {
return std::vector<RtpSequenceNumberMap::Info>();
@@ -262,7 +351,6 @@ std::vector<RtpSequenceNumberMap::Info> RtpSenderEgress::GetSentRtpPacketInfos(
std::vector<RtpSequenceNumberMap::Info> results;
results.reserve(sequence_numbers.size());
- rtc::CritScope cs(&lock_);
for (uint16_t sequence_number : sequence_numbers) {
const auto& info = rtp_sequence_number_map_->Get(sequence_number);
if (!info) {
@@ -276,6 +364,24 @@ std::vector<RtpSequenceNumberMap::Info> RtpSenderEgress::GetSentRtpPacketInfos(
return results;
}
+void RtpSenderEgress::SetFecProtectionParameters(
+ const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params) {
+ // TODO(sprang): Post task to pacer queue instead, one pacer is fully
+ // migrated to a task queue.
+ MutexLock lock(&lock_);
+ pending_fec_params_.emplace(delta_params, key_params);
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>>
+RtpSenderEgress::FetchFecPackets() {
+ RTC_DCHECK_RUN_ON(&pacer_checker_);
+ if (fec_generator_) {
+ return fec_generator_->GetFecPackets();
+ }
+ return {};
+}
+
bool RtpSenderEgress::HasCorrectSsrc(const RtpPacketToSend& packet) const {
switch (*packet.packet_type()) {
case RtpPacketMediaType::kAudio:
@@ -324,7 +430,7 @@ void RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms,
int max_delay_ms = 0;
uint64_t total_packet_send_delay_ms = 0;
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
// Compute the max and average of the recent capture-to-send delays.
// The time complexity of the current approach depends on the distribution
// of the delay values. This could be done more efficiently.
@@ -430,32 +536,82 @@ bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet,
return true;
}
-void RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) {
- int64_t now_ms = clock_->TimeInMilliseconds();
+void RtpSenderEgress::UpdateRtpStats(int64_t now_ms,
+ uint32_t packet_ssrc,
+ RtpPacketMediaType packet_type,
+ RtpPacketCounter counter,
+ size_t packet_size) {
+ RTC_DCHECK_RUN_ON(worker_queue_);
- StreamDataCounters* counters =
- packet.Ssrc() == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_;
+ // TODO(bugs.webrtc.org/11581): send_rates_ should be touched only on the
+ // worker thread.
+ RtpSendRates send_rates;
+ {
+ MutexLock lock(&lock_);
- if (counters->first_packet_time_ms == -1) {
- counters->first_packet_time_ms = now_ms;
- }
+ // TODO(bugs.webrtc.org/11581): make sure rtx_rtp_stats_ and rtp_stats_ are
+ // only touched on the worker thread.
+ StreamDataCounters* counters =
+ packet_ssrc == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_;
- if (packet.packet_type() == RtpPacketMediaType::kForwardErrorCorrection) {
- counters->fec.AddPacket(packet);
+ if (counters->first_packet_time_ms == -1) {
+ counters->first_packet_time_ms = now_ms;
+ }
+
+ if (packet_type == RtpPacketMediaType::kForwardErrorCorrection) {
+ counters->fec.Add(counter);
+ } else if (packet_type == RtpPacketMediaType::kRetransmission) {
+ counters->retransmitted.Add(counter);
+ }
+ counters->transmitted.Add(counter);
+
+ send_rates_[static_cast<size_t>(packet_type)].Update(packet_size, now_ms);
+ if (bitrate_callback_) {
+ send_rates = GetSendRatesLocked(now_ms);
+ }
+
+ if (rtp_stats_callback_) {
+ rtp_stats_callback_->DataCountersUpdated(*counters, packet_ssrc);
+ }
}
- if (packet.packet_type() == RtpPacketMediaType::kRetransmission) {
- counters->retransmitted.AddPacket(packet);
+ // The bitrate_callback_ and rtp_stats_callback_ pointers in practice point
+ // to the same object, so these callbacks could be consolidated into one.
+ if (bitrate_callback_) {
+ bitrate_callback_->Notify(
+ send_rates.Sum().bps(),
+ send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_);
}
- counters->transmitted.AddPacket(packet);
+}
+
+void RtpSenderEgress::PeriodicUpdate() {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK(bitrate_callback_);
+ RtpSendRates send_rates = GetSendRates();
+ bitrate_callback_->Notify(
+ send_rates.Sum().bps(),
+ send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_);
+}
- RTC_DCHECK(packet.packet_type().has_value());
- send_rates_[static_cast<size_t>(*packet.packet_type())].Update(packet.size(),
- now_ms);
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+void RtpSenderEgress::BweTestLoggingPlot(int64_t now_ms, uint32_t packet_ssrc) {
+ RTC_DCHECK_RUN_ON(worker_queue_);
- if (rtp_stats_callback_) {
- rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc());
+ const auto rates = GetSendRates();
+ if (is_audio_) {
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms,
+ rates.Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "AudioNackBitrate_kbps", now_ms,
+ rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc);
+ } else {
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms,
+ rates.Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "VideoNackBitrate_kbps", now_ms,
+ rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc);
}
}
+#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.h b/modules/rtp_rtcp/source/rtp_sender_egress.h
index 298f57eff0..8e36425f29 100644
--- a/modules/rtp_rtcp/source/rtp_sender_egress.h
+++ b/modules/rtp_rtcp/source/rtp_sender_egress.h
@@ -13,19 +13,25 @@
#include <map>
#include <memory>
+#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/call/transport.h"
#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/units/data_rate.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -36,35 +42,40 @@ class RtpSenderEgress {
// without passing through an actual paced sender.
class NonPacedPacketSender : public RtpPacketSender {
public:
- explicit NonPacedPacketSender(RtpSenderEgress* sender);
+ NonPacedPacketSender(RtpSenderEgress* sender,
+ SequenceNumberAssigner* sequence_number_assigner);
virtual ~NonPacedPacketSender();
void EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) override;
private:
+ void PrepareForSend(RtpPacketToSend* packet);
uint16_t transport_sequence_number_;
RtpSenderEgress* const sender_;
+ SequenceNumberAssigner* sequence_number_assigner_;
};
- RtpSenderEgress(const RtpRtcp::Configuration& config,
+ RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history);
- ~RtpSenderEgress() = default;
+ ~RtpSenderEgress();
- void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info);
+ void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info)
+ RTC_LOCKS_EXCLUDED(lock_);
uint32_t Ssrc() const { return ssrc_; }
absl::optional<uint32_t> RtxSsrc() const { return rtx_ssrc_; }
absl::optional<uint32_t> FlexFecSsrc() const { return flexfec_ssrc_; }
- void ProcessBitrateAndNotifyObservers();
- RtpSendRates GetSendRates() const;
+ RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_);
void GetDataCounters(StreamDataCounters* rtp_stats,
- StreamDataCounters* rtx_stats) const;
+ StreamDataCounters* rtx_stats) const
+ RTC_LOCKS_EXCLUDED(lock_);
- void ForceIncludeSendPacketsInAllocation(bool part_of_allocation);
- bool MediaHasBeenSent() const;
- void SetMediaHasBeenSent(bool media_sent);
- void SetTimestampOffset(uint32_t timestamp);
+ void ForceIncludeSendPacketsInAllocation(bool part_of_allocation)
+ RTC_LOCKS_EXCLUDED(lock_);
+ bool MediaHasBeenSent() const RTC_LOCKS_EXCLUDED(lock_);
+ void SetMediaHasBeenSent(bool media_sent) RTC_LOCKS_EXCLUDED(lock_);
+ void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(lock_);
// For each sequence number in |sequence_number|, recall the last RTP packet
// which bore it - its timestamp and whether it was the first and/or last
@@ -72,7 +83,12 @@ class RtpSenderEgress {
// recalled, return a vector with all of them (in corresponding order).
// If any could not be recalled, return an empty vector.
std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
- rtc::ArrayView<const uint16_t> sequence_numbers) const;
+ rtc::ArrayView<const uint16_t> sequence_numbers) const
+ RTC_LOCKS_EXCLUDED(lock_);
+
+ void SetFecProtectionParameters(const FecProtectionParams& delta_params,
+ const FecProtectionParams& key_params);
+ std::vector<std::unique_ptr<RtpPacketToSend>> FetchFecPackets();
private:
// Maps capture time in milliseconds to send-side delay in milliseconds.
@@ -80,6 +96,8 @@ class RtpSenderEgress {
// time.
typedef std::map<int64_t, int> SendDelayMap;
+ RtpSendRates GetSendRatesLocked(int64_t now_ms) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
bool HasCorrectSsrc(const RtpPacketToSend& packet) const;
void AddPacketToTransportFeedback(uint16_t packet_id,
const RtpPacketToSend& packet,
@@ -95,9 +113,21 @@ class RtpSenderEgress {
bool SendPacketToNetwork(const RtpPacketToSend& packet,
const PacketOptions& options,
const PacedPacketInfo& pacing_info);
- void UpdateRtpStats(const RtpPacketToSend& packet)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ void UpdateRtpStats(int64_t now_ms,
+ uint32_t packet_ssrc,
+ RtpPacketMediaType packet_type,
+ RtpPacketCounter counter,
+ size_t packet_size);
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+ void BweTestLoggingPlot(int64_t now_ms, uint32_t packet_ssrc);
+#endif
+
+ // Called on a timer, once a second, on the worker_queue_.
+ void PeriodicUpdate();
+
+ TaskQueueBase* const worker_queue_;
+ SequenceChecker pacer_checker_;
const uint32_t ssrc_;
const absl::optional<uint32_t> rtx_ssrc_;
const absl::optional<uint32_t> flexfec_ssrc_;
@@ -107,8 +137,11 @@ class RtpSenderEgress {
RtpPacketHistory* const packet_history_;
Transport* const transport_;
RtcEventLog* const event_log_;
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
const bool is_audio_;
+#endif
const bool need_rtp_packet_infos_;
+ VideoFecGenerator* const fec_generator_ RTC_GUARDED_BY(pacer_checker_);
TransportFeedbackObserver* const transport_feedback_observer_;
SendSideDelayObserver* const send_side_delay_observer_;
@@ -116,10 +149,10 @@ class RtpSenderEgress {
StreamDataCountersCallback* const rtp_stats_callback_;
BitrateStatisticsObserver* const bitrate_callback_;
- rtc::CriticalSection lock_;
- bool media_has_been_sent_ RTC_GUARDED_BY(lock_);
+ mutable Mutex lock_;
+ bool media_has_been_sent_ RTC_GUARDED_BY(pacer_checker_);
bool force_part_of_allocation_ RTC_GUARDED_BY(lock_);
- uint32_t timestamp_offset_ RTC_GUARDED_BY(lock_);
+ uint32_t timestamp_offset_ RTC_GUARDED_BY(worker_queue_);
SendDelayMap send_delays_ RTC_GUARDED_BY(lock_);
SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_);
@@ -130,13 +163,17 @@ class RtpSenderEgress {
StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_);
// One element per value in RtpPacketMediaType, with index matching value.
std::vector<RateStatistics> send_rates_ RTC_GUARDED_BY(lock_);
+ absl::optional<std::pair<FecProtectionParams, FecProtectionParams>>
+ pending_fec_params_ RTC_GUARDED_BY(lock_);
// Maps sent packets' sequence numbers to a tuple consisting of:
// 1. The timestamp, without the randomizing offset mandated by the RFC.
// 2. Whether the packet was the first in its frame.
// 3. Whether the packet was the last in its frame.
const std::unique_ptr<RtpSequenceNumberMap> rtp_sequence_number_map_
- RTC_GUARDED_BY(lock_);
+ RTC_GUARDED_BY(worker_queue_);
+ RepeatingTaskHandle update_task_ RTC_GUARDED_BY(worker_queue_);
+ ScopedTaskSafety task_safety_;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index 474810a88a..c19fbe8863 100644
--- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -32,14 +32,17 @@
#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "modules/rtp_rtcp/source/video_fec_generator.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/rate_limiter.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
#include "test/rtp_header_parser.h"
+#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
@@ -140,8 +143,10 @@ MATCHER_P(SameRtcEventTypeAs, value, "") {
}
struct TestConfig {
- explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {}
+ TestConfig(bool with_overhead, bool deferred_fec)
+ : with_overhead(with_overhead), deferred_fec(deferred_fec) {}
bool with_overhead = false;
+ bool deferred_fec = false;
};
class MockRtpPacketPacer : public RtpPacketSender {
@@ -149,33 +154,32 @@ class MockRtpPacketPacer : public RtpPacketSender {
MockRtpPacketPacer() {}
virtual ~MockRtpPacketPacer() {}
- MOCK_METHOD1(EnqueuePackets,
- void(std::vector<std::unique_ptr<RtpPacketToSend>>));
-
- MOCK_METHOD2(CreateProbeCluster, void(int bitrate_bps, int cluster_id));
-
- MOCK_METHOD0(Pause, void());
- MOCK_METHOD0(Resume, void());
- MOCK_METHOD1(SetCongestionWindow,
- void(absl::optional<int64_t> congestion_window_bytes));
- MOCK_METHOD1(UpdateOutstandingData, void(int64_t outstanding_bytes));
- MOCK_METHOD1(SetAccountForAudioPackets, void(bool account_for_audio));
+ MOCK_METHOD(void,
+ EnqueuePackets,
+ (std::vector<std::unique_ptr<RtpPacketToSend>>),
+ (override));
};
class MockSendSideDelayObserver : public SendSideDelayObserver {
public:
- MOCK_METHOD4(SendSideDelayUpdated, void(int, int, uint64_t, uint32_t));
+ MOCK_METHOD(void,
+ SendSideDelayUpdated,
+ (int, int, uint64_t, uint32_t),
+ (override));
};
class MockSendPacketObserver : public SendPacketObserver {
public:
- MOCK_METHOD3(OnSendPacket, void(uint16_t, int64_t, uint32_t));
+ MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override));
};
class MockTransportFeedbackObserver : public TransportFeedbackObserver {
public:
- MOCK_METHOD1(OnAddPacket, void(const RtpPacketSendInfo&));
- MOCK_METHOD1(OnTransportFeedback, void(const rtcp::TransportFeedback&));
+ MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override));
+ MOCK_METHOD(void,
+ OnTransportFeedback,
+ (const rtcp::TransportFeedback&),
+ (override));
};
class StreamDataTestCallback : public StreamDataCountersCallback {
@@ -209,30 +213,82 @@ class StreamDataTestCallback : public StreamDataCountersCallback {
}
};
+class TaskQueuePacketSender : public RtpPacketSender {
+ public:
+ TaskQueuePacketSender(TimeController* time_controller,
+ std::unique_ptr<RtpPacketSender> packet_sender)
+ : time_controller_(time_controller),
+ packet_sender_(std::move(packet_sender)),
+ queue_(time_controller_->CreateTaskQueueFactory()->CreateTaskQueue(
+ "PacerQueue",
+ TaskQueueFactory::Priority::NORMAL)) {}
+
+ void EnqueuePackets(
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets) override {
+ queue_->PostTask(ToQueuedTask([sender = packet_sender_.get(),
+ packets_ = std::move(packets)]() mutable {
+ sender->EnqueuePackets(std::move(packets_));
+ }));
+ // Trigger task we just enqueued to be executed by updating the simulated
+ // time controller.
+ time_controller_->AdvanceTime(TimeDelta::Zero());
+ }
+
+ TaskQueueBase* task_queue() const { return queue_.get(); }
+
+ TimeController* const time_controller_;
+ std::unique_ptr<RtpPacketSender> packet_sender_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> queue_;
+};
+
// Mimics ModuleRtpRtcp::RtpSenderContext.
// TODO(sprang): Split up unit tests and test these components individually
// wherever possible.
-struct RtpSenderContext {
- explicit RtpSenderContext(const RtpRtcp::Configuration& config)
- : packet_history_(config.clock, config.enable_rtx_padding_prioritization),
+struct RtpSenderContext : public SequenceNumberAssigner {
+ RtpSenderContext(const RtpRtcpInterface::Configuration& config,
+ TimeController* time_controller)
+ : time_controller_(time_controller),
+ packet_history_(config.clock, config.enable_rtx_padding_prioritization),
packet_sender_(config, &packet_history_),
- non_paced_sender_(&packet_sender_),
- packet_generator_(
- config,
- &packet_history_,
- config.paced_sender ? config.paced_sender : &non_paced_sender_) {}
+ pacer_(time_controller,
+ std::make_unique<RtpSenderEgress::NonPacedPacketSender>(
+ &packet_sender_,
+ this)),
+ packet_generator_(config,
+ &packet_history_,
+ config.paced_sender ? config.paced_sender : &pacer_) {
+ }
+ void AssignSequenceNumber(RtpPacketToSend* packet) override {
+ packet_generator_.AssignSequenceNumber(packet);
+ }
+ // Inject packet straight into RtpSenderEgress without passing through the
+ // pacer, but while still running on the pacer task queue.
+ void InjectPacket(std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& packet_info) {
+ pacer_.task_queue()->PostTask(
+ ToQueuedTask([sender_ = &packet_sender_, packet_ = std::move(packet),
+ packet_info]() mutable {
+ sender_->SendPacket(packet_.get(), packet_info);
+ }));
+ time_controller_->AdvanceTime(TimeDelta::Zero());
+ }
+ TimeController* time_controller_;
RtpPacketHistory packet_history_;
RtpSenderEgress packet_sender_;
- RtpSenderEgress::NonPacedPacketSender non_paced_sender_;
+ TaskQueuePacketSender pacer_;
RTPSender packet_generator_;
};
class FieldTrialConfig : public WebRtcKeyValueConfig {
public:
- FieldTrialConfig() : overhead_enabled_(false), max_padding_factor_(1200) {}
+ FieldTrialConfig()
+ : overhead_enabled_(false),
+ deferred_fec_(false),
+ max_padding_factor_(1200) {}
~FieldTrialConfig() override {}
void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; }
+ void UseDeferredFec(bool enabled) { deferred_fec_ = enabled; }
void SetMaxPaddingFactor(double factor) { max_padding_factor_ = factor; }
std::string Lookup(absl::string_view key) const override {
@@ -243,12 +299,15 @@ class FieldTrialConfig : public WebRtcKeyValueConfig {
return ssb.str();
} else if (key == "WebRTC-SendSideBwe-WithOverhead") {
return overhead_enabled_ ? "Enabled" : "Disabled";
+ } else if (key == "WebRTC-DeferredFecGeneration") {
+ return deferred_fec_ ? "Enabled" : "Disabled";
}
return "";
}
private:
bool overhead_enabled_;
+ bool deferred_fec_;
double max_padding_factor_;
};
@@ -257,8 +316,9 @@ class FieldTrialConfig : public WebRtcKeyValueConfig {
class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
protected:
RtpSenderTest()
- : fake_clock_(kStartTime),
- retransmission_rate_limiter_(&fake_clock_, 1000),
+ : time_controller_(Timestamp::Millis(kStartTime)),
+ clock_(time_controller_.GetClock()),
+ retransmission_rate_limiter_(clock_, 1000),
flexfec_sender_(0,
kFlexFecSsrc,
kSsrc,
@@ -266,9 +326,10 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
std::vector<RtpExtension>(),
std::vector<RtpExtensionSize>(),
nullptr,
- &fake_clock_),
+ clock_),
kMarkerBit(true) {
field_trials_.SetOverHeadEnabled(GetParam().with_overhead);
+ field_trials_.UseDeferredFec(GetParam().deferred_fec);
}
void SetUp() override { SetUpRtpSender(true, false, false); }
@@ -286,12 +347,20 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
void SetUpRtpSender(bool pacer,
bool populate_network2,
bool always_send_mid_and_rid) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ SetUpRtpSender(pacer, populate_network2, always_send_mid_and_rid,
+ &flexfec_sender_);
+ }
+
+ void SetUpRtpSender(bool pacer,
+ bool populate_network2,
+ bool always_send_mid_and_rid,
+ VideoFecGenerator* fec_generator) {
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.rtx_send_ssrc = kRtxSsrc;
- config.fec_generator = &flexfec_sender_;
+ config.fec_generator = fec_generator;
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
@@ -301,12 +370,14 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
config.always_send_mid_and_rid = always_send_mid_and_rid;
config.field_trials = &field_trials_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
rtp_sender()->SetSequenceNumber(kSeqNum);
rtp_sender()->SetTimestampOffset(0);
}
- SimulatedClock fake_clock_;
+ GlobalSimulatedTimeController time_controller_;
+ Clock* const clock_;
NiceMock<MockRtcEventLog> mock_rtc_event_log_;
MockRtpPacketPacer mock_paced_sender_;
StrictMock<MockSendPacketObserver> send_packet_observer_;
@@ -350,7 +421,7 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
}
std::unique_ptr<RtpPacketToSend> SendGenericPacket() {
- const int64_t kCaptureTimeMs = fake_clock_.TimeInMilliseconds();
+ const int64_t kCaptureTimeMs = clock_->TimeInMilliseconds();
return SendPacket(kCaptureTimeMs, sizeof(kPayloadData));
}
@@ -359,7 +430,7 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
for (auto& packet :
rtp_sender()->GeneratePadding(target_size_bytes, true)) {
generated_bytes += packet->payload_size() + packet->padding_size();
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
}
return generated_bytes;
}
@@ -482,15 +553,16 @@ TEST_P(RtpSenderTestWithoutPacer, AssignSequenceNumberMayAllowPaddingOnVideo) {
TEST_P(RtpSenderTest, AssignSequenceNumberAllowsPaddingOnAudio) {
MockTransport transport;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.audio = true;
- config.clock = &fake_clock_;
+ config.clock = clock_;
config.outgoing_transport = &transport;
config.paced_sender = &mock_paced_sender_;
config.local_media_ssrc = kSsrc;
config.event_log = &mock_rtc_event_log_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
rtp_sender()->SetTimestampOffset(0);
@@ -532,15 +604,16 @@ TEST_P(RtpSenderTestWithoutPacer,
TransportFeedbackObserverGetsCorrectByteCount) {
constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8;
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.transport_feedback_callback = &feedback_observer_;
config.event_log = &mock_rtc_event_log_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
config.field_trials = &field_trials_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionTransportSequenceNumber,
@@ -567,15 +640,16 @@ TEST_P(RtpSenderTestWithoutPacer,
}
TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.transport_feedback_callback = &feedback_observer_;
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionTransportSequenceNumber,
@@ -606,15 +680,16 @@ TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
}
TEST_P(RtpSenderTestWithoutPacer, PacketOptionsNoRetransmission) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.transport_feedback_callback = &feedback_observer_;
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
SendGenericPacket();
@@ -661,17 +736,18 @@ TEST_P(RtpSenderTestWithoutPacer, DoesnSetIncludedInAllocationByDefault) {
TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
StrictMock<MockSendSideDelayObserver> send_side_delay_observer_;
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.send_side_delay_observer = &send_side_delay_observer_;
config.event_log = &mock_rtc_event_log_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -688,12 +764,12 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
EXPECT_CALL(send_side_delay_observer_,
SendSideDelayUpdated(10, 10, 10, kSsrc))
.Times(1);
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
- fake_clock_.AdvanceTimeMilliseconds(10);
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
+ time_controller_.AdvanceTime(TimeDelta::Millis(10));
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, nullptr, video_header,
+ capture_time_ms, kPayloadData, video_header,
kDefaultExpectedRetransmissionTimeMs));
// Send another packet with 20 ms delay. The average, max and total should be
@@ -701,11 +777,11 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
EXPECT_CALL(send_side_delay_observer_,
SendSideDelayUpdated(15, 20, 30, kSsrc))
.Times(1);
- fake_clock_.AdvanceTimeMilliseconds(10);
+ time_controller_.AdvanceTime(TimeDelta::Millis(10));
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, nullptr, video_header,
+ capture_time_ms, kPayloadData, video_header,
kDefaultExpectedRetransmissionTimeMs));
// Send another packet at the same time, which replaces the last packet.
@@ -714,25 +790,25 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
// TODO(terelius): Is is not clear that this is the right behavior.
EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(5, 10, 30, kSsrc))
.Times(1);
- capture_time_ms = fake_clock_.TimeInMilliseconds();
+ capture_time_ms = clock_->TimeInMilliseconds();
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, nullptr, video_header,
+ capture_time_ms, kPayloadData, video_header,
kDefaultExpectedRetransmissionTimeMs));
// Send a packet 1 second later. The earlier packets should have timed
// out, so both max and average should be the delay of this packet. The total
// keeps increasing.
- fake_clock_.AdvanceTimeMilliseconds(1000);
- capture_time_ms = fake_clock_.TimeInMilliseconds();
- fake_clock_.AdvanceTimeMilliseconds(1);
+ time_controller_.AdvanceTime(TimeDelta::Millis(1000));
+ capture_time_ms = clock_->TimeInMilliseconds();
+ time_controller_.AdvanceTime(TimeDelta::Millis(1));
EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(1, 1, 31, kSsrc))
.Times(1);
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, nullptr, video_header,
+ capture_time_ms, kPayloadData, video_header,
kDefaultExpectedRetransmissionTimeMs));
}
@@ -748,8 +824,8 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendPacketUpdated) {
}
TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.paced_sender = &mock_paced_sender_;
config.local_media_ssrc = kSsrc;
@@ -757,7 +833,8 @@ TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) {
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
rtp_sender()->SetSequenceNumber(kSeqNum);
rtp_sender_context_->packet_history_.SetStorePacketsStatus(
@@ -788,7 +865,7 @@ TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) {
packet->set_packet_type(RtpPacketMediaType::kVideo);
// Transport sequence number is set by PacketRouter, before SendPacket().
packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
uint16_t transport_seq_no;
EXPECT_TRUE(
@@ -803,7 +880,7 @@ TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) {
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionVideoTiming, kVideoTimingExtensionId));
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
auto packet = rtp_sender()->AllocatePacket();
packet->SetPayloadType(kPayload);
packet->SetMarker(true);
@@ -821,8 +898,8 @@ TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) {
&RtpPacketToSend::Ssrc, kSsrc)))));
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(1, transport_.packets_sent());
EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
@@ -838,7 +915,7 @@ TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithPacer) {
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionVideoTiming, kVideoTimingExtensionId));
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
auto packet = rtp_sender()->AllocatePacket();
packet->SetPayloadType(kPayload);
packet->SetMarker(true);
@@ -858,8 +935,8 @@ TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithPacer) {
&RtpPacketToSend::Ssrc, kSsrc)))));
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(1, transport_.packets_sent());
EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
@@ -877,7 +954,7 @@ TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithoutPacer) {
kRtpExtensionVideoTiming, kVideoTimingExtensionId));
auto packet = rtp_sender()->AllocatePacket();
packet->SetMarker(true);
- packet->set_capture_time_ms(fake_clock_.TimeInMilliseconds());
+ packet->set_capture_time_ms(clock_->TimeInMilliseconds());
const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
packet->SetExtension<VideoTimingExtension>(kVideoTiming);
packet->set_allow_retransmission(true);
@@ -885,7 +962,7 @@ TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithoutPacer) {
packet->set_packet_type(RtpPacketMediaType::kVideo);
const int kPropagateTimeMs = 10;
- fake_clock_.AdvanceTimeMilliseconds(kPropagateTimeMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kPropagateTimeMs));
EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet)));
@@ -908,7 +985,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) {
EXPECT_EQ(0,
rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
auto packet =
BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms);
size_t packet_size = packet->size();
@@ -924,8 +1001,8 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) {
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
EXPECT_EQ(0, transport_.packets_sent());
- fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Process send bucket. Packet should now be sent.
EXPECT_EQ(1, transport_.packets_sent());
@@ -937,7 +1014,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) {
// Verify transmission time offset.
EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
uint64_t expected_send_time =
- ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds());
+ ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
}
@@ -953,7 +1030,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) {
EXPECT_EQ(0,
rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
auto packet =
BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms);
size_t packet_size = packet->size();
@@ -969,13 +1046,14 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) {
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
// Immediately process send bucket and send packet.
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::make_unique<RtpPacketToSend>(*packet),
+ PacedPacketInfo());
EXPECT_EQ(1, transport_.packets_sent());
// Retransmit packet.
const int kStoredTimeInMs = 100;
- fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
EXPECT_CALL(mock_rtc_event_log_,
LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)));
@@ -988,7 +1066,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) {
Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
EXPECT_EQ(static_cast<int>(packet_size), rtp_sender()->ReSendPacket(kSeqNum));
EXPECT_EQ(1, transport_.packets_sent());
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Process send bucket. Packet should now be sent.
EXPECT_EQ(2, transport_.packets_sent());
@@ -1000,7 +1078,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) {
// Verify transmission time offset.
EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
uint64_t expected_send_time =
- ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds());
+ ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
}
@@ -1029,7 +1107,7 @@ TEST_P(RtpSenderTest, SendPadding) {
webrtc::RTPHeader rtp_header;
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
auto packet =
BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms);
const uint32_t media_packet_timestamp = timestamp;
@@ -1048,8 +1126,8 @@ TEST_P(RtpSenderTest, SendPadding) {
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
EXPECT_EQ(total_packets_sent, transport_.packets_sent());
- fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
++seq_num;
// Packet should now be sent. This test doesn't verify the regular video
@@ -1081,14 +1159,14 @@ TEST_P(RtpSenderTest, SendPadding) {
int offset = timestamp - media_packet_timestamp;
EXPECT_EQ(offset, rtp_header.extension.transmissionTimeOffset);
uint64_t expected_send_time =
- ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds());
+ ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
- fake_clock_.AdvanceTimeMilliseconds(kPaddingPeriodMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kPaddingPeriodMs));
timestamp += 90 * kPaddingPeriodMs;
}
// Send a regular video packet again.
- capture_time_ms = fake_clock_.TimeInMilliseconds();
+ capture_time_ms = clock_->TimeInMilliseconds();
packet = BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms);
packet_size = packet->size();
@@ -1101,7 +1179,7 @@ TEST_P(RtpSenderTest, SendPadding) {
Pointee(Property(&RtpPacketToSend::SequenceNumber, seq_num))))));
EXPECT_TRUE(
rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Process send bucket.
EXPECT_EQ(++total_packets_sent, transport_.packets_sent());
@@ -1114,7 +1192,7 @@ TEST_P(RtpSenderTest, SendPadding) {
// Verify transmission time offset. This packet is sent without delay.
EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
uint64_t expected_send_time =
- ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds());
+ ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
}
@@ -1137,7 +1215,7 @@ TEST_P(RtpSenderTest, OnSendPacketUpdated) {
auto packet = SendGenericPacket();
packet->set_packet_type(RtpPacketMediaType::kVideo);
packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(1, transport_.packets_sent());
}
@@ -1159,7 +1237,7 @@ TEST_P(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) {
auto packet = SendGenericPacket();
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(1, transport_.packets_sent());
EXPECT_TRUE(transport_.last_options_.is_retransmit);
@@ -1170,7 +1248,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -1180,7 +1258,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, nullptr, video_header,
+ payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
auto sent_payload = transport_.last_sent_packet().payload();
@@ -1196,7 +1274,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, nullptr, video_header,
+ payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
sent_payload = transport_.last_sent_packet().payload();
@@ -1212,7 +1290,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendRawVideo) {
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -1221,7 +1299,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendRawVideo) {
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, absl::nullopt, 1234,
- 4321, payload, nullptr, video_header,
+ 4321, payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
auto sent_payload = transport_.last_sent_packet().payload();
@@ -1237,11 +1315,11 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, &fake_clock_);
+ nullptr /* rtp_state */, clock_);
// Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.paced_sender = &mock_paced_sender_;
config.local_media_ssrc = kSsrc;
@@ -1249,7 +1327,9 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ config.field_trials = &field_trials_;
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
rtp_sender()->SetSequenceNumber(kSeqNum);
rtp_sender_context_->packet_history_.SetStorePacketsStatus(
@@ -1257,9 +1337,13 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
- video_config.fec_generator = &flexfec_sender;
+ if (!GetParam().deferred_fec) {
+ video_config.fec_generator = &flexfec_sender;
+ }
+ video_config.fec_type = flexfec_sender.GetFecType();
+ video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead();
video_config.fec_type = flexfec_sender.GetFecType();
video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead();
video_config.field_trials = &field_trials;
@@ -1275,46 +1359,55 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
uint16_t flexfec_seq_num;
RTPVideoHeader video_header;
- std::unique_ptr<RtpPacketToSend> media_packet;
- std::unique_ptr<RtpPacketToSend> fec_packet;
-
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- for (auto& packet : packets) {
- if (packet->packet_type() == RtpPacketMediaType::kVideo) {
- EXPECT_EQ(packet->Ssrc(), kSsrc);
- EXPECT_EQ(packet->SequenceNumber(), kSeqNum);
- media_packet = std::move(packet);
- } else {
- EXPECT_EQ(packet->packet_type(),
+ std::unique_ptr<RtpPacketToSend> media_packet;
+ std::unique_ptr<RtpPacketToSend> fec_packet;
+
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ for (auto& packet : packets) {
+ if (packet->packet_type() == RtpPacketMediaType::kVideo) {
+ EXPECT_EQ(packet->Ssrc(), kSsrc);
+ EXPECT_EQ(packet->SequenceNumber(), kSeqNum);
+ media_packet = std::move(packet);
+ if (GetParam().deferred_fec) {
+ // Simulate RtpSenderEgress adding packet to fec generator.
+ flexfec_sender.AddPacketAndGenerateFec(*media_packet);
+ auto fec_packets = flexfec_sender.GetFecPackets();
+ EXPECT_EQ(fec_packets.size(), 1u);
+ fec_packet = std::move(fec_packets[0]);
+ EXPECT_EQ(fec_packet->packet_type(),
RtpPacketMediaType::kForwardErrorCorrection);
- EXPECT_EQ(packet->Ssrc(), kFlexFecSsrc);
- fec_packet = std::move(packet);
+ EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc);
}
+ } else {
+ EXPECT_EQ(packet->packet_type(),
+ RtpPacketMediaType::kForwardErrorCorrection);
+ fec_packet = std::move(packet);
+ EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc);
}
- });
+ }
+ });
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp,
- fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header,
- kDefaultExpectedRetransmissionTimeMs));
- ASSERT_TRUE(media_packet != nullptr);
- ASSERT_TRUE(fec_packet != nullptr);
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ EXPECT_TRUE(rtp_sender_video.SendVideo(
+ kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
+ kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
+ ASSERT_TRUE(media_packet != nullptr);
+ ASSERT_TRUE(fec_packet != nullptr);
- flexfec_seq_num = fec_packet->SequenceNumber();
- rtp_egress()->SendPacket(media_packet.get(), PacedPacketInfo());
- rtp_egress()->SendPacket(fec_packet.get(), PacedPacketInfo());
+ flexfec_seq_num = fec_packet->SequenceNumber();
+ rtp_sender_context_->InjectPacket(std::move(media_packet), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo());
- ASSERT_EQ(2, transport_.packets_sent());
- const RtpPacketReceived& sent_media_packet = transport_.sent_packets_[0];
- EXPECT_EQ(kMediaPayloadType, sent_media_packet.PayloadType());
- EXPECT_EQ(kSeqNum, sent_media_packet.SequenceNumber());
- EXPECT_EQ(kSsrc, sent_media_packet.Ssrc());
- const RtpPacketReceived& sent_flexfec_packet = transport_.sent_packets_[1];
- EXPECT_EQ(kFlexfecPayloadType, sent_flexfec_packet.PayloadType());
- EXPECT_EQ(flexfec_seq_num, sent_flexfec_packet.SequenceNumber());
- EXPECT_EQ(kFlexFecSsrc, sent_flexfec_packet.Ssrc());
+ ASSERT_EQ(2, transport_.packets_sent());
+ const RtpPacketReceived& sent_media_packet = transport_.sent_packets_[0];
+ EXPECT_EQ(kMediaPayloadType, sent_media_packet.PayloadType());
+ EXPECT_EQ(kSeqNum, sent_media_packet.SequenceNumber());
+ EXPECT_EQ(kSsrc, sent_media_packet.Ssrc());
+ const RtpPacketReceived& sent_flexfec_packet = transport_.sent_packets_[1];
+ EXPECT_EQ(kFlexfecPayloadType, sent_flexfec_packet.PayloadType());
+ EXPECT_EQ(flexfec_seq_num, sent_flexfec_packet.SequenceNumber());
+ EXPECT_EQ(kFlexFecSsrc, sent_flexfec_packet.Ssrc());
}
TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
@@ -1326,26 +1419,30 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, &fake_clock_);
+ nullptr /* rtp_state */, clock_);
// Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.fec_generator = &flexfec_sender;
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ config.field_trials = &field_trials_;
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
rtp_sender()->SetSequenceNumber(kSeqNum);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
- video_config.fec_generator = &flexfec_sender;
+ if (!GetParam().deferred_fec) {
+ video_config.fec_generator = &flexfec_sender;
+ }
video_config.fec_type = flexfec_sender.GetFecType();
video_config.fec_overhead_bytes = flexfec_sender_.MaxPacketOverhead();
video_config.field_trials = &field_trials;
@@ -1356,7 +1453,11 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
params.fec_rate = 15;
params.max_fec_frames = 1;
params.fec_mask_type = kFecMaskRandom;
- flexfec_sender.SetProtectionParameters(params, params);
+ if (GetParam().deferred_fec) {
+ rtp_egress()->SetFecProtectionParameters(params, params);
+ } else {
+ flexfec_sender.SetProtectionParameters(params, params);
+ }
EXPECT_CALL(mock_rtc_event_log_,
LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)))
@@ -1364,9 +1465,8 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp,
- fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header,
- kDefaultExpectedRetransmissionTimeMs));
+ kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
+ kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
ASSERT_EQ(2, transport_.packets_sent());
const RtpPacketReceived& media_packet = transport_.sent_packets_[0];
@@ -1659,27 +1759,18 @@ TEST_P(RtpSenderTest, FecOverheadRate) {
const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, &fake_clock_);
+ nullptr /* rtp_state */, clock_);
- // Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
- config.outgoing_transport = &transport_;
- config.paced_sender = &mock_paced_sender_;
- config.local_media_ssrc = kSsrc;
- config.fec_generator = &flexfec_sender;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
-
- rtp_sender()->SetSequenceNumber(kSeqNum);
+ // Reset |rtp_sender_| to use this FlexFEC instance.
+ SetUpRtpSender(false, false, false, &flexfec_sender);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
- video_config.fec_generator = &flexfec_sender;
+ if (!GetParam().deferred_fec) {
+ video_config.fec_generator = &flexfec_sender;
+ }
video_config.fec_type = flexfec_sender.GetFecType();
video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead();
video_config.field_trials = &field_trials;
@@ -1689,22 +1780,24 @@ TEST_P(RtpSenderTest, FecOverheadRate) {
params.fec_rate = 15;
params.max_fec_frames = 1;
params.fec_mask_type = kFecMaskRandom;
- flexfec_sender.SetProtectionParameters(params, params);
+ if (GetParam().deferred_fec) {
+ rtp_egress()->SetFecProtectionParameters(params, params);
+ } else {
+ flexfec_sender.SetProtectionParameters(params, params);
+ }
constexpr size_t kNumMediaPackets = 10;
constexpr size_t kNumFecPackets = kNumMediaPackets;
constexpr int64_t kTimeBetweenPacketsMs = 10;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets).Times(kNumMediaPackets);
for (size_t i = 0; i < kNumMediaPackets; ++i) {
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp,
- fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header,
- kDefaultExpectedRetransmissionTimeMs));
+ kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
+ kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
- fake_clock_.AdvanceTimeMilliseconds(kTimeBetweenPacketsMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kTimeBetweenPacketsMs));
}
constexpr size_t kRtpHeaderLength = 12;
constexpr size_t kFlexfecHeaderLength = 20;
@@ -1712,9 +1805,20 @@ TEST_P(RtpSenderTest, FecOverheadRate) {
constexpr size_t kPayloadLength = sizeof(kPayloadData);
constexpr size_t kPacketLength = kRtpHeaderLength + kFlexfecHeaderLength +
kGenericCodecHeaderLength + kPayloadLength;
- EXPECT_NEAR(kNumFecPackets * kPacketLength * 8 /
- (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f),
- flexfec_sender.CurrentFecRate().bps<double>(), 500);
+
+ if (GetParam().deferred_fec) {
+ EXPECT_NEAR(
+ kNumFecPackets * kPacketLength * 8 /
+ (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f),
+ rtp_egress()
+ ->GetSendRates()[RtpPacketMediaType::kForwardErrorCorrection]
+ .bps<double>(),
+ 500);
+ } else {
+ EXPECT_NEAR(kNumFecPackets * kPacketLength * 8 /
+ (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f),
+ flexfec_sender.CurrentFecRate().bps<double>(), 500);
+ }
}
TEST_P(RtpSenderTest, BitrateCallbacks) {
@@ -1743,17 +1847,18 @@ TEST_P(RtpSenderTest, BitrateCallbacks) {
uint32_t retransmit_bitrate_;
} callback;
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.send_bitrate_observer = &callback;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -1775,28 +1880,23 @@ TEST_P(RtpSenderTest, BitrateCallbacks) {
RtpPacketHistory::StorageMode::kStoreAndCull, 1);
uint32_t ssrc = rtp_sender()->SSRC();
- // Initial process call so we get a new time window.
- rtp_egress()->ProcessBitrateAndNotifyObservers();
-
// Send a few frames.
RTPVideoHeader video_header;
for (uint32_t i = 0; i < kNumPackets; ++i) {
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, 1234, 4321, payload, nullptr, video_header,
+ kPayloadType, kCodecType, 1234, 4321, payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
- fake_clock_.AdvanceTimeMilliseconds(kPacketInterval);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kPacketInterval));
}
- rtp_egress()->ProcessBitrateAndNotifyObservers();
-
// We get one call for every stats updated, thus two calls since both the
// stream stats and the retransmit stats are updated once.
- EXPECT_EQ(2u, callback.num_calls_);
+ EXPECT_EQ(kNumPackets, callback.num_calls_);
EXPECT_EQ(ssrc, callback.ssrc_);
const uint32_t kTotalPacketSize = kPacketOverhead + sizeof(payload);
// Bitrate measured over delta between last and first timestamp, plus one.
- const uint32_t kExpectedWindowMs = kNumPackets * kPacketInterval + 1;
+ const uint32_t kExpectedWindowMs = (kNumPackets - 1) * kPacketInterval + 1;
const uint32_t kExpectedBitsAccumulated = kTotalPacketSize * kNumPackets * 8;
const uint32_t kExpectedRateBps =
(kExpectedBitsAccumulated * 1000 + (kExpectedWindowMs / 2)) /
@@ -1809,7 +1909,7 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -1822,7 +1922,7 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, nullptr, video_header,
+ payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
StreamDataCounters expected;
expected.transmitted.payload_bytes = 6;
@@ -1862,15 +1962,17 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacksUlpfec) {
const uint8_t kUlpfecPayloadType = 97;
const uint8_t kPayloadType = 127;
const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- FieldTrialBasedConfig field_trials;
- UlpfecGenerator ulpfec_generator(kRedPayloadType, kUlpfecPayloadType,
- &fake_clock_);
+
+ UlpfecGenerator ulpfec_generator(kRedPayloadType, kUlpfecPayloadType, clock_);
+ SetUpRtpSender(false, false, false, &ulpfec_generator);
RTPSenderVideo::Config video_config;
- video_config.clock = &fake_clock_;
+ video_config.clock = clock_;
video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
+ video_config.field_trials = &field_trials_;
video_config.red_payload_type = kRedPayloadType;
- video_config.fec_generator = &ulpfec_generator;
+ if (!GetParam().deferred_fec) {
+ video_config.fec_generator = &ulpfec_generator;
+ }
video_config.fec_type = ulpfec_generator.GetFecType();
video_config.fec_overhead_bytes = ulpfec_generator.MaxPacketOverhead();
RTPSenderVideo rtp_sender_video(video_config);
@@ -1887,10 +1989,14 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacksUlpfec) {
fec_params.fec_mask_type = kFecMaskRandom;
fec_params.fec_rate = 1;
fec_params.max_fec_frames = 1;
- ulpfec_generator.SetProtectionParameters(fec_params, fec_params);
+ if (GetParam().deferred_fec) {
+ rtp_egress()->SetFecProtectionParameters(fec_params, fec_params);
+ } else {
+ ulpfec_generator.SetProtectionParameters(fec_params, fec_params);
+ }
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, nullptr, video_header,
+ payload, video_header,
kDefaultExpectedRetransmissionTimeMs));
expected.transmitted.payload_bytes = 28;
expected.transmitted.header_bytes = 24;
@@ -1949,12 +2055,12 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
std::vector<uint16_t> sequence_numbers;
for (int32_t i = 0; i < kNumPackets; ++i) {
sequence_numbers.push_back(kStartSequenceNumber + i);
- fake_clock_.AdvanceTimeMilliseconds(1);
- SendPacket(fake_clock_.TimeInMilliseconds(), kPacketSize);
+ time_controller_.AdvanceTime(TimeDelta::Millis(1));
+ SendPacket(clock_->TimeInMilliseconds(), kPacketSize);
}
EXPECT_EQ(kNumPackets, transport_.packets_sent());
- fake_clock_.AdvanceTimeMilliseconds(1000 - kNumPackets);
+ time_controller_.AdvanceTime(TimeDelta::Millis(1000 - kNumPackets));
// Resending should work - brings the bandwidth up to the limit.
// NACK bitrate is capped to the same bitrate as the encoder, since the max
@@ -1963,7 +2069,7 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
EXPECT_EQ(kNumPackets * 2, transport_.packets_sent());
// Must be at least 5ms in between retransmission attempts.
- fake_clock_.AdvanceTimeMilliseconds(5);
+ time_controller_.AdvanceTime(TimeDelta::Millis(5));
// Resending should not work, bandwidth exceeded.
rtp_sender()->OnReceivedNack(sequence_numbers, 0);
@@ -1971,12 +2077,13 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
}
TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
// Base RTP overhead is 12B.
EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
@@ -1987,12 +2094,13 @@ TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
}
TEST_P(RtpSenderTest, OnOverheadChanged) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
// Base RTP overhead is 12B.
EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
@@ -2006,12 +2114,13 @@ TEST_P(RtpSenderTest, OnOverheadChanged) {
}
TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) {
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
// Base RTP overhead is 12B.
EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
@@ -2032,95 +2141,120 @@ TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) {
EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
}
+TEST_P(RtpSenderTest, DontCountVolatileExtensionsIntoOverhead) {
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
+ config.outgoing_transport = &transport_;
+ config.local_media_ssrc = kSsrc;
+ config.retransmission_rate_limiter = &retransmission_rate_limiter_;
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
+
+ // Base RTP overhead is 12B.
+ EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionInbandComfortNoise, 1);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteCaptureTime, 2);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoRotation, 3);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionPlayoutDelay, 4);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoContentType, 5);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoTiming, 6);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionRepairedRtpStreamId, 7);
+ rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionColorSpace, 8);
+
+ // Still only 12B counted since can't count on above being sent.
+ EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+}
+
TEST_P(RtpSenderTest, SendPacketMatchesVideo) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kVideo);
// Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kSsrc);
packet->set_packet_type(RtpPacketMediaType::kVideo);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
}
TEST_P(RtpSenderTest, SendPacketMatchesAudio) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kAudio);
// Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kSsrc);
packet->set_packet_type(RtpPacketMediaType::kAudio);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
}
TEST_P(RtpSenderTest, SendPacketMatchesRetransmissions) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
// Verify sent with correct SSRC (non-RTX).
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kSsrc);
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
// RTX retransmission.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kRtxSsrc);
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 2);
}
TEST_P(RtpSenderTest, SendPacketMatchesPadding) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kPadding);
// Verify sent with correct SSRC (non-RTX).
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kSsrc);
packet->set_packet_type(RtpPacketMediaType::kPadding);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
// RTX padding.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kRtxSsrc);
packet->set_packet_type(RtpPacketMediaType::kPadding);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 2);
}
TEST_P(RtpSenderTest, SendPacketMatchesFlexfec) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
// Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kFlexFecSsrc);
packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
}
TEST_P(RtpSenderTest, SendPacketMatchesUlpfec) {
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
// Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetSsrc(kSsrc);
packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.packets_sent(), 1);
}
@@ -2128,37 +2262,41 @@ TEST_P(RtpSenderTest, SendPacketHandlesRetransmissionHistory) {
rtp_sender_context_->packet_history_.SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+ // Ignore calls to EnqueuePackets() for this test.
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets).WillRepeatedly(Return());
+
// Build a media packet and send it.
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
const uint16_t media_sequence_number = packet->SequenceNumber();
packet->set_packet_type(RtpPacketMediaType::kVideo);
packet->set_allow_retransmission(true);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Simulate retransmission request.
- fake_clock_.AdvanceTimeMilliseconds(30);
+ time_controller_.AdvanceTime(TimeDelta::Millis(30));
EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0);
// Packet already pending, retransmission not allowed.
- fake_clock_.AdvanceTimeMilliseconds(30);
+ time_controller_.AdvanceTime(TimeDelta::Millis(30));
EXPECT_EQ(rtp_sender()->ReSendPacket(media_sequence_number), 0);
// Packet exiting pacer, mark as not longer pending.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
EXPECT_NE(packet->SequenceNumber(), media_sequence_number);
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
packet->SetSsrc(kRtxSsrc);
packet->set_retransmitted_sequence_number(media_sequence_number);
packet->set_allow_retransmission(false);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ uint16_t seq_no = packet->SequenceNumber();
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Retransmissions allowed again.
- fake_clock_.AdvanceTimeMilliseconds(30);
+ time_controller_.AdvanceTime(TimeDelta::Millis(30));
EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0);
// Retransmission of RTX packet should not be allowed.
- EXPECT_EQ(rtp_sender()->ReSendPacket(packet->SequenceNumber()), 0);
+ EXPECT_EQ(rtp_sender()->ReSendPacket(seq_no), 0);
}
TEST_P(RtpSenderTest, SendPacketUpdatesExtensions) {
@@ -2174,21 +2312,21 @@ TEST_P(RtpSenderTest, SendPacketUpdatesExtensions) {
0);
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
- packet->set_packetization_finish_time_ms(fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
+ packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds());
const int32_t kDiffMs = 10;
- fake_clock_.AdvanceTimeMilliseconds(kDiffMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
packet->set_packet_type(RtpPacketMediaType::kVideo);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
const RtpPacketReceived& received_packet = transport_.last_sent_packet();
EXPECT_EQ(received_packet.GetExtension<TransmissionOffset>(), kDiffMs * 90);
EXPECT_EQ(received_packet.GetExtension<AbsoluteSendTime>(),
- AbsoluteSendTime::MsTo24Bits(fake_clock_.TimeInMilliseconds()));
+ AbsoluteSendTime::MsTo24Bits(clock_->TimeInMilliseconds()));
VideoSendTiming timing;
EXPECT_TRUE(received_packet.GetExtension<VideoTimingExtension>(&timing));
@@ -2202,12 +2340,12 @@ TEST_P(RtpSenderTest, SendPacketSetsPacketOptions) {
kTransportSequenceNumberExtensionId),
0);
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetExtension<TransportSequenceNumber>(kPacketId);
packet->set_packet_type(RtpPacketMediaType::kVideo);
EXPECT_CALL(send_packet_observer_, OnSendPacket);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_EQ(transport_.last_options_.packet_id, kPacketId);
EXPECT_TRUE(transport_.last_options_.included_in_allocation);
@@ -2215,10 +2353,10 @@ TEST_P(RtpSenderTest, SendPacketSetsPacketOptions) {
EXPECT_FALSE(transport_.last_options_.is_retransmit);
// Send another packet as retransmission, verify options are populated.
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->SetExtension<TransportSequenceNumber>(kPacketId + 1);
packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
EXPECT_TRUE(transport_.last_options_.is_retransmit);
}
@@ -2227,8 +2365,8 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) {
StrictMock<MockSendSideDelayObserver> send_side_delay_observer;
- RtpRtcp::Configuration config;
- config.clock = &fake_clock_;
+ RtpRtcpInterface::Configuration config;
+ config.clock = clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.rtx_send_ssrc = kRtxSsrc;
@@ -2236,35 +2374,36 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) {
config.send_side_delay_observer = &send_side_delay_observer;
config.event_log = &mock_rtc_event_log_;
config.send_packet_observer = &send_packet_observer_;
- rtp_sender_context_ = std::make_unique<RtpSenderContext>(config);
+ rtp_sender_context_ =
+ std::make_unique<RtpSenderContext>(config, &time_controller_);
ASSERT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension(
kRtpExtensionTransportSequenceNumber,
kTransportSequenceNumberExtensionId));
- const int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
+ const int64_t capture_time_ms = clock_->TimeInMilliseconds();
std::unique_ptr<RtpPacketToSend> video_packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
video_packet->set_packet_type(RtpPacketMediaType::kVideo);
video_packet->SetPayloadSize(kPayloadSize);
video_packet->SetExtension<TransportSequenceNumber>(1);
std::unique_ptr<RtpPacketToSend> rtx_packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
rtx_packet->SetSsrc(kRtxSsrc);
rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission);
rtx_packet->SetPayloadSize(kPayloadSize);
rtx_packet->SetExtension<TransportSequenceNumber>(2);
std::unique_ptr<RtpPacketToSend> fec_packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
fec_packet->SetSsrc(kFlexFecSsrc);
fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
fec_packet->SetPayloadSize(kPayloadSize);
fec_packet->SetExtension<TransportSequenceNumber>(3);
const int64_t kDiffMs = 25;
- fake_clock_.AdvanceTimeMilliseconds(kDiffMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
EXPECT_CALL(send_side_delay_observer,
SendSideDelayUpdated(kDiffMs, kDiffMs, kDiffMs, kSsrc));
@@ -2274,15 +2413,15 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) {
EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time_ms, kSsrc));
- rtp_egress()->SendPacket(video_packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(video_packet), PacedPacketInfo());
// Send packet observer not called for padding/retransmissions.
EXPECT_CALL(send_packet_observer_, OnSendPacket(2, _, _)).Times(0);
- rtp_egress()->SendPacket(rtx_packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(rtx_packet), PacedPacketInfo());
EXPECT_CALL(send_packet_observer_,
OnSendPacket(3, capture_time_ms, kFlexFecSsrc));
- rtp_egress()->SendPacket(fec_packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo());
StreamDataCounters rtp_stats;
StreamDataCounters rtx_stats;
@@ -2314,12 +2453,12 @@ TEST_P(RtpSenderTest, GeneratedPaddingHasBweExtensions) {
// Send a payload packet first, to enable padding and populate the packet
// history.
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kMinPaddingSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Generate a plain padding packet, check that extensions are registered.
std::vector<std::unique_ptr<RtpPacketToSend>> generated_packets =
@@ -2332,7 +2471,8 @@ TEST_P(RtpSenderTest, GeneratedPaddingHasBweExtensions) {
EXPECT_TRUE(plain_padding->HasExtension<TransmissionOffset>());
// Verify all header extensions have been written.
- rtp_egress()->SendPacket(plain_padding.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(plain_padding),
+ PacedPacketInfo());
const auto& sent_plain_padding = transport_.last_sent_packet();
EXPECT_TRUE(sent_plain_padding.HasExtension<TransportSequenceNumber>());
EXPECT_TRUE(sent_plain_padding.HasExtension<AbsoluteSendTime>());
@@ -2353,7 +2493,8 @@ TEST_P(RtpSenderTest, GeneratedPaddingHasBweExtensions) {
EXPECT_TRUE(payload_padding->HasExtension<TransmissionOffset>());
// Verify all header extensions have been written.
- rtp_egress()->SendPacket(payload_padding.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(payload_padding),
+ PacedPacketInfo());
const auto& sent_payload_padding = transport_.last_sent_packet();
EXPECT_TRUE(sent_payload_padding.HasExtension<TransportSequenceNumber>());
EXPECT_TRUE(sent_payload_padding.HasExtension<AbsoluteSendTime>());
@@ -2379,14 +2520,14 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
const size_t kPayloadPacketSize = kMinPaddingSize;
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
// Send a dummy video packet so it ends up in the packet history.
EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Generated padding has large enough budget that the video packet should be
// retransmitted as padding.
@@ -2434,12 +2575,12 @@ TEST_P(RtpSenderTest, LimitsPayloadPaddingSize) {
// Send a dummy video packet so it ends up in the packet history.
const size_t kPayloadPacketSize = 1234u;
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Smallest target size that will result in the sent packet being returned as
// padding.
@@ -2478,12 +2619,12 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
// Send a dummy video packet so it ends up in the packet history. Since we
// are not using RTX, it should never be used as padding.
std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds());
+ BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
// Payload padding not available without RTX, only generate plain padding on
// the media SSRC.
@@ -2508,7 +2649,7 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
EXPECT_TRUE(packet->HasExtension<TransmissionOffset>());
// Verify all header extensions are received.
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
webrtc::RTPHeader rtp_header;
transport_.last_sent_packet().GetHeader(&rtp_header);
EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
@@ -2572,54 +2713,55 @@ TEST_P(RtpSenderTest, SetsCaptureTimeAndPopulatesTransmissionOffset) {
const uint32_t kTimestampTicksPerMs = 90;
const int64_t kOffsetMs = 10;
- auto packet =
- BuildRtpPacket(kPayload, kMarkerBit, fake_clock_.TimeInMilliseconds(),
- kMissingCaptureTimeMs);
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->ReserveExtension<TransmissionOffset>();
- packet->AllocatePayload(sizeof(kPayloadData));
-
- std::unique_ptr<RtpPacketToSend> packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- EXPECT_EQ(packets.size(), 1u);
- EXPECT_GT(packets[0]->capture_time_ms(), 0);
- packet_to_pace = std::move(packets[0]);
- });
-
- packet->set_allow_retransmission(true);
- EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet)));
+ auto packet =
+ BuildRtpPacket(kPayload, kMarkerBit, clock_->TimeInMilliseconds(),
+ kMissingCaptureTimeMs);
+ packet->set_packet_type(RtpPacketMediaType::kVideo);
+ packet->ReserveExtension<TransmissionOffset>();
+ packet->AllocatePayload(sizeof(kPayloadData));
- fake_clock_.AdvanceTimeMilliseconds(kOffsetMs);
+ std::unique_ptr<RtpPacketToSend> packet_to_pace;
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ EXPECT_EQ(packets.size(), 1u);
+ EXPECT_GT(packets[0]->capture_time_ms(), 0);
+ packet_to_pace = std::move(packets[0]);
+ });
- rtp_egress()->SendPacket(packet_to_pace.get(), PacedPacketInfo());
+ packet->set_allow_retransmission(true);
+ EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet)));
- EXPECT_EQ(1, transport_.packets_sent());
- absl::optional<int32_t> transmission_time_extension =
- transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
- ASSERT_TRUE(transmission_time_extension.has_value());
- EXPECT_EQ(*transmission_time_extension, kOffsetMs * kTimestampTicksPerMs);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs));
- // Retransmit packet. The RTX packet should get the same capture time as the
- // original packet, so offset is delta from original packet to now.
- fake_clock_.AdvanceTimeMilliseconds(kOffsetMs);
+ rtp_sender_context_->InjectPacket(std::move(packet_to_pace),
+ PacedPacketInfo());
- std::unique_ptr<RtpPacketToSend> rtx_packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- EXPECT_GT(packets[0]->capture_time_ms(), 0);
- rtx_packet_to_pace = std::move(packets[0]);
- });
-
- EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0);
- rtp_egress()->SendPacket(rtx_packet_to_pace.get(), PacedPacketInfo());
+ EXPECT_EQ(1, transport_.packets_sent());
+ absl::optional<int32_t> transmission_time_extension =
+ transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
+ ASSERT_TRUE(transmission_time_extension.has_value());
+ EXPECT_EQ(*transmission_time_extension, kOffsetMs * kTimestampTicksPerMs);
+
+ // Retransmit packet. The RTX packet should get the same capture time as the
+ // original packet, so offset is delta from original packet to now.
+ time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs));
+
+ std::unique_ptr<RtpPacketToSend> rtx_packet_to_pace;
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ EXPECT_GT(packets[0]->capture_time_ms(), 0);
+ rtx_packet_to_pace = std::move(packets[0]);
+ });
+
+ EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0);
+ rtp_sender_context_->InjectPacket(std::move(rtx_packet_to_pace),
+ PacedPacketInfo());
- EXPECT_EQ(2, transport_.packets_sent());
- transmission_time_extension =
- transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
- ASSERT_TRUE(transmission_time_extension.has_value());
- EXPECT_EQ(*transmission_time_extension,
- 2 * kOffsetMs * kTimestampTicksPerMs);
+ EXPECT_EQ(2, transport_.packets_sent());
+ transmission_time_extension =
+ transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
+ ASSERT_TRUE(transmission_time_extension.has_value());
+ EXPECT_EQ(*transmission_time_extension, 2 * kOffsetMs * kTimestampTicksPerMs);
}
TEST_P(RtpSenderTestWithoutPacer, ClearHistoryOnSequenceNumberCange) {
@@ -2641,13 +2783,13 @@ TEST_P(RtpSenderTestWithoutPacer, ClearHistoryOnSequenceNumberCange) {
// Advance time and make sure it can be retransmitted, even if we try to set
// the ssrc the what it already is.
rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber());
- fake_clock_.AdvanceTimeMilliseconds(kRtt);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
EXPECT_GT(rtp_sender()->ReSendPacket(packet_seqence_number), 0);
// Change the sequence number, then move the time and try to retransmit again.
// The old packet should now be gone.
rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber() - 1);
- fake_clock_.AdvanceTimeMilliseconds(kRtt);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
EXPECT_EQ(rtp_sender()->ReSendPacket(packet_seqence_number), 0);
}
@@ -2669,24 +2811,29 @@ TEST_P(RtpSenderTest, IgnoresNackAfterDisablingMedia) {
});
SendGenericPacket();
- rtp_egress()->SendPacket(packet_to_pace.get(), PacedPacketInfo());
+ rtp_sender_context_->InjectPacket(std::move(packet_to_pace),
+ PacedPacketInfo());
ASSERT_EQ(1u, transport_.sent_packets_.size());
// Disable media sending and try to retransmit the packet, it should fail.
rtp_sender()->SetSendingMediaStatus(false);
- fake_clock_.AdvanceTimeMilliseconds(kRtt);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
EXPECT_LT(rtp_sender()->ReSendPacket(kSeqNum), 0);
}
INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
RtpSenderTest,
- ::testing::Values(TestConfig{false},
- TestConfig{true}));
+ ::testing::Values(TestConfig{false, false},
+ TestConfig{false, true},
+ TestConfig{true, false},
+ TestConfig{false, false}));
INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
RtpSenderTestWithoutPacer,
- ::testing::Values(TestConfig{false},
- TestConfig{true}));
+ ::testing::Values(TestConfig{false, false},
+ TestConfig{false, true},
+ TestConfig{true, false},
+ TestConfig{false, false}));
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc
index b903b9f001..0f3e8b9966 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -144,7 +144,8 @@ RTPSenderVideo::RTPSenderVideo(const Config& config)
RTPSenderVideoFrameTransformerDelegate>(
this,
config.frame_transformer,
- rtp_sender_->SSRC())
+ rtp_sender_->SSRC(),
+ config.send_transport_queue)
: nullptr) {
if (frame_transformer_delegate_)
frame_transformer_delegate_->Init();
@@ -174,7 +175,7 @@ void RTPSenderVideo::LogAndSendToNetwork(
#endif
{
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
size_t packetized_payload_size = 0;
for (const auto& packet : packets) {
if (*packet->packet_type() == RtpPacketMediaType::kVideo) {
@@ -182,7 +183,8 @@ void RTPSenderVideo::LogAndSendToNetwork(
packetized_payload_size += packet->payload_size();
}
}
- // AV1 packetizer may produce less packetized bytes than unpacketized.
+ // AV1 and H264 packetizers may produce less packetized bytes than
+ // unpacketized.
if (packetized_payload_size >= unpacketized_payload_size) {
packetization_overhead_bitrate_.Update(
packetized_payload_size - unpacketized_payload_size,
@@ -251,8 +253,6 @@ void RTPSenderVideo::SetVideoStructureUnderLock(
video_structure_ =
std::make_unique<FrameDependencyStructure>(*video_structure);
video_structure_->structure_id = structure_id;
- // TODO(bugs.webrtc.org/10342): Support chains.
- video_structure_->num_chains = 0;
}
void RTPSenderVideo::AddRtpHeaderExtensions(
@@ -313,14 +313,6 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
packet->SetExtension<AbsoluteCaptureTimeExtension>(*absolute_capture_time);
}
- if (video_header.codec == kVideoCodecH264 &&
- video_header.frame_marking.temporal_id != kNoTemporalIdx) {
- FrameMarking frame_marking = video_header.frame_marking;
- frame_marking.start_of_frame = first_packet;
- frame_marking.end_of_frame = last_packet;
- packet->SetExtension<FrameMarkingExtension>(frame_marking);
- }
-
if (video_header.generic) {
bool extension_is_set = false;
if (video_structure_ != nullptr) {
@@ -336,12 +328,18 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
descriptor.frame_dependencies.frame_diffs.push_back(
video_header.generic->frame_id - dep);
}
+ descriptor.frame_dependencies.chain_diffs =
+ video_header.generic->chain_diffs;
descriptor.frame_dependencies.decode_target_indications =
video_header.generic->decode_target_indications;
RTC_DCHECK_EQ(
descriptor.frame_dependencies.decode_target_indications.size(),
video_structure_->num_decode_targets);
+ if (first_packet) {
+ descriptor.active_decode_targets_bitmask =
+ active_decode_targets_tracker_.ActiveDecodeTargetsBitmask();
+ }
// To avoid extra structure copy, temporary share ownership of the
// video_structure with the dependency descriptor.
if (video_header.frame_type == VideoFrameType::kVideoFrameKey &&
@@ -350,7 +348,8 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
absl::WrapUnique(video_structure_.get());
}
extension_is_set = packet->SetExtension<RtpDependencyDescriptorExtension>(
- *video_structure_, descriptor);
+ *video_structure_,
+ active_decode_targets_tracker_.ActiveChainsBitmask(), descriptor);
// Remove the temporary shared ownership.
descriptor.attached_structure.release();
@@ -394,7 +393,6 @@ bool RTPSenderVideo::SendVideo(
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
#if RTC_TRACE_EVENTS_ENABLED
@@ -422,6 +420,14 @@ bool RTPSenderVideo::SendVideo(
playout_delay_pending_ = true;
}
+ if (video_structure_ != nullptr && video_header.generic) {
+ active_decode_targets_tracker_.OnFrame(
+ video_structure_->decode_target_protected_by_chain,
+ video_header.generic->active_decode_targets,
+ video_header.frame_type == VideoFrameType::kVideoFrameKey,
+ video_header.generic->frame_id, video_header.generic->chain_diffs);
+ }
+
// Maximum size of packet including rtp headers.
// Extra space left in case packet will be resent using fec or rtx.
int packet_capacity = rtp_sender_->MaxRtpPacketSize() - FecPacketOverhead() -
@@ -522,8 +528,8 @@ bool RTPSenderVideo::SendVideo(
"one is required since require_frame_encryptor is set";
}
- std::unique_ptr<RtpPacketizer> packetizer = RtpPacketizer::Create(
- codec_type, payload, limits, video_header, fragmentation);
+ std::unique_ptr<RtpPacketizer> packetizer =
+ RtpPacketizer::Create(codec_type, payload, limits, video_header);
// TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be
// replaced by expected_retransmission_time_ms.has_value(). For now, though,
@@ -535,16 +541,6 @@ bool RTPSenderVideo::SendVideo(
: false;
const size_t num_packets = packetizer->NumPackets();
- size_t unpacketized_payload_size;
- if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
- unpacketized_payload_size = 0;
- for (uint16_t i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
- unpacketized_payload_size += fragmentation->fragmentationLength[i];
- }
- } else {
- unpacketized_payload_size = payload.size();
- }
-
if (num_packets == 0)
return false;
@@ -592,8 +588,8 @@ bool RTPSenderVideo::SendVideo(
if (fec_generator_) {
fec_generator_->AddPacketAndGenerateFec(*packet);
} else {
- // TODO(sprang): When deferred FEC generation is enabled, just mark the
- // packet as protected here.
+ // Deferred FEC generation, just mark packet.
+ packet->set_fec_protect_packet(true);
}
}
@@ -601,6 +597,7 @@ bool RTPSenderVideo::SendVideo(
std::unique_ptr<RtpPacketToSend> red_packet(new RtpPacketToSend(*packet));
BuildRedPayload(*packet, red_packet.get());
red_packet->SetPayloadType(*red_payload_type_);
+ red_packet->set_is_red(true);
// Send |red_packet| instead of |packet| for allocated sequence number.
red_packet->set_packet_type(RtpPacketMediaType::kVideo);
@@ -636,7 +633,7 @@ bool RTPSenderVideo::SendVideo(
}
}
- LogAndSendToNetwork(std::move(rtp_packets), unpacketized_payload_size);
+ LogAndSendToNetwork(std::move(rtp_packets), payload.size());
// Update details about the last sent frame.
last_rotation_ = video_header.rotation;
@@ -671,27 +668,26 @@ bool RTPSenderVideo::SendEncodedImage(
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
if (frame_transformer_delegate_) {
// The frame will be sent async once transformed.
return frame_transformer_delegate_->TransformFrame(
- payload_type, codec_type, rtp_timestamp, encoded_image, fragmentation,
- video_header, expected_retransmission_time_ms);
+ payload_type, codec_type, rtp_timestamp, encoded_image, video_header,
+ expected_retransmission_time_ms);
}
return SendVideo(payload_type, codec_type, rtp_timestamp,
- encoded_image.capture_time_ms_, encoded_image, fragmentation,
- video_header, expected_retransmission_time_ms);
+ encoded_image.capture_time_ms_, encoded_image, video_header,
+ expected_retransmission_time_ms);
}
uint32_t RTPSenderVideo::VideoBitrateSent() const {
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
return video_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0);
}
uint32_t RTPSenderVideo::PacketizationOverheadBps() const {
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
return packetization_overhead_bitrate_.Rate(clock_->TimeInMilliseconds())
.value_or(0);
}
@@ -703,7 +699,7 @@ bool RTPSenderVideo::AllowRetransmission(
if (retransmission_settings == kRetransmitOff)
return false;
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
// Media packet storage.
if ((retransmission_settings & kConditionallyRetransmitHigherLayers) &&
UpdateConditionalRetransmit(temporal_id,
@@ -735,12 +731,7 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) {
}
uint8_t operator()(const absl::monostate&) { return kNoTemporalIdx; }
};
- switch (header.codec) {
- case kVideoCodecH264:
- return header.frame_marking.temporal_id;
- default:
- return absl::visit(TemporalIdGetter(), header.video_type_header);
- }
+ return absl::visit(TemporalIdGetter(), header.video_type_header);
}
bool RTPSenderVideo::UpdateConditionalRetransmit(
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h
index bf5f181823..57f8fcc7ac 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -20,26 +20,29 @@
#include "api/array_view.h"
#include "api/frame_transformer_interface.h"
#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/transport/rtp/dependency_descriptor.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_frame_type.h"
-#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_sender.h"
+#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "modules/rtp_rtcp/source/video_fec_generator.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecation.h"
#include "rtc_base/one_time_event.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
+class RTPFragmentationHeader;
class FrameEncryptorInterface;
class RtpPacketizer;
class RtpPacketToSend;
@@ -81,12 +84,26 @@ class RTPSenderVideo {
absl::optional<int> red_payload_type;
const WebRtcKeyValueConfig* field_trials = nullptr;
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
+ TaskQueueBase* send_transport_queue = nullptr;
};
explicit RTPSenderVideo(const Config& config);
virtual ~RTPSenderVideo();
+ RTC_DEPRECATED
+ bool SendVideo(int payload_type,
+ absl::optional<VideoCodecType> codec_type,
+ uint32_t rtp_timestamp,
+ int64_t capture_time_ms,
+ rtc::ArrayView<const uint8_t> payload,
+ const RTPFragmentationHeader* /*fragmentation*/,
+ RTPVideoHeader video_header,
+ absl::optional<int64_t> expected_retransmission_time_ms) {
+ return SendVideo(payload_type, codec_type, rtp_timestamp, capture_time_ms,
+ payload, video_header, expected_retransmission_time_ms);
+ }
+
// expected_retransmission_time_ms.has_value() -> retransmission allowed.
// Calls to this method is assumed to be externally serialized.
bool SendVideo(int payload_type,
@@ -94,7 +111,6 @@ class RTPSenderVideo {
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);
@@ -103,7 +119,6 @@ class RTPSenderVideo {
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);
@@ -159,7 +174,7 @@ class RTPSenderVideo {
bool UpdateConditionalRetransmit(uint8_t temporal_id,
int64_t expected_retransmission_time_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(stats_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(stats_mutex_);
void MaybeUpdateCurrentPlayoutDelay(const RTPVideoHeader& header)
RTC_EXCLUSIVE_LOCKS_REQUIRED(send_checker_);
@@ -185,20 +200,20 @@ class RTPSenderVideo {
bool playout_delay_pending_;
// Should never be held when calling out of this class.
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const absl::optional<int> red_payload_type_;
VideoFecGenerator* const fec_generator_;
absl::optional<VideoFecGenerator::FecType> fec_type_;
const size_t fec_overhead_bytes_; // Per packet max FEC overhead.
- rtc::CriticalSection stats_crit_;
+ mutable Mutex stats_mutex_;
// Bitrate used for video payload and RTP headers.
- RateStatistics video_bitrate_ RTC_GUARDED_BY(stats_crit_);
- RateStatistics packetization_overhead_bitrate_ RTC_GUARDED_BY(stats_crit_);
+ RateStatistics video_bitrate_ RTC_GUARDED_BY(stats_mutex_);
+ RateStatistics packetization_overhead_bitrate_ RTC_GUARDED_BY(stats_mutex_);
std::map<int, TemporalLayerStats> frame_stats_by_temporal_layer_
- RTC_GUARDED_BY(stats_crit_);
+ RTC_GUARDED_BY(stats_mutex_);
OneTimeEvent first_frame_sent_;
@@ -212,6 +227,9 @@ class RTPSenderVideo {
const bool generic_descriptor_auth_experiment_;
AbsoluteCaptureTimeSender absolute_capture_time_sender_;
+ // Tracks updates to the active decode targets and decides when active decode
+ // targets bitmask should be attached to the dependency descriptor.
+ ActiveDecodeTargetsHelper active_decode_targets_tracker_;
const rtc::scoped_refptr<RTPSenderVideoFrameTransformerDelegate>
frame_transformer_delegate_;
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 25ebd1b64c..786e46777a 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -21,15 +21,6 @@
namespace webrtc {
namespace {
-std::unique_ptr<RTPFragmentationHeader> CreateFragmentationHeader(
- const RTPFragmentationHeader* fragmentation_header) {
- if (!fragmentation_header)
- return nullptr;
- auto ret = std::make_unique<RTPFragmentationHeader>();
- ret->CopyFrom(*fragmentation_header);
- return ret;
-}
-
class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
public:
TransformableVideoSenderFrame(
@@ -38,20 +29,18 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
- const RTPFragmentationHeader* fragmentation_header,
absl::optional<int64_t> expected_retransmission_time_ms,
uint32_t ssrc)
: encoded_data_(encoded_image.GetEncodedData()),
header_(video_header),
+ metadata_(header_),
frame_type_(encoded_image._frameType),
payload_type_(payload_type),
codec_type_(codec_type),
timestamp_(rtp_timestamp),
capture_time_ms_(encoded_image.capture_time_ms_),
expected_retransmission_time_ms_(expected_retransmission_time_ms),
- ssrc_(ssrc),
- fragmentation_header_(CreateFragmentationHeader(fragmentation_header)) {
- }
+ ssrc_(ssrc) {}
~TransformableVideoSenderFrame() override = default;
@@ -75,15 +64,13 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
return RtpDescriptorAuthentication(header_);
}
+ const VideoFrameMetadata& GetMetadata() const override { return metadata_; }
+
const RTPVideoHeader& GetHeader() const { return header_; }
int GetPayloadType() const { return payload_type_; }
absl::optional<VideoCodecType> GetCodecType() const { return codec_type_; }
int64_t GetCaptureTimeMs() const { return capture_time_ms_; }
- RTPFragmentationHeader* GetFragmentationHeader() const {
- return fragmentation_header_.get();
- }
-
const absl::optional<int64_t>& GetExpectedRetransmissionTimeMs() const {
return expected_retransmission_time_ms_;
}
@@ -91,6 +78,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
private:
rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
const RTPVideoHeader header_;
+ const VideoFrameMetadata metadata_;
const VideoFrameType frame_type_;
const int payload_type_;
const absl::optional<VideoCodecType> codec_type_ = absl::nullopt;
@@ -98,17 +86,18 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
const int64_t capture_time_ms_;
const absl::optional<int64_t> expected_retransmission_time_ms_;
const uint32_t ssrc_;
- const std::unique_ptr<RTPFragmentationHeader> fragmentation_header_;
};
} // namespace
RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate(
RTPSenderVideo* sender,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
- uint32_t ssrc)
+ uint32_t ssrc,
+ TaskQueueBase* send_transport_queue)
: sender_(sender),
frame_transformer_(std::move(frame_transformer)),
- ssrc_(ssrc) {}
+ ssrc_(ssrc),
+ send_transport_queue_(send_transport_queue) {}
void RTPSenderVideoFrameTransformerDelegate::Init() {
frame_transformer_->RegisterTransformedFrameSinkCallback(
@@ -120,20 +109,25 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
- if (!encoder_queue_)
- encoder_queue_ = TaskQueueBase::Current();
+ if (!encoder_queue_) {
+ // Save the current task queue to post the transformed frame for sending
+ // once it is transformed. When there is no current task queue, i.e.
+ // encoding is done on an external thread (for example in the case of
+ // hardware encoders), use the send transport queue instead.
+ TaskQueueBase* current = TaskQueueBase::Current();
+ encoder_queue_ = current ? current : send_transport_queue_;
+ }
frame_transformer_->Transform(std::make_unique<TransformableVideoSenderFrame>(
encoded_image, video_header, payload_type, codec_type, rtp_timestamp,
- fragmentation, expected_retransmission_time_ms, ssrc_));
+ expected_retransmission_time_ms, ssrc_));
return true;
}
void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) {
- rtc::CritScope lock(&sender_lock_);
+ MutexLock lock(&sender_lock_);
// The encoder queue gets destroyed after the sender; as long as the sender is
// alive, it's safe to post.
@@ -149,7 +143,7 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame(
void RTPSenderVideoFrameTransformerDelegate::SendVideo(
std::unique_ptr<TransformableFrameInterface> transformed_frame) const {
RTC_CHECK(encoder_queue_->IsCurrent());
- rtc::CritScope lock(&sender_lock_);
+ MutexLock lock(&sender_lock_);
if (!sender_)
return;
auto* transformed_video_frame =
@@ -160,14 +154,13 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo(
transformed_video_frame->GetTimestamp(),
transformed_video_frame->GetCaptureTimeMs(),
transformed_video_frame->GetData(),
- transformed_video_frame->GetFragmentationHeader(),
transformed_video_frame->GetHeader(),
transformed_video_frame->GetExpectedRetransmissionTimeMs());
}
void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock(
const FrameDependencyStructure* video_structure) {
- rtc::CritScope lock(&sender_lock_);
+ MutexLock lock(&sender_lock_);
RTC_CHECK(sender_);
sender_->SetVideoStructureUnderLock(video_structure);
}
@@ -176,7 +169,7 @@ void RTPSenderVideoFrameTransformerDelegate::Reset() {
frame_transformer_->UnregisterTransformedFrameSinkCallback(ssrc_);
frame_transformer_ = nullptr;
{
- rtc::CritScope lock(&sender_lock_);
+ MutexLock lock(&sender_lock_);
sender_ = nullptr;
}
}
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
index 29ac9e4e1c..a14ce3a81e 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
@@ -16,7 +16,7 @@
#include "api/frame_transformer_interface.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/task_queue_base.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -30,7 +30,8 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
RTPSenderVideoFrameTransformerDelegate(
RTPSenderVideo* sender,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
- uint32_t ssrc);
+ uint32_t ssrc,
+ TaskQueueBase* send_transport_queue);
void Init();
@@ -39,7 +40,6 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);
@@ -64,11 +64,12 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
~RTPSenderVideoFrameTransformerDelegate() override = default;
private:
- rtc::CriticalSection sender_lock_;
+ mutable Mutex sender_lock_;
RTPSenderVideo* sender_ RTC_GUARDED_BY(sender_lock_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_;
const uint32_t ssrc_;
TaskQueueBase* encoder_queue_ = nullptr;
+ TaskQueueBase* send_transport_queue_;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
index 21c4da05ab..6a049ceb7a 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
@@ -15,6 +15,7 @@
#include <utility>
#include <vector>
+#include "absl/memory/memory.h"
#include "api/test/mock_frame_encryptor.h"
#include "api/transport/field_trial_based_config.h"
#include "api/transport/rtp/dependency_descriptor.h"
@@ -23,7 +24,6 @@
#include "common_video/generic_frame_descriptor/generic_frame_info.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
@@ -32,6 +32,7 @@
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/rate_limiter.h"
@@ -45,6 +46,7 @@ namespace webrtc {
namespace {
using ::testing::_;
+using ::testing::ContainerEq;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::IsEmpty;
@@ -57,7 +59,6 @@ using ::testing::WithArgs;
enum : int { // The first valid value is 1.
kAbsoluteSendTimeExtensionId = 1,
- kFrameMarkingExtensionId,
kGenericDescriptorId,
kDependencyDescriptorId,
kTransmissionTimeOffsetExtensionId,
@@ -93,8 +94,6 @@ class LoopbackTransportTest : public webrtc::Transport {
kGenericDescriptorId);
receivers_extensions_.Register<RtpDependencyDescriptorExtension>(
kDependencyDescriptorId);
- receivers_extensions_.Register<FrameMarkingExtension>(
- kFrameMarkingExtensionId);
receivers_extensions_.Register<AbsoluteCaptureTimeExtension>(
kAbsoluteCaptureTimeExtensionId);
receivers_extensions_.Register<PlayoutDelayLimits>(
@@ -167,8 +166,8 @@ class RtpSenderVideoTest : public ::testing::TestWithParam<bool> {
: field_trials_(GetParam()),
fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
@@ -188,12 +187,12 @@ class RtpSenderVideoTest : public ::testing::TestWithParam<bool> {
int version);
protected:
- const RtpRtcp::Configuration config_;
+ const RtpRtcpInterface::Configuration config_;
FieldTrials field_trials_;
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
TestRtpSenderVideo rtp_sender_video_;
};
@@ -205,8 +204,8 @@ TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
VideoRotation rotation;
EXPECT_TRUE(
@@ -232,7 +231,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs);
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp,
- kFrame, nullptr, hdr,
+ kFrame, hdr,
kDefaultExpectedRetransmissionTimeMs);
VideoSendTiming timing;
EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
@@ -250,15 +249,15 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video_.SendVideo(
- kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr,
- kDefaultExpectedRetransmissionTimeMs));
+ EXPECT_TRUE(
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs));
hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- EXPECT_TRUE(rtp_sender_video_.SendVideo(
- kPayload, kType, kTimestamp + 1, 0, kFrame, nullptr, hdr,
- kDefaultExpectedRetransmissionTimeMs));
+ EXPECT_TRUE(
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame,
+ hdr, kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
@@ -274,14 +273,14 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video_.SendVideo(
- kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr,
- kDefaultExpectedRetransmissionTimeMs));
+ EXPECT_TRUE(
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs));
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- EXPECT_TRUE(rtp_sender_video_.SendVideo(
- kPayload, kType, kTimestamp + 1, 0, kFrame, nullptr, hdr,
- kDefaultExpectedRetransmissionTimeMs));
+ EXPECT_TRUE(
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame,
+ hdr, kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
@@ -289,43 +288,6 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
EXPECT_EQ(kVideoRotation_90, rotation);
}
-TEST_P(RtpSenderVideoTest, CheckH264FrameMarking) {
- uint8_t kFrame[kMaxPacketLength];
- rtp_module_->RegisterRtpHeaderExtension(FrameMarkingExtension::kUri,
- kFrameMarkingExtensionId);
-
- RTPFragmentationHeader frag;
- frag.VerifyAndAllocateFragmentationHeader(1);
- frag.fragmentationOffset[0] = 0;
- frag.fragmentationLength[0] = sizeof(kFrame);
-
- RTPVideoHeader hdr;
- hdr.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
- H264PacketizationMode::NonInterleaved;
- hdr.codec = kVideoCodecH264;
- hdr.frame_marking.temporal_id = kNoTemporalIdx;
- hdr.frame_marking.tl0_pic_idx = 99;
- hdr.frame_marking.base_layer_sync = true;
- hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, &frag,
- hdr, kDefaultExpectedRetransmissionTimeMs);
-
- FrameMarking fm;
- EXPECT_FALSE(
- transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
-
- hdr.frame_marking.temporal_id = 0;
- hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame, &frag,
- hdr, kDefaultExpectedRetransmissionTimeMs);
-
- EXPECT_TRUE(
- transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
- EXPECT_EQ(hdr.frame_marking.temporal_id, fm.temporal_id);
- EXPECT_EQ(hdr.frame_marking.tl0_pic_idx, fm.tl0_pic_idx);
- EXPECT_EQ(hdr.frame_marking.base_layer_sync, fm.base_layer_sync);
-}
-
// Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits
// are set in the CVO byte.
TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) {
@@ -367,7 +329,6 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) {
header.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
H264PacketizationMode::NonInterleaved;
header.codec = kVideoCodecH264;
- header.frame_marking.temporal_id = kNoTemporalIdx;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
@@ -378,14 +339,6 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) {
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
-
- // Test higher level retransmit.
- for (int tid = 0; tid <= kMaxTemporalStreams; ++tid) {
- header.frame_marking.temporal_id = tid;
- EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
- header, kRetransmitHigherLayers | kRetransmitBaseLayer,
- kDefaultExpectedRetransmissionTimeMs));
- }
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
@@ -546,9 +499,9 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2;
video_structure.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(1).T(0).Dtis("-S").Build(),
- GenericFrameInfo::Builder().S(1).T(1).Dtis("-D").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(1).T(0).Dtis("-S"),
+ FrameDependencyTemplate().S(1).T(1).Dtis("-D"),
};
rtp_sender_video_.SetVideoStructure(&video_structure);
@@ -561,8 +514,8 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
@@ -587,8 +540,8 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
generic.decode_target_indications = {DecodeTargetIndication::kNotPresent,
DecodeTargetIndication::kRequired};
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
EXPECT_EQ(transport_.packets_sent(), 2);
DependencyDescriptor descriptor_delta;
@@ -606,6 +559,73 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
ElementsAre(1, 501));
}
+TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) {
+ const int64_t kFrameId = 100000;
+ uint8_t kFrame[100];
+ rtp_module_->RegisterRtpHeaderExtension(
+ RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
+ FrameDependencyStructure video_structure;
+ video_structure.num_decode_targets = 2;
+ video_structure.num_chains = 1;
+ video_structure.decode_target_protected_by_chain = {0, 0};
+ video_structure.templates = {
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}),
+ };
+ rtp_sender_video_.SetVideoStructure(&video_structure);
+
+ RTPVideoHeader hdr;
+ RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
+ generic.frame_id = kFrameId;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
+ DecodeTargetIndication::kSwitch};
+ generic.chain_diffs = {2};
+ hdr.frame_type = VideoFrameType::kVideoFrameKey;
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
+
+ ASSERT_EQ(transport_.packets_sent(), 1);
+ DependencyDescriptor descriptor_key;
+ ASSERT_TRUE(transport_.last_sent_packet()
+ .GetExtension<RtpDependencyDescriptorExtension>(
+ nullptr, &descriptor_key));
+ EXPECT_THAT(descriptor_key.frame_dependencies.chain_diffs,
+ ContainerEq(generic.chain_diffs));
+}
+
+TEST_P(RtpSenderVideoTest,
+ PropagatesActiveDecodeTargetsIntoDependencyDescriptor) {
+ const int64_t kFrameId = 100000;
+ uint8_t kFrame[100];
+ rtp_module_->RegisterRtpHeaderExtension(
+ RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
+ FrameDependencyStructure video_structure;
+ video_structure.num_decode_targets = 2;
+ video_structure.num_chains = 1;
+ video_structure.decode_target_protected_by_chain = {0, 0};
+ video_structure.templates = {
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}),
+ };
+ rtp_sender_video_.SetVideoStructure(&video_structure);
+
+ RTPVideoHeader hdr;
+ RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
+ generic.frame_id = kFrameId;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
+ DecodeTargetIndication::kSwitch};
+ generic.active_decode_targets = 0b01;
+ generic.chain_diffs = {1};
+ hdr.frame_type = VideoFrameType::kVideoFrameKey;
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
+
+ ASSERT_EQ(transport_.packets_sent(), 1);
+ DependencyDescriptor descriptor_key;
+ ASSERT_TRUE(transport_.last_sent_packet()
+ .GetExtension<RtpDependencyDescriptorExtension>(
+ nullptr, &descriptor_key));
+ EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u);
+}
+
TEST_P(RtpSenderVideoTest,
SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) {
const int64_t kFrameId = 100000;
@@ -615,14 +635,14 @@ TEST_P(RtpSenderVideoTest,
FrameDependencyStructure video_structure1;
video_structure1.num_decode_targets = 2;
video_structure1.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(0).T(1).Dtis("D-").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(0).T(1).Dtis("D-"),
};
FrameDependencyStructure video_structure2;
video_structure2.num_decode_targets = 2;
video_structure2.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(0).T(1).Dtis("R-").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(0).T(1).Dtis("R-"),
};
// Send 1st key frame.
@@ -633,8 +653,8 @@ TEST_P(RtpSenderVideoTest,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SetVideoStructure(&video_structure1);
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
// Parse 1st extension.
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key1;
@@ -649,8 +669,8 @@ TEST_P(RtpSenderVideoTest,
generic.decode_target_indications = {DecodeTargetIndication::kDiscardable,
DecodeTargetIndication::kNotPresent};
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 2);
RtpPacket delta_packet = transport_.last_sent_packet();
@@ -661,8 +681,8 @@ TEST_P(RtpSenderVideoTest,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SetVideoStructure(&video_structure2);
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
// Parse the 2nd key frame.
ASSERT_EQ(transport_.packets_sent(), 3);
DependencyDescriptor descriptor_key2;
@@ -705,7 +725,7 @@ TEST_P(RtpSenderVideoTest,
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 1;
- video_structure.templates = {GenericFrameInfo::Builder().Dtis("S").Build()};
+ video_structure.templates = {FrameDependencyTemplate().Dtis("S")};
rtp_sender_video.SetVideoStructure(&video_structure);
// Send key frame.
@@ -716,8 +736,8 @@ TEST_P(RtpSenderVideoTest,
EXPECT_CALL(*encryptor,
Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _));
- rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
// Double check packet with the dependency descriptor is sent.
ASSERT_EQ(transport_.packets_sent(), 1);
EXPECT_TRUE(transport_.last_sent_packet()
@@ -738,8 +758,8 @@ TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor) {
generic.dependencies.push_back(kFrameId - 1);
generic.dependencies.push_back(kFrameId - 500);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
RtpGenericFrameDescriptor descriptor_wire;
EXPECT_EQ(1, transport_.packets_sent());
@@ -773,7 +793,7 @@ void RtpSenderVideoTest::
generic.frame_id = kFrameId;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_.SendVideo(kPayload, VideoCodecType::kVideoCodecVP8,
- kTimestamp, 0, kFrame, nullptr, hdr,
+ kTimestamp, 0, kFrame, hdr,
kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
@@ -800,7 +820,7 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) {
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp,
- kAbsoluteCaptureTimestampMs, kFrame, nullptr, hdr,
+ kAbsoluteCaptureTimestampMs, kFrame, hdr,
kDefaultExpectedRetransmissionTimeMs);
// It is expected that one and only one of the packets sent on this video
@@ -833,8 +853,8 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) {
auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8_header.temporalIdx = 0;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());
@@ -842,8 +862,8 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) {
hdr.playout_delay = kExpectedDelay;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
vp8_header.temporalIdx = 1;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
PlayoutDelay received_delay = PlayoutDelay::Noop();
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
@@ -853,23 +873,23 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) {
// be populated since dilvery wasn't guaranteed on the last one.
hdr.playout_delay = PlayoutDelay::Noop(); // Inidcates "no change".
vp8_header.temporalIdx = 0;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
// The next frame does not need the extensions since it's delivery has
// already been guaranteed.
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());
// Insert key-frame, we need to refresh the state here.
hdr.frame_type = VideoFrameType::kVideoFrameKey;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
- hdr, kDefaultExpectedRetransmissionTimeMs);
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+ kDefaultExpectedRetransmissionTimeMs);
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
@@ -884,8 +904,8 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test {
RtpSenderVideoWithFrameTransformerTest()
: fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
@@ -912,7 +932,7 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test {
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
};
std::unique_ptr<EncodedImage> CreateDefaultEncodedImage() {
@@ -955,7 +975,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
EXPECT_CALL(*mock_frame_transformer, Transform);
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
- *encoded_image, nullptr, video_header,
+ *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
}
@@ -981,7 +1001,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) {
encoder_queue.SendTask(
[&] {
rtp_sender_video->SendEncodedImage(
- kPayload, kType, kTimestamp, *encoded_image, nullptr, video_header,
+ kPayload, kType, kTimestamp, *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
},
RTC_FROM_HERE);
@@ -989,5 +1009,47 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) {
EXPECT_EQ(transport_.packets_sent(), 1);
}
+TEST_F(RtpSenderVideoWithFrameTransformerTest,
+ TransformableFrameMetadataHasCorrectValue) {
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ std::unique_ptr<RTPSenderVideo> rtp_sender_video =
+ CreateSenderWithFrameTransformer(mock_frame_transformer);
+ auto encoded_image = CreateDefaultEncodedImage();
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ video_header.height = 720u;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ generic.temporal_index = 3;
+ generic.spatial_index = 2;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ generic.dependencies = {5};
+
+ // Check that the transformable frame passed to the frame transformer has the
+ // correct metadata.
+ EXPECT_CALL(*mock_frame_transformer, Transform)
+ .WillOnce(
+ [](std::unique_ptr<TransformableFrameInterface> transformable_frame) {
+ auto frame =
+ absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>(
+ transformable_frame.release()));
+ ASSERT_TRUE(frame);
+ auto metadata = frame->GetMetadata();
+ EXPECT_EQ(metadata.GetWidth(), 1280u);
+ EXPECT_EQ(metadata.GetHeight(), 720u);
+ EXPECT_EQ(metadata.GetFrameId(), 10);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5));
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+ });
+ rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
+ *encoded_image, video_header,
+ kDefaultExpectedRetransmissionTimeMs);
+}
+
} // namespace
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_utility.cc b/modules/rtp_rtcp/source/rtp_utility.cc
index f76d7d0f0b..c25fd96fa5 100644
--- a/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/modules/rtp_rtcp/source/rtp_utility.cc
@@ -17,7 +17,6 @@
#include "api/array_view.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
@@ -245,10 +244,6 @@ bool RtpHeaderParser::Parse(RTPHeader* header,
header->extension.has_video_timing = false;
header->extension.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
- header->extension.has_frame_marking = false;
- header->extension.frame_marking = {false, false, false, false,
- false, kNoTemporalIdx, 0, 0};
-
if (X) {
/* RTP header extension, RFC 3550.
0 1 2 3
@@ -497,15 +492,6 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
&header->extension.video_timing);
break;
}
- case kRtpExtensionFrameMarking: {
- if (!FrameMarkingExtension::Parse(rtc::MakeArrayView(ptr, len + 1),
- &header->extension.frame_marking)) {
- RTC_LOG(LS_WARNING) << "Incorrect frame marking len: " << len;
- return;
- }
- header->extension.has_frame_marking = true;
- break;
- }
case kRtpExtensionRtpStreamId: {
std::string name(reinterpret_cast<const char*>(ptr), len + 1);
if (IsLegalRsidName(name)) {
diff --git a/modules/rtp_rtcp/source/rtp_video_header.h b/modules/rtp_rtcp/source/rtp_video_header.h
index 7071463be4..a9c144033d 100644
--- a/modules/rtp_rtcp/source/rtp_video_header.h
+++ b/modules/rtp_rtcp/source/rtp_video_header.h
@@ -10,6 +10,7 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_HEADER_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_HEADER_H_
+#include <bitset>
#include <cstdint>
#include "absl/container/inlined_vector.h"
@@ -19,11 +20,10 @@
#include "api/video/color_space.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_frame_type.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT(build/include_directory)
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
@@ -53,6 +53,8 @@ struct RTPVideoHeader {
int temporal_index = 0;
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
absl::InlinedVector<int64_t, 5> dependencies;
+ absl::InlinedVector<int, 4> chain_diffs;
+ std::bitset<32> active_decode_targets = ~uint32_t{0};
};
RTPVideoHeader();
@@ -74,7 +76,6 @@ struct RTPVideoHeader {
PlayoutDelay playout_delay = {-1, -1};
VideoSendTiming video_timing;
- FrameMarking frame_marking = {false, false, false, false, false, 0xFF, 0, 0};
absl::optional<ColorSpace> color_space;
RTPVideoTypeHeader video_type_header;
};
diff --git a/modules/rtp_rtcp/source/source_tracker.cc b/modules/rtp_rtcp/source/source_tracker.cc
index 22b887c5d2..d6c744512a 100644
--- a/modules/rtp_rtcp/source/source_tracker.cc
+++ b/modules/rtp_rtcp/source/source_tracker.cc
@@ -25,7 +25,7 @@ void SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos) {
}
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock_scope(&lock_);
+ MutexLock lock_scope(&lock_);
for (const auto& packet_info : packet_infos) {
for (uint32_t csrc : packet_info.csrcs()) {
@@ -54,7 +54,7 @@ std::vector<RtpSource> SourceTracker::GetSources() const {
std::vector<RtpSource> sources;
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock_scope(&lock_);
+ MutexLock lock_scope(&lock_);
PruneEntries(now_ms);
diff --git a/modules/rtp_rtcp/source/source_tracker.h b/modules/rtp_rtcp/source/source_tracker.h
index fcf99bf8b5..0c7627c41d 100644
--- a/modules/rtp_rtcp/source/source_tracker.h
+++ b/modules/rtp_rtcp/source/source_tracker.h
@@ -20,7 +20,7 @@
#include "absl/types/optional.h"
#include "api/rtp_packet_infos.h"
#include "api/transport/rtp/rtp_source.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/clock.h"
@@ -116,7 +116,7 @@ class SourceTracker {
void PruneEntries(int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
Clock* const clock_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
// Entries are stored in reverse chronological order (i.e. with the most
// recently updated entries appearing first). Mutability is needed for timeout
diff --git a/modules/rtp_rtcp/source/ulpfec_generator.cc b/modules/rtp_rtcp/source/ulpfec_generator.cc
index 265fa4d1ac..76d1bb5d87 100644
--- a/modules/rtp_rtcp/source/ulpfec_generator.cc
+++ b/modules/rtp_rtcp/source/ulpfec_generator.cc
@@ -22,7 +22,7 @@
#include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -103,7 +103,7 @@ void UlpfecGenerator::SetProtectionParameters(
RTC_DCHECK_LE(key_params.fec_rate, 255);
// Store the new params and apply them for the next set of FEC packets being
// produced.
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
pending_params_.emplace(delta_params, key_params);
}
@@ -112,7 +112,7 @@ void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) {
RTC_DCHECK(generated_fec_packets_.empty());
if (media_packets_.empty()) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
if (pending_params_) {
current_params_ = *pending_params_;
pending_params_.reset();
@@ -230,19 +230,21 @@ std::vector<std::unique_ptr<RtpPacketToSend>> UlpfecGenerator::GetFecPackets() {
total_fec_size_bytes += red_packet->size();
red_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
red_packet->set_allow_retransmission(false);
+ red_packet->set_is_red(true);
+ red_packet->set_fec_protect_packet(false);
fec_packets.push_back(std::move(red_packet));
}
ResetState();
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
fec_bitrate_.Update(total_fec_size_bytes, clock_->TimeInMilliseconds());
return fec_packets;
}
DataRate UlpfecGenerator::CurrentFecRate() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return DataRate::BitsPerSec(
fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0));
}
diff --git a/modules/rtp_rtcp/source/ulpfec_generator.h b/modules/rtp_rtcp/source/ulpfec_generator.h
index be59e4c9ea..32ddc6c4b9 100644
--- a/modules/rtp_rtcp/source/ulpfec_generator.h
+++ b/modules/rtp_rtcp/source/ulpfec_generator.h
@@ -21,9 +21,9 @@
#include "modules/include/module_fec_types.h"
#include "modules/rtp_rtcp/source/forward_error_correction.h"
#include "modules/rtp_rtcp/source/video_fec_generator.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -112,9 +112,9 @@ class UlpfecGenerator : public VideoFecGenerator {
Params current_params_ RTC_GUARDED_BY(race_checker_);
bool keyframe_in_process_ RTC_GUARDED_BY(race_checker_);
- rtc::CriticalSection crit_;
- absl::optional<Params> pending_params_ RTC_GUARDED_BY(crit_);
- RateStatistics fec_bitrate_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ absl::optional<Params> pending_params_ RTC_GUARDED_BY(mutex_);
+ RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc b/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc
index 4395d8ea6b..26993cabb6 100644
--- a/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc
+++ b/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc
@@ -42,7 +42,7 @@ UlpfecReceiverImpl::~UlpfecReceiverImpl() {
}
FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return packet_counter_;
}
@@ -87,7 +87,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket(
"packet size; dropping.";
return false;
}
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
static constexpr uint8_t kRedHeaderLength = 1;
@@ -150,7 +150,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket(
// TODO(nisse): Drop always-zero return value.
int32_t UlpfecReceiverImpl::ProcessReceivedFec() {
- crit_sect_.Enter();
+ mutex_.Lock();
// If we iterate over |received_packets_| and it contains a packet that cause
// us to recurse back to this function (for example a RED packet encapsulating
@@ -167,10 +167,10 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() {
// Send received media packet to VCM.
if (!received_packet->is_fec) {
ForwardErrorCorrection::Packet* packet = received_packet->pkt;
- crit_sect_.Leave();
+ mutex_.Unlock();
recovered_packet_callback_->OnRecoveredPacket(packet->data.data(),
packet->data.size());
- crit_sect_.Enter();
+ mutex_.Lock();
// Create a packet with the buffer to modify it.
RtpPacketReceived rtp_packet;
const uint8_t* const original_data = packet->data.cdata();
@@ -207,13 +207,13 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() {
// Set this flag first; in case the recovered packet carries a RED
// header, OnRecoveredPacket will recurse back here.
recovered_packet->returned = true;
- crit_sect_.Leave();
+ mutex_.Unlock();
recovered_packet_callback_->OnRecoveredPacket(packet->data.data(),
packet->data.size());
- crit_sect_.Enter();
+ mutex_.Lock();
}
- crit_sect_.Leave();
+ mutex_.Unlock();
return 0;
}
diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_impl.h b/modules/rtp_rtcp/source/ulpfec_receiver_impl.h
index 9e4e5b8f0b..2bed042747 100644
--- a/modules/rtp_rtcp/source/ulpfec_receiver_impl.h
+++ b/modules/rtp_rtcp/source/ulpfec_receiver_impl.h
@@ -22,7 +22,7 @@
#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
#include "modules/rtp_rtcp/source/forward_error_correction.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -44,7 +44,7 @@ class UlpfecReceiverImpl : public UlpfecReceiver {
const uint32_t ssrc_;
const RtpHeaderExtensionMap extensions_;
- rtc::CriticalSection crit_sect_;
+ mutable Mutex mutex_;
RecoveredPacketReceiver* recovered_packet_callback_;
std::unique_ptr<ForwardErrorCorrection> fec_;
// TODO(nisse): The AddReceivedRedPacket method adds one or two packets to
diff --git a/modules/utility/BUILD.gn b/modules/utility/BUILD.gn
index 8710ed4729..df6945ab2c 100644
--- a/modules/utility/BUILD.gn
+++ b/modules/utility/BUILD.gn
@@ -26,7 +26,7 @@ rtc_library("utility") {
}
if (is_ios) {
- libs = [ "AVFoundation.framework" ]
+ frameworks = [ "AVFoundation.framework" ]
}
deps = [
diff --git a/modules/utility/source/process_thread_impl.h b/modules/utility/source/process_thread_impl.h
index 24a72d91f5..ed9f5c3bfc 100644
--- a/modules/utility/source/process_thread_impl.h
+++ b/modules/utility/source/process_thread_impl.h
@@ -20,7 +20,7 @@
#include "api/task_queue/queued_task.h"
#include "modules/include/module.h"
#include "modules/utility/include/process_thread.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/location.h"
#include "rtc_base/platform_thread.h"
@@ -92,7 +92,8 @@ class ProcessThreadImpl : public ProcessThread {
// issues, but I haven't figured out what they are, if there are alignment
// requirements for mutexes on Mac or if there's something else to it.
// So be careful with changing the layout.
- rtc::CriticalSection lock_; // Used to guard modules_, tasks_ and stop_.
+ rtc::RecursiveCriticalSection
+ lock_; // Used to guard modules_, tasks_ and stop_.
rtc::ThreadChecker thread_checker_;
rtc::Event wake_up_;
diff --git a/modules/utility/source/process_thread_impl_unittest.cc b/modules/utility/source/process_thread_impl_unittest.cc
index 6f765369f4..1fef0b6740 100644
--- a/modules/utility/source/process_thread_impl_unittest.cc
+++ b/modules/utility/source/process_thread_impl_unittest.cc
@@ -37,9 +37,9 @@ static const int kEventWaitTimeout = 500;
class MockModule : public Module {
public:
- MOCK_METHOD0(TimeUntilNextProcess, int64_t());
- MOCK_METHOD0(Process, void());
- MOCK_METHOD1(ProcessThreadAttached, void(ProcessThread*));
+ MOCK_METHOD(int64_t, TimeUntilNextProcess, (), (override));
+ MOCK_METHOD(void, Process, (), (override));
+ MOCK_METHOD(void, ProcessThreadAttached, (ProcessThread*), (override));
};
class RaiseEventTask : public QueuedTask {
diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn
index 1c02412264..b4e8372620 100644
--- a/modules/video_capture/BUILD.gn
+++ b/modules/video_capture/BUILD.gn
@@ -36,11 +36,12 @@ rtc_library("video_capture_module") {
"../../media:rtc_media_base",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:stringutils",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:rw_lock_wrapper",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (!build_with_chromium) {
@@ -51,6 +52,7 @@ if (!build_with_chromium) {
"../../api:scoped_refptr",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
]
@@ -129,15 +131,16 @@ if (!build_with_chromium) {
"../../api/video:video_rtp_headers",
"../../common_video",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../test:frame_utils",
+ "../../test:test_main",
"../../test:test_support",
"../../test:video_test_common",
"../utility",
"//testing/gtest",
"//third_party/abseil-cpp/absl/memory",
]
- deps += [ "../../test:test_main" ]
}
}
}
diff --git a/modules/video_capture/linux/device_info_linux.cc b/modules/video_capture/linux/device_info_linux.cc
index bac5d4078a..3c8fdd20fa 100644
--- a/modules/video_capture/linux/device_info_linux.cc
+++ b/modules/video_capture/linux/device_info_linux.cc
@@ -47,11 +47,19 @@ uint32_t DeviceInfoLinux::NumberOfDevices() {
uint32_t count = 0;
char device[20];
int fd = -1;
+ struct v4l2_capability cap;
/* detect /dev/video [0-63]VideoCaptureModule entries */
for (int n = 0; n < 64; n++) {
sprintf(device, "/dev/video%d", n);
if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 ||
+ !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ close(fd);
+ continue;
+ }
+
close(fd);
count++;
}
@@ -74,9 +82,16 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
char device[20];
int fd = -1;
bool found = false;
+ struct v4l2_capability cap;
for (int n = 0; n < 64; n++) {
sprintf(device, "/dev/video%d", n);
if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 ||
+ !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ close(fd);
+ continue;
+ }
if (count == deviceNumber) {
// Found the device
found = true;
@@ -92,7 +107,6 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
return -1;
// query device capabilities
- struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
RTC_LOG(LS_INFO) << "error in querying the device capability for device "
<< device << ". errno = " << errno;
@@ -153,6 +167,11 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
// query device capabilities
struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
+ // skip devices without video capture capability
+ if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ continue;
+ }
+
if (cap.bus_info[0] != 0) {
if (strncmp((const char*)cap.bus_info, (const char*)deviceUniqueIdUTF8,
strlen((const char*)deviceUniqueIdUTF8)) ==
diff --git a/modules/video_capture/linux/video_capture_linux.cc b/modules/video_capture/linux/video_capture_linux.cc
index 30865235b6..504565f512 100644
--- a/modules/video_capture/linux/video_capture_linux.cc
+++ b/modules/video_capture/linux/video_capture_linux.cc
@@ -115,7 +115,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
}
}
- rtc::CritScope cs(&_captureCritSect);
+ MutexLock lock(&capture_lock_);
// first open /dev/video device
char device[20];
sprintf(device, "/dev/video%d", (int)_deviceId);
@@ -264,7 +264,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
int32_t VideoCaptureModuleV4L2::StopCapture() {
if (_captureThread) {
{
- rtc::CritScope cs(&_captureCritSect);
+ MutexLock lock(&capture_lock_);
quit_ = true;
}
// Make sure the capture thread stop stop using the critsect.
@@ -272,7 +272,7 @@ int32_t VideoCaptureModuleV4L2::StopCapture() {
_captureThread.reset();
}
- rtc::CritScope cs(&_captureCritSect);
+ MutexLock lock(&capture_lock_);
if (_captureStarted) {
_captureStarted = false;
@@ -387,7 +387,7 @@ bool VideoCaptureModuleV4L2::CaptureProcess() {
}
{
- rtc::CritScope cs(&_captureCritSect);
+ MutexLock lock(&capture_lock_);
if (quit_) {
return false;
diff --git a/modules/video_capture/linux/video_capture_linux.h b/modules/video_capture/linux/video_capture_linux.h
index ac9409e23a..ddb5d5ba87 100644
--- a/modules/video_capture/linux/video_capture_linux.h
+++ b/modules/video_capture/linux/video_capture_linux.h
@@ -18,8 +18,8 @@
#include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace videocapturemodule {
@@ -43,8 +43,8 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl {
// TODO(pbos): Stop using unique_ptr and resetting the thread.
std::unique_ptr<rtc::PlatformThread> _captureThread;
- rtc::CriticalSection _captureCritSect;
- bool quit_ RTC_GUARDED_BY(_captureCritSect);
+ Mutex capture_lock_;
+ bool quit_ RTC_GUARDED_BY(capture_lock_);
int32_t _deviceId;
int32_t _deviceFd;
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index be443e0820..1a0cf2d5da 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -23,7 +23,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_capture/video_capture_factory.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/sleep.h"
#include "test/frame_utils.h"
@@ -74,7 +74,7 @@ class TestVideoCaptureCallback
}
void OnFrame(const webrtc::VideoFrame& videoFrame) override {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
int height = videoFrame.height();
int width = videoFrame.width();
#if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID
@@ -106,38 +106,38 @@ class TestVideoCaptureCallback
}
void SetExpectedCapability(VideoCaptureCapability capability) {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
capability_ = capability;
incoming_frames_ = 0;
last_render_time_ms_ = 0;
}
int incoming_frames() {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
return incoming_frames_;
}
int timing_warnings() {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
return timing_warnings_;
}
VideoCaptureCapability capability() {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
return capability_;
}
bool CompareLastFrame(const webrtc::VideoFrame& frame) {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
return webrtc::test::FrameBufsEqual(last_frame_,
frame.video_frame_buffer());
}
void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) {
- rtc::CritScope cs(&capture_cs_);
+ webrtc::MutexLock lock(&capture_lock_);
rotate_frame_ = rotation;
}
private:
- rtc::CriticalSection capture_cs_;
+ webrtc::Mutex capture_lock_;
VideoCaptureCapability capability_;
int64_t last_render_time_ms_;
int incoming_frames_;
diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc
index 9d53a91157..6619d15924 100644
--- a/modules/video_capture/video_capture_impl.cc
+++ b/modules/video_capture/video_capture_impl.cc
@@ -96,12 +96,12 @@ VideoCaptureImpl::~VideoCaptureImpl() {
void VideoCaptureImpl::RegisterCaptureDataCallback(
rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
_dataCallBack = dataCallBack;
}
void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
_dataCallBack = NULL;
}
int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
@@ -118,7 +118,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
size_t videoFrameLength,
const VideoCaptureCapability& frameInfo,
int64_t captureTime /*=0*/) {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
const int32_t width = frameInfo.width;
const int32_t height = frameInfo.height;
@@ -223,7 +223,7 @@ int32_t VideoCaptureImpl::CaptureSettings(
}
int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
_rotateFrame = rotation;
return 0;
}
diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h
index 197bfd387c..cbc99b76c1 100644
--- a/modules/video_capture/video_capture_impl.h
+++ b/modules/video_capture/video_capture_impl.h
@@ -25,7 +25,7 @@
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_config.h"
#include "modules/video_capture/video_capture_defines.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -78,7 +78,7 @@ class VideoCaptureImpl : public VideoCaptureModule {
~VideoCaptureImpl() override;
char* _deviceUniqueId; // current Device unique name;
- rtc::CriticalSection _apiCs;
+ Mutex api_lock_;
VideoCaptureCapability _requestedCapability; // Should be set by platform
// dependent code in
// StartCapture.
diff --git a/modules/video_capture/windows/video_capture_ds.cc b/modules/video_capture/windows/video_capture_ds.cc
index 615a1b56ea..6dca74750c 100644
--- a/modules/video_capture/windows/video_capture_ds.cc
+++ b/modules/video_capture/windows/video_capture_ds.cc
@@ -130,7 +130,7 @@ int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
}
int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
if (capability != _requestedCapability) {
DisconnectGraph();
@@ -148,7 +148,7 @@ int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
}
int32_t VideoCaptureDS::StopCapture() {
- rtc::CritScope cs(&_apiCs);
+ MutexLock lock(&api_lock_);
HRESULT hr = _mediaControl->Pause();
if (FAILED(hr)) {
diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index b1438392ae..5ef040da32 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -6,6 +6,7 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
+import("//third_party/libaom/options.gni")
import("../../webrtc.gni")
rtc_library("encoded_frame") {
@@ -30,11 +31,29 @@ rtc_library("encoded_frame") {
"../../rtc_base/experiments:rtt_mult_experiment",
"../../rtc_base/system:rtc_export",
"../../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
]
}
+rtc_library("chain_diff_calculator") {
+ sources = [
+ "chain_diff_calculator.cc",
+ "chain_diff_calculator.h",
+ ]
+
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
rtc_library("frame_dependencies_calculator") {
sources = [
"frame_dependencies_calculator.cc",
@@ -47,6 +66,8 @@ rtc_library("frame_dependencies_calculator") {
"../../common_video/generic_frame_descriptor",
"../../rtc_base:checks",
"../../rtc_base:logging",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
@@ -54,12 +75,11 @@ rtc_library("frame_dependencies_calculator") {
}
rtc_library("nack_module") {
- visibility = [ "*" ]
sources = [
"histogram.cc",
"histogram.h",
- "nack_module.cc",
- "nack_module.h",
+ "nack_module2.cc",
+ "nack_module2.h",
]
deps = [
@@ -69,7 +89,11 @@ rtc_library("nack_module") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_numerics",
+ "../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:pending_task_safety_flag",
+ "../../rtc_base/task_utils:repeating_task",
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../utility",
@@ -91,8 +115,13 @@ rtc_library("video_coding") {
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
"../rtp_rtcp:rtp_video_header",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
]
sources = [
@@ -174,6 +203,7 @@ rtc_library("video_coding") {
"../../rtc_base/experiments:min_video_bitrate_experiment",
"../../rtc_base/experiments:rate_control_settings",
"../../rtc_base/experiments:rtt_mult_experiment",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/third_party/base64",
@@ -181,9 +211,6 @@ rtc_library("video_coding") {
"../../system_wrappers",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/container:inlined_vector",
- "//third_party/abseil-cpp/absl/types:optional",
- "//third_party/abseil-cpp/absl/types:variant",
]
}
@@ -205,8 +232,8 @@ rtc_library("video_codec_interface") {
"../../common_video",
"../../common_video/generic_frame_descriptor",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_coding_legacy") {
@@ -253,11 +280,14 @@ rtc_library("video_coding_legacy") {
"../../rtc_base:logging",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_event",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../system_wrappers",
"../rtp_rtcp:rtp_rtcp_format",
"../rtp_rtcp:rtp_video_header",
"../utility",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
@@ -310,6 +340,7 @@ rtc_library("video_coding_utility") {
"../../api/video:video_adaptation",
"../../api/video:video_bitrate_allocation",
"../../api/video:video_bitrate_allocator",
+ "../../api/video:video_frame",
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../modules/rtp_rtcp",
@@ -329,8 +360,8 @@ rtc_library("video_coding_utility") {
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers:field_trial",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("webrtc_h264") {
@@ -363,9 +394,11 @@ rtc_library("webrtc_h264") {
"../../rtc_base/system:rtc_export",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ "//third_party/libyuv",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
- "//third_party/libyuv",
]
if (rtc_use_h264) {
@@ -406,6 +439,7 @@ rtc_library("webrtc_multiplex") {
"../../media:rtc_media_base",
"../../rtc_base",
"../../rtc_base:checks",
+ "../../rtc_base/synchronization:mutex",
"../rtp_rtcp:rtp_rtcp_format",
]
}
@@ -449,9 +483,9 @@ rtc_library("webrtc_vp8") {
"../../rtc_base/experiments:rate_control_settings",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (rtc_build_libvpx) {
deps += [ rtc_libvpx_dir ]
}
@@ -482,8 +516,8 @@ rtc_library("webrtc_vp8_temporal_layers") {
"../../rtc_base:rtc_numerics",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# This target includes VP9 files that may be used for any VP9 codec, internal SW or external HW.
@@ -505,9 +539,10 @@ rtc_library("webrtc_vp9_helpers") {
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../rtc_base:checks",
+ "../../rtc_base:logging",
"../../rtc_base/experiments:stable_target_rate_experiment",
- "//third_party/abseil-cpp/absl/container:inlined_vector",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ]
}
rtc_library("webrtc_vp9") {
@@ -540,10 +575,11 @@ rtc_library("webrtc_vp9") {
"../../rtc_base",
"../../rtc_base:checks",
"../../rtc_base/experiments:rate_control_settings",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers:field_trial",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (rtc_build_libvpx) {
deps += [ rtc_libvpx_dir ]
}
@@ -594,6 +630,25 @@ if (rtc_include_tests) {
}
}
+ rtc_library("encoded_video_frame_producer") {
+ testonly = true
+ sources = [
+ "codecs/test/encoded_video_frame_producer.cc",
+ "codecs/test/encoded_video_frame_producer.h",
+ ]
+ deps = [
+ ":video_codec_interface",
+ "../../api:create_frame_generator",
+ "../../api:frame_generator_api",
+ "../../api/transport/rtp:dependency_descriptor",
+ "../../api/video:encoded_image",
+ "../../api/video:video_frame",
+ "../../api/video:video_frame_type",
+ "../../api/video_codecs:video_codecs_api",
+ "../../rtc_base:checks",
+ ]
+ }
+
rtc_library("simulcast_test_fixture_impl") {
testonly = true
sources = [
@@ -656,15 +711,16 @@ if (rtc_include_tests) {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
"../../test:video_test_common",
"../../test:video_test_support",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
video_coding_modules_tests_resources = []
@@ -735,8 +791,8 @@ if (rtc_include_tests) {
"../../test:test_support",
"../../test:video_test_common",
"../../test:video_test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("videocodec_test_stats_impl") {
@@ -768,11 +824,17 @@ if (rtc_include_tests) {
"codecs/vp8/test/vp8_impl_unittest.cc",
"codecs/vp9/test/vp9_impl_unittest.cc",
]
+
+ # TODO(jianj): Fix crash on iOS and re-enable
+ if (enable_libaom && !is_ios) {
+ sources += [ "codecs/test/videocodec_test_libaom.cc" ]
+ }
if (rtc_use_h264) {
sources += [ "codecs/test/videocodec_test_openh264.cc" ]
}
deps = [
+ ":encoded_video_frame_producer",
":video_codec_interface",
":video_codecs_test_framework",
":video_coding_utility",
@@ -811,9 +873,9 @@ if (rtc_include_tests) {
"../../test:test_support",
"../../test:video_test_common",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
data = video_coding_modules_tests_resources
@@ -842,6 +904,7 @@ if (rtc_include_tests) {
testonly = true
sources = [
+ "chain_diff_calculator_unittest.cc",
"codecs/test/videocodec_test_fixture_config_unittest.cc",
"codecs/test/videocodec_test_stats_impl_unittest.cc",
"codecs/test/videoprocessor_unittest.cc",
@@ -861,6 +924,7 @@ if (rtc_include_tests) {
"jitter_buffer_unittest.cc",
"jitter_estimator_tests.cc",
"loss_notification_controller_unittest.cc",
+ "nack_module2_unittest.cc",
"nack_module_unittest.cc",
"packet_buffer_unittest.cc",
"receiver_unittest.cc",
@@ -888,6 +952,7 @@ if (rtc_include_tests) {
}
deps = [
+ ":chain_diff_calculator",
":codec_globals_headers",
":encoded_frame",
":frame_dependencies_calculator",
@@ -941,6 +1006,7 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:task_queue_for_test",
"../../rtc_base/experiments:jitter_upper_bound_experiment",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
@@ -954,7 +1020,11 @@ if (rtc_include_tests) {
"../../test/time_controller:time_controller",
"../rtp_rtcp:rtp_rtcp_format",
"../rtp_rtcp:rtp_video_header",
+ "codecs/av1:scalability_structure_tests",
"codecs/av1:video_coding_codecs_av1_tests",
+ "deprecated:nack_module",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
diff --git a/modules/video_coding/chain_diff_calculator.cc b/modules/video_coding/chain_diff_calculator.cc
new file mode 100644
index 0000000000..5f852717b5
--- /dev/null
+++ b/modules/video_coding/chain_diff_calculator.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/chain_diff_calculator.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void ChainDiffCalculator::Reset(const std::vector<bool>& chains) {
+ last_frame_in_chain_.resize(chains.size());
+ for (size_t i = 0; i < chains.size(); ++i) {
+ if (chains[i]) {
+ last_frame_in_chain_[i] = absl::nullopt;
+ }
+ }
+}
+
+absl::InlinedVector<int, 4> ChainDiffCalculator::ChainDiffs(
+ int64_t frame_id) const {
+ absl::InlinedVector<int, 4> result;
+ result.reserve(last_frame_in_chain_.size());
+ for (const auto& frame_id_in_chain : last_frame_in_chain_) {
+ result.push_back(frame_id_in_chain ? (frame_id - *frame_id_in_chain) : 0);
+ }
+ return result;
+}
+
+absl::InlinedVector<int, 4> ChainDiffCalculator::From(
+ int64_t frame_id,
+ const std::vector<bool>& chains) {
+ auto result = ChainDiffs(frame_id);
+ if (chains.size() != last_frame_in_chain_.size()) {
+ RTC_LOG(LS_ERROR) << "Insconsistent chain configuration for frame#"
+ << frame_id << ": expected "
+ << last_frame_in_chain_.size() << " chains, found "
+ << chains.size();
+ }
+ size_t num_chains = std::min(last_frame_in_chain_.size(), chains.size());
+ for (size_t i = 0; i < num_chains; ++i) {
+ if (chains[i]) {
+ last_frame_in_chain_[i] = frame_id;
+ }
+ }
+ return result;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/chain_diff_calculator.h b/modules/video_coding/chain_diff_calculator.h
new file mode 100644
index 0000000000..bca7340c6f
--- /dev/null
+++ b/modules/video_coding/chain_diff_calculator.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
+#define MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// This class is thread compatible.
+class ChainDiffCalculator {
+ public:
+ ChainDiffCalculator() = default;
+ ChainDiffCalculator(const ChainDiffCalculator&) = default;
+ ChainDiffCalculator& operator=(const ChainDiffCalculator&) = default;
+
+ // Restarts chains, i.e. for position where chains[i] == true next chain_diff
+ // will be 0. Saves chains.size() as number of chains in the stream.
+ void Reset(const std::vector<bool>& chains);
+
+ // Returns chain diffs based on flags if frame is part of the chain.
+ absl::InlinedVector<int, 4> From(int64_t frame_id,
+ const std::vector<bool>& chains);
+
+ private:
+ absl::InlinedVector<int, 4> ChainDiffs(int64_t frame_id) const;
+
+ absl::InlinedVector<absl::optional<int64_t>, 4> last_frame_in_chain_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
diff --git a/modules/video_coding/chain_diff_calculator_unittest.cc b/modules/video_coding/chain_diff_calculator_unittest.cc
new file mode 100644
index 0000000000..efd09bd888
--- /dev/null
+++ b/modules/video_coding/chain_diff_calculator_unittest.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/chain_diff_calculator.h"
+
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::SizeIs;
+
+TEST(ChainDiffCalculatorTest, SingleChain) {
+ // Simulate a stream with 2 temporal layer where chain
+ // protects temporal layer 0.
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true});
+ EXPECT_THAT(calculator.From(1, {true}), ElementsAre(0));
+ // T1 delta frame.
+ EXPECT_THAT(calculator.From(2, {false}), ElementsAre(1));
+ // T0 delta frame.
+ EXPECT_THAT(calculator.From(3, {true}), ElementsAre(2));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsFullSvc) {
+ // Simulate a full svc stream with 2 spatial and 2 temporal layers.
+ // chains are protecting temporal layers 0.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, true});
+ EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0));
+ // S1 Key frame.
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, true}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 1));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsKSvc) {
+ // Simulate a k-svc stream with 2 spatial and 2 temporal layers.
+ // chains are protecting temporal layers 0.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, true});
+ EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0));
+ // S1 Key frame.
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsSimulcast) {
+ // Simulate a k-svc stream with 2 spatial and 2 temporal layers.
+ // chains are protecting temporal layers 0.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+ // S1 Key frame.
+ calculator.Reset({false, true});
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 0));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4));
+}
+
+TEST(ChainDiffCalculatorTest, ResilentToAbsentChainConfig) {
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+ // Forgot to set chains. should still return 2 chain_diffs.
+ EXPECT_THAT(calculator.From(2, {}), ElementsAre(1, 0));
+ // chain diffs for next frame(s) are undefined, but still there should be
+ // correct number of them.
+ EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2));
+ EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2));
+ // Since previous two frames updated all the chains, can expect what
+ // chain_diffs would be.
+ EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1));
+}
+
+TEST(ChainDiffCalculatorTest, ResilentToTooMainChains) {
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+ // Set wrong number of chains. Expect number of chain_diffs is not changed.
+ EXPECT_THAT(calculator.From(2, {true, true, true}), ElementsAre(1, 0));
+ // chain diffs for next frame(s) are undefined, but still there should be
+ // correct number of them.
+ EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2));
+ EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2));
+ // Since previous two frames updated all the chains, can expect what
+ // chain_diffs would be.
+ EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/BUILD.gn b/modules/video_coding/codecs/av1/BUILD.gn
index b2b82d4947..b6d55671f0 100644
--- a/modules/video_coding/codecs/av1/BUILD.gn
+++ b/modules/video_coding/codecs/av1/BUILD.gn
@@ -13,10 +13,8 @@ rtc_library("libaom_av1_decoder") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
public = [ "libaom_av1_decoder.h" ]
- deps = [
- "../../../../api/video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/base:core_headers",
- ]
+ deps = [ "../../../../api/video_codecs:video_codecs_api" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
if (enable_libaom) {
sources = [ "libaom_av1_decoder.cc" ]
@@ -27,22 +25,86 @@ rtc_library("libaom_av1_decoder") {
"../../../../api/video:video_frame_i420",
"../../../../common_video",
"../../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libaom",
"//third_party/libyuv",
]
+ absl_deps += [ "//third_party/abseil-cpp/absl/types:optional" ]
} else {
sources = [ "libaom_av1_decoder_absent.cc" ]
}
}
+rtc_source_set("scalable_video_controller") {
+ sources = [
+ "scalable_video_controller.h",
+ "scalable_video_controller_no_layering.cc",
+ "scalable_video_controller_no_layering.h",
+ ]
+ deps = [
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../api/video:video_bitrate_allocation",
+ "../../../../common_video/generic_frame_descriptor",
+ "../../../../rtc_base:checks",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("scalability_structures") {
+ sources = [
+ "create_scalability_structure.cc",
+ "create_scalability_structure.h",
+ "scalability_structure_l1t2.cc",
+ "scalability_structure_l1t2.h",
+ "scalability_structure_l1t3.cc",
+ "scalability_structure_l1t3.h",
+ "scalability_structure_l2t1.cc",
+ "scalability_structure_l2t1.h",
+ "scalability_structure_l2t1_key.cc",
+ "scalability_structure_l2t1_key.h",
+ "scalability_structure_l2t1h.cc",
+ "scalability_structure_l2t1h.h",
+ "scalability_structure_l2t2.cc",
+ "scalability_structure_l2t2.h",
+ "scalability_structure_l2t2_key.cc",
+ "scalability_structure_l2t2_key.h",
+ "scalability_structure_l2t2_key_shift.cc",
+ "scalability_structure_l2t2_key_shift.h",
+ "scalability_structure_l3t1.cc",
+ "scalability_structure_l3t1.h",
+ "scalability_structure_l3t3.cc",
+ "scalability_structure_l3t3.h",
+ "scalability_structure_s2t1.cc",
+ "scalability_structure_s2t1.h",
+ ]
+ deps = [
+ ":scalable_video_controller",
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../common_video/generic_frame_descriptor",
+ "../../../../rtc_base:checks",
+ "../../../../rtc_base:logging",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
rtc_library("libaom_av1_encoder") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
public = [ "libaom_av1_encoder.h" ]
deps = [
+ ":scalable_video_controller",
"../../../../api/video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/types:optional",
]
if (enable_libaom) {
@@ -56,7 +118,6 @@ rtc_library("libaom_av1_encoder") {
"../../../../common_video",
"../../../../rtc_base:checks",
"../../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/libaom",
]
} else {
@@ -65,6 +126,21 @@ rtc_library("libaom_av1_encoder") {
}
if (rtc_include_tests) {
+ rtc_library("scalability_structure_tests") {
+ testonly = true
+ sources = [ "scalability_structure_unittest.cc" ]
+ deps = [
+ ":scalability_structures",
+ ":scalable_video_controller",
+ "../..:chain_diff_calculator",
+ "../..:frame_dependencies_calculator",
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../api/video:video_frame_type",
+ "../../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
rtc_library("video_coding_codecs_av1_tests") {
testonly = true
@@ -76,13 +152,18 @@ if (rtc_include_tests) {
deps = [
":libaom_av1_decoder",
":libaom_av1_encoder",
+ ":scalability_structures",
+ ":scalable_video_controller",
+ "../..:encoded_video_frame_producer",
"../..:video_codec_interface",
- "../../../../api:create_frame_generator",
- "../../../../api:frame_generator_api",
+ "../../../../api:mock_video_encoder",
+ "../../../../api/units:data_size",
+ "../../../../api/units:time_delta",
+ "../../../../api/video:video_frame_i420",
"../../../../api/video_codecs:video_codecs_api",
"../../../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
}
diff --git a/modules/video_coding/codecs/av1/create_scalability_structure.cc b/modules/video_coding/codecs/av1/create_scalability_structure.cc
new file mode 100644
index 0000000000..17375547c5
--- /dev/null
+++ b/modules/video_coding/codecs/av1/create_scalability_structure.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/create_scalability_structure.h"
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1h.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+struct NamedStructureFactory {
+ absl::string_view name;
+ // Use function pointer to make NamedStructureFactory trivally destructable.
+ std::unique_ptr<ScalableVideoController> (*factory)();
+};
+
+// Wrap std::make_unique function to have correct return type.
+template <typename T>
+std::unique_ptr<ScalableVideoController> Create() {
+ return std::make_unique<T>();
+}
+
+constexpr NamedStructureFactory kFactories[] = {
+ {"NONE", Create<ScalableVideoControllerNoLayering>},
+ {"L1T2", Create<ScalabilityStructureL1T2>},
+ {"L1T3", Create<ScalabilityStructureL1T3>},
+ {"L2T1", Create<ScalabilityStructureL2T1>},
+ {"L2T1h", Create<ScalabilityStructureL2T1h>},
+ {"L2T1_KEY", Create<ScalabilityStructureL2T1Key>},
+ {"L2T2", Create<ScalabilityStructureL2T2>},
+ {"L2T2_KEY", Create<ScalabilityStructureL2T2Key>},
+ {"L2T2_KEY_SHIFT", Create<ScalabilityStructureL2T2KeyShift>},
+ {"L3T1", Create<ScalabilityStructureL3T1>},
+ {"L3T3", Create<ScalabilityStructureL3T3>},
+ {"S2T1", Create<ScalabilityStructureS2T1>},
+};
+
+} // namespace
+
+std::unique_ptr<ScalableVideoController> CreateScalabilityStructure(
+ absl::string_view name) {
+ RTC_DCHECK(!name.empty());
+ for (const auto& entry : kFactories) {
+ if (entry.name == name) {
+ return entry.factory();
+ }
+ }
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/create_scalability_structure.h b/modules/video_coding/codecs/av1/create_scalability_structure.h
new file mode 100644
index 0000000000..fe4a283ae4
--- /dev/null
+++ b/modules/video_coding/codecs/av1/create_scalability_structure.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// Creates a structure by name according to
+// https://w3c.github.io/webrtc-svc/#scalabilitymodes*
+// Returns nullptr for unknown name.
+std::unique_ptr<ScalableVideoController> CreateScalabilityStructure(
+ absl::string_view name);
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_
diff --git a/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
index 122f214a5c..1a8a0c4775 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
@@ -53,6 +53,8 @@ class LibaomAv1Decoder final : public VideoDecoder {
int32_t Release() override;
+ const char* ImplementationName() const override;
+
private:
aom_codec_ctx_t context_;
bool inited_;
@@ -127,7 +129,7 @@ int32_t LibaomAv1Decoder::Decode(const EncodedImage& encoded_image,
// Return decoded frame data.
int qp;
- ret = aom_codec_control_(&context_, AOMD_GET_LAST_QUANTIZER, &qp);
+ ret = aom_codec_control(&context_, AOMD_GET_LAST_QUANTIZER, &qp);
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret
<< " on control AOME_GET_LAST_QUANTIZER.";
@@ -180,6 +182,10 @@ int32_t LibaomAv1Decoder::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* LibaomAv1Decoder::ImplementationName() const {
+ return "libaom";
+}
+
} // namespace
const bool kIsLibaomAv1DecoderSupported = true;
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
index 59ad127435..cd9b37442a 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -13,15 +13,20 @@
#include <stdint.h>
#include <memory>
+#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
#include "api/scoped_refptr.h"
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/checks.h"
@@ -34,9 +39,7 @@ namespace webrtc {
namespace {
// Encoder configuration parameters
-constexpr int kQpMax = 56;
constexpr int kQpMin = 10;
-constexpr int kDefaultEncSpeed = 7; // Use values 6, 7, or 8 for RTC.
constexpr int kUsageProfile = 1; // 0 = good quality; 1 = real-time.
constexpr int kMinQindex = 58; // Min qindex threshold for QP scaling.
constexpr int kMaxQindex = 180; // Max qindex threshold for QP scaling.
@@ -45,9 +48,23 @@ constexpr int kLagInFrames = 0; // No look ahead.
constexpr int kRtpTicksPerSecond = 90000;
constexpr float kMinimumFrameRate = 1.0;
+// Only positive speeds, range for real-time coding currently is: 6 - 8.
+// Lower means slower/better quality, higher means fastest/lower quality.
+int GetCpuSpeed(int width, int height, int number_of_cores) {
+ // For smaller resolutions, use lower speed setting (get some coding gain at
+ // the cost of increased encoding complexity).
+ if (number_of_cores > 2 && width * height <= 320 * 180)
+ return 6;
+ else if (width * height >= 1280 * 720)
+ return 8;
+ else
+ return 7;
+}
+
class LibaomAv1Encoder final : public VideoEncoder {
public:
- LibaomAv1Encoder();
+ explicit LibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller);
~LibaomAv1Encoder();
int InitEncode(const VideoCodec* codec_settings,
@@ -66,8 +83,19 @@ class LibaomAv1Encoder final : public VideoEncoder {
EncoderInfo GetEncoderInfo() const override;
private:
+ bool SvcEnabled() const { return svc_params_.has_value(); }
+ // Fills svc_params_ memeber value. Returns false on error.
+ bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config);
+ // Configures the encoder with layer for the next frame.
+ void SetSvcLayerId(
+ const ScalableVideoController::LayerFrameConfig& layer_frame);
+ // Configures the encoder which buffers next frame updates and can reference.
+ void SetSvcRefFrameConfig(
+ const ScalableVideoController::LayerFrameConfig& layer_frame);
+
+ const std::unique_ptr<ScalableVideoController> svc_controller_;
bool inited_;
- bool keyframe_required_;
+ absl::optional<aom_svc_params_t> svc_params_;
VideoCodec encoder_settings_;
aom_image_t* frame_for_encode_;
aom_codec_ctx_t ctx_;
@@ -100,11 +128,14 @@ int32_t VerifyCodecSettings(const VideoCodec& codec_settings) {
return WEBRTC_VIDEO_CODEC_OK;
}
-LibaomAv1Encoder::LibaomAv1Encoder()
- : inited_(false),
- keyframe_required_(true),
+LibaomAv1Encoder::LibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller)
+ : svc_controller_(std::move(svc_controller)),
+ inited_(false),
frame_for_encode_(nullptr),
- encoded_image_callback_(nullptr) {}
+ encoded_image_callback_(nullptr) {
+ RTC_DCHECK(svc_controller_);
+}
LibaomAv1Encoder::~LibaomAv1Encoder() {
Release();
@@ -134,6 +165,10 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
return result;
}
+ if (!SetSvcParams(svc_controller_->StreamConfig())) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
// Initialize encoder configuration structure with default values
aom_codec_err_t ret =
aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg_, 0);
@@ -153,9 +188,9 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
cfg_.g_input_bit_depth = kBitDepth;
cfg_.kf_mode = AOM_KF_DISABLED;
cfg_.rc_min_quantizer = kQpMin;
- cfg_.rc_max_quantizer = kQpMax;
+ cfg_.rc_max_quantizer = encoder_settings_.qpMax;
cfg_.g_usage = kUsageProfile;
-
+ cfg_.g_error_resilient = 0;
// Low-latency settings.
cfg_.rc_end_usage = AOM_CBR; // Constant Bit Rate (CBR) mode
cfg_.g_pass = AOM_RC_ONE_PASS; // One-pass rate control
@@ -180,7 +215,9 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
inited_ = true;
// Set control parameters
- ret = aom_codec_control(&ctx_, AOME_SET_CPUUSED, kDefaultEncSpeed);
+ ret = aom_codec_control(
+ &ctx_, AOME_SET_CPUUSED,
+ GetCpuSpeed(cfg_.g_w, cfg_.g_h, settings.number_of_cores));
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
<< " on control AV1E_SET_CPUUSED.";
@@ -198,16 +235,147 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
<< " on control AV1E_SET_DELTAQ_MODE.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ORDER_HINT, 0);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_ENABLE_ORDER_HINT.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
ret = aom_codec_control(&ctx_, AV1E_SET_AQ_MODE, 3);
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
<< " on control AV1E_SET_AQ_MODE.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ if (SvcEnabled()) {
+ ret = aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &*svc_params_);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAV1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_SVC_PARAMS.";
+ return false;
+ }
+ }
+
+ ret = aom_codec_control(&ctx_, AOME_SET_MAX_INTRA_BITRATE_PCT, 300);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MAX_INTRA_BITRATE_PCT.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 2);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_COEFF_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 2);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MODE_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_MV_COST_UPD_FREQ, 3);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MV_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
return WEBRTC_VIDEO_CODEC_OK;
}
+bool LibaomAv1Encoder::SetSvcParams(
+ ScalableVideoController::StreamLayersConfig svc_config) {
+ bool svc_enabled =
+ svc_config.num_spatial_layers > 1 || svc_config.num_temporal_layers > 1;
+ if (!svc_enabled) {
+ svc_params_ = absl::nullopt;
+ return true;
+ }
+ if (svc_config.num_spatial_layers < 1 || svc_config.num_spatial_layers > 4) {
+ RTC_LOG(LS_WARNING) << "Av1 supports up to 4 spatial layers. "
+ << svc_config.num_spatial_layers << " configured.";
+ return false;
+ }
+ if (svc_config.num_temporal_layers < 1 ||
+ svc_config.num_temporal_layers > 8) {
+ RTC_LOG(LS_WARNING) << "Av1 supports up to 8 temporal layers. "
+ << svc_config.num_temporal_layers << " configured.";
+ return false;
+ }
+ aom_svc_params_t& svc_params = svc_params_.emplace();
+ svc_params.number_spatial_layers = svc_config.num_spatial_layers;
+ svc_params.number_temporal_layers = svc_config.num_temporal_layers;
+
+ int num_layers =
+ svc_config.num_spatial_layers * svc_config.num_temporal_layers;
+ for (int i = 0; i < num_layers; ++i) {
+ svc_params.min_quantizers[i] = kQpMin;
+ svc_params.max_quantizers[i] = encoder_settings_.qpMax;
+ }
+
+ // Assume each temporal layer doubles framerate.
+ for (int tid = 0; tid < svc_config.num_temporal_layers; ++tid) {
+ svc_params.framerate_factor[tid] =
+ 1 << (svc_config.num_temporal_layers - tid - 1);
+ }
+
+ for (int sid = 0; sid < svc_config.num_spatial_layers; ++sid) {
+ svc_params.scaling_factor_num[sid] = svc_config.scaling_factor_num[sid];
+ svc_params.scaling_factor_den[sid] = svc_config.scaling_factor_den[sid];
+ }
+
+ return true;
+}
+
+void LibaomAv1Encoder::SetSvcLayerId(
+ const ScalableVideoController::LayerFrameConfig& layer_frame) {
+ aom_svc_layer_id_t layer_id = {};
+ layer_id.spatial_layer_id = layer_frame.SpatialId();
+ layer_id.temporal_layer_id = layer_frame.TemporalId();
+ aom_codec_err_t ret =
+ aom_codec_control(&ctx_, AV1E_SET_SVC_LAYER_ID, &layer_id);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AV1E_SET_SVC_LAYER_ID.";
+ }
+}
+
+void LibaomAv1Encoder::SetSvcRefFrameConfig(
+ const ScalableVideoController::LayerFrameConfig& layer_frame) {
+ // Buffer name to use for each layer_frame.buffers position. In particular
+ // when there are 2 buffers are referenced, prefer name them last and golden,
+ // because av1 bitstream format has dedicated fields for these two names.
+ // See last_frame_idx and golden_frame_idx in the av1 spec
+ // https://aomediacodec.github.io/av1-spec/av1-spec.pdf
+ static constexpr int kPreferedSlotName[] = {0, // Last
+ 3, // Golden
+ 1, 2, 4, 5, 6};
+ static constexpr int kAv1NumBuffers = 8;
+
+ aom_svc_ref_frame_config_t ref_frame_config = {};
+ RTC_CHECK_LE(layer_frame.Buffers().size(), ABSL_ARRAYSIZE(kPreferedSlotName));
+ for (size_t i = 0; i < layer_frame.Buffers().size(); ++i) {
+ const CodecBufferUsage& buffer = layer_frame.Buffers()[i];
+ int slot_name = kPreferedSlotName[i];
+ RTC_CHECK_GE(buffer.id, 0);
+ RTC_CHECK_LT(buffer.id, kAv1NumBuffers);
+ ref_frame_config.ref_idx[slot_name] = buffer.id;
+ if (buffer.referenced) {
+ ref_frame_config.reference[slot_name] = 1;
+ }
+ if (buffer.updated) {
+ ref_frame_config.refresh[buffer.id] = 1;
+ }
+ }
+ aom_codec_err_t ret = aom_codec_control(&ctx_, AV1E_SET_SVC_REF_FRAME_CONFIG,
+ &ref_frame_config);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AV1_SET_SVC_REF_FRAME_CONFIG.";
+ }
+}
+
int32_t LibaomAv1Encoder::RegisterEncodeCompleteCallback(
EncodedImageCallback* encoded_image_callback) {
encoded_image_callback_ = encoded_image_callback;
@@ -235,10 +403,18 @@ int32_t LibaomAv1Encoder::Encode(
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
- keyframe_required_ =
+ bool keyframe_required =
frame_types != nullptr &&
absl::c_linear_search(*frame_types, VideoFrameType::kVideoFrameKey);
+ std::vector<ScalableVideoController::LayerFrameConfig> layer_frames =
+ svc_controller_->NextFrameConfig(keyframe_required);
+
+ if (layer_frames.empty()) {
+ RTC_LOG(LS_ERROR) << "SVCController returned no configuration for a frame.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
// Convert input frame to I420, if needed.
VideoFrame prepped_input_frame = frame;
if (prepped_input_frame.video_frame_buffer()->type() !=
@@ -263,75 +439,96 @@ int32_t LibaomAv1Encoder::Encode(
const uint32_t duration =
kRtpTicksPerSecond / static_cast<float>(encoder_settings_.maxFramerate);
- aom_enc_frame_flags_t flags = (keyframe_required_) ? AOM_EFLAG_FORCE_KF : 0;
- // Encode a frame.
- aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_,
- frame.timestamp(), duration, flags);
- if (ret != AOM_CODEC_OK) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
- << " on aom_codec_encode.";
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
+ for (ScalableVideoController::LayerFrameConfig& layer_frame : layer_frames) {
+ aom_enc_frame_flags_t flags =
+ layer_frame.IsKeyframe() ? AOM_EFLAG_FORCE_KF : 0;
- // Get encoded image data.
- EncodedImage encoded_image;
- encoded_image._completeFrame = true;
- aom_codec_iter_t iter = nullptr;
- int data_pkt_count = 0;
- while (const aom_codec_cx_pkt_t* pkt = aom_codec_get_cx_data(&ctx_, &iter)) {
- if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
- if (data_pkt_count > 0) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encoder returned more than "
- "one data packet for an input video frame.";
- Release();
- }
- // TODO(bugs.webrtc.org/11174): Remove this hack when
- // webrtc_pc_e2e::SingleProcessEncodedImageDataInjector not used or fixed
- // not to assume that encoded image transfered as is.
- const uint8_t* data = static_cast<const uint8_t*>(pkt->data.frame.buf);
- size_t size = pkt->data.frame.sz;
- if (size > 2 && data[0] == 0b0'0010'010 && data[1] == 0) {
- // Typically frame starts with a Temporal Delimter OBU of size 0 that is
- // not need by any component in webrtc and discarded during rtp
- // packetization. Before discarded it confuses test framework that
- // assumes received encoded frame is exactly same as sent frame.
- data += 2;
- size -= 2;
- }
- encoded_image.SetEncodedData(EncodedImageBuffer::Create(data, size));
-
- bool is_key_frame = ((pkt->data.frame.flags & AOM_EFLAG_FORCE_KF) != 0);
- encoded_image._frameType = is_key_frame
- ? VideoFrameType::kVideoFrameKey
- : VideoFrameType::kVideoFrameDelta;
- encoded_image.SetTimestamp(frame.timestamp());
- encoded_image.capture_time_ms_ = frame.render_time_ms();
- encoded_image.rotation_ = frame.rotation();
- encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
- // If encoded image width/height info are added to aom_codec_cx_pkt_t,
- // use those values in lieu of the values in frame.
- encoded_image._encodedHeight = frame.height();
- encoded_image._encodedWidth = frame.width();
- encoded_image.timing_.flags = VideoSendTiming::kInvalid;
- int qp = -1;
- ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
- if (ret != AOM_CODEC_OK) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
- << " on control AOME_GET_LAST_QUANTIZER.";
- return WEBRTC_VIDEO_CODEC_ERROR;
+ if (SvcEnabled()) {
+ SetSvcLayerId(layer_frame);
+ SetSvcRefFrameConfig(layer_frame);
+ }
+
+ // Encode a frame.
+ aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_,
+ frame.timestamp(), duration, flags);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on aom_codec_encode.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Get encoded image data.
+ EncodedImage encoded_image;
+ encoded_image._completeFrame = true;
+ aom_codec_iter_t iter = nullptr;
+ int data_pkt_count = 0;
+ while (const aom_codec_cx_pkt_t* pkt =
+ aom_codec_get_cx_data(&ctx_, &iter)) {
+ if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
+ if (data_pkt_count > 0) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encoder returned more than "
+ "one data packet for an input video frame.";
+ Release();
+ }
+ encoded_image.SetEncodedData(EncodedImageBuffer::Create(
+ /*data=*/static_cast<const uint8_t*>(pkt->data.frame.buf),
+ /*size=*/pkt->data.frame.sz));
+
+ if ((pkt->data.frame.flags & AOM_EFLAG_FORCE_KF) != 0) {
+ layer_frame.Keyframe();
+ }
+ encoded_image._frameType = layer_frame.IsKeyframe()
+ ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+ encoded_image.SetTimestamp(frame.timestamp());
+ encoded_image.capture_time_ms_ = frame.render_time_ms();
+ encoded_image.rotation_ = frame.rotation();
+ encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
+ // If encoded image width/height info are added to aom_codec_cx_pkt_t,
+ // use those values in lieu of the values in frame.
+ encoded_image._encodedHeight = frame.height();
+ encoded_image._encodedWidth = frame.width();
+ encoded_image.timing_.flags = VideoSendTiming::kInvalid;
+ int qp = -1;
+ ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AOME_GET_LAST_QUANTIZER.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encoded_image.qp_ = qp;
+ encoded_image.SetColorSpace(frame.color_space());
+ ++data_pkt_count;
}
- encoded_image.qp_ = qp;
- encoded_image.SetColorSpace(frame.color_space());
- ++data_pkt_count;
}
- }
- // Deliver encoded image data.
- if (encoded_image.size() > 0) {
- CodecSpecificInfo codec_specific_info;
- encoded_image_callback_->OnEncodedImage(encoded_image, &codec_specific_info,
- nullptr);
+ // Deliver encoded image data.
+ if (encoded_image.size() > 0) {
+ CodecSpecificInfo codec_specific_info;
+ codec_specific_info.codecType = kVideoCodecAV1;
+ bool is_keyframe = layer_frame.IsKeyframe();
+ codec_specific_info.generic_frame_info =
+ svc_controller_->OnEncodeDone(std::move(layer_frame));
+ if (is_keyframe && codec_specific_info.generic_frame_info) {
+ codec_specific_info.template_structure =
+ svc_controller_->DependencyStructure();
+ auto& resolutions = codec_specific_info.template_structure->resolutions;
+ if (SvcEnabled()) {
+ resolutions.resize(svc_params_->number_spatial_layers);
+ for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) {
+ int n = svc_params_->scaling_factor_num[sid];
+ int d = svc_params_->scaling_factor_den[sid];
+ resolutions[sid] =
+ RenderResolution(cfg_.g_w * n / d, cfg_.g_h * n / d);
+ }
+ } else {
+ resolutions = {RenderResolution(cfg_.g_w, cfg_.g_h)};
+ }
+ }
+ encoded_image_callback_->OnEncodedImage(encoded_image,
+ &codec_specific_info, nullptr);
+ }
}
return WEBRTC_VIDEO_CODEC_OK;
@@ -359,9 +556,28 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) {
RTC_DCHECK_LE(rc_target_bitrate_kbps, encoder_settings_.maxBitrate);
RTC_DCHECK_GE(rc_target_bitrate_kbps, encoder_settings_.minBitrate);
+ svc_controller_->OnRatesUpdated(parameters.bitrate);
// Set target bit rate.
cfg_.rc_target_bitrate = rc_target_bitrate_kbps;
+ if (SvcEnabled()) {
+ for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) {
+ // libaom bitrate for spatial id S and temporal id T means bitrate
+ // of frames with spatial_id=S and temporal_id<=T
+ // while `parameters.bitrate` provdies bitrate of frames with
+ // spatial_id=S and temporal_id=T
+ int accumulated_bitrate_bps = 0;
+ for (int tid = 0; tid < svc_params_->number_temporal_layers; ++tid) {
+ int layer_index = sid * svc_params_->number_temporal_layers + tid;
+ accumulated_bitrate_bps += parameters.bitrate.GetBitrate(sid, tid);
+ // `svc_params.layer_target_bitrate` expects bitrate in kbps.
+ svc_params_->layer_target_bitrate[layer_index] =
+ accumulated_bitrate_bps / 1000;
+ }
+ }
+ aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &*svc_params_);
+ }
+
// Set frame rate to closest integer value.
encoder_settings_.maxFramerate =
static_cast<uint32_t>(parameters.framerate_fps + 0.5);
@@ -389,7 +605,13 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const {
const bool kIsLibaomAv1EncoderSupported = true;
std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder() {
- return std::make_unique<LibaomAv1Encoder>();
+ return std::make_unique<LibaomAv1Encoder>(
+ std::make_unique<ScalableVideoControllerNoLayering>());
+}
+
+std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller) {
+ return std::make_unique<LibaomAv1Encoder>(std::move(svc_controller));
}
} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/modules/video_coding/codecs/av1/libaom_av1_encoder.h
index 4b0ee28d40..c2f04e669c 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder.h
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.h
@@ -14,12 +14,15 @@
#include "absl/base/attributes.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
namespace webrtc {
ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported;
std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder();
+std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> controller);
} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
index 6d1d0bbb24..341a82774d 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
@@ -11,15 +11,38 @@
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
#include <memory>
+#include <vector>
+#include "absl/types/optional.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/include/video_error_codes.h"
+#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace {
+using ::testing::SizeIs;
+
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ codec_settings.width = 320;
+ codec_settings.height = 180;
+ codec_settings.maxFramerate = 30;
+ codec_settings.maxBitrate = 1000;
+ codec_settings.qpMax = 63;
+ return codec_settings;
+}
+
+VideoEncoder::Settings DefaultEncoderSettings() {
+ return VideoEncoder::Settings(
+ VideoEncoder::Capabilities(/*loss_notification=*/false),
+ /*number_of_cores=*/1, /*max_payload_size=*/1200);
+}
+
TEST(LibaomAv1EncoderTest, CanCreate) {
std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
EXPECT_TRUE(encoder);
@@ -28,17 +51,37 @@ TEST(LibaomAv1EncoderTest, CanCreate) {
TEST(LibaomAv1EncoderTest, InitAndRelease) {
std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
ASSERT_TRUE(encoder);
- VideoCodec codec_settings;
- codec_settings.width = 1280;
- codec_settings.height = 720;
- codec_settings.maxFramerate = 30;
- VideoEncoder::Capabilities capabilities(/*loss_notification=*/false);
- VideoEncoder::Settings encoder_settings(capabilities, /*number_of_cores=*/1,
- /*max_payload_size=*/1200);
- EXPECT_EQ(encoder->InitEncode(&codec_settings, encoder_settings),
+ VideoCodec codec_settings = DefaultCodecSettings();
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
WEBRTC_VIDEO_CODEC_OK);
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
+TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) {
+ // Configure encoder with 2 temporal layers.
+ std::unique_ptr<VideoEncoder> encoder =
+ CreateLibaomAv1Encoder(std::make_unique<ScalabilityStructureL1T2>());
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ VideoEncoder::RateControlParameters rate_parameters;
+ rate_parameters.framerate_fps = 30;
+ rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/0, 300'000);
+ rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/1, 0);
+ encoder->SetRates(rate_parameters);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode();
+ ASSERT_THAT(encoded_frames, SizeIs(1));
+ ASSERT_NE(encoded_frames[0].codec_specific_info.generic_frame_info,
+ absl::nullopt);
+ // Assuming L1T2 structure uses 1st decode target for T0 and 2nd decode target
+ // for T0+T1 frames, expect only 1st decode target is active.
+ EXPECT_EQ(encoded_frames[0]
+ .codec_specific_info.generic_frame_info->active_decode_targets,
+ 0b01);
+}
+
} // namespace
} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
index 4a549ea453..7a577bc29c 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
@@ -11,16 +11,23 @@
#include <stddef.h>
#include <stdint.h>
+#include <map>
#include <memory>
+#include <ostream>
+#include <tuple>
#include <vector>
#include "absl/types/optional.h"
-#include "api/test/create_frame_generator.h"
-#include "api/test/frame_generator_interface.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/create_scalability_structure.h"
#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "test/gmock.h"
@@ -29,79 +36,44 @@
namespace webrtc {
namespace {
+using ::testing::ContainerEq;
+using ::testing::Each;
using ::testing::ElementsAreArray;
+using ::testing::Ge;
using ::testing::IsEmpty;
using ::testing::Not;
using ::testing::NotNull;
+using ::testing::Pointwise;
+using ::testing::SizeIs;
+using ::testing::Truly;
+using ::testing::Values;
// Use small resolution for this test to make it faster.
constexpr int kWidth = 320;
constexpr int kHeight = 180;
constexpr int kFramerate = 30;
-constexpr int kRtpTicksPerSecond = 90000;
-class TestAv1Encoder {
- public:
- struct Encoded {
- EncodedImage encoded_image;
- CodecSpecificInfo codec_specific_info;
- };
-
- TestAv1Encoder() : encoder_(CreateLibaomAv1Encoder()) {
- RTC_CHECK(encoder_);
- VideoCodec codec_settings;
- codec_settings.width = kWidth;
- codec_settings.height = kHeight;
- codec_settings.maxFramerate = kFramerate;
- VideoEncoder::Settings encoder_settings(
- VideoEncoder::Capabilities(/*loss_notification=*/false),
- /*number_of_cores=*/1, /*max_payload_size=*/1200);
- EXPECT_EQ(encoder_->InitEncode(&codec_settings, encoder_settings),
- WEBRTC_VIDEO_CODEC_OK);
- EXPECT_EQ(encoder_->RegisterEncodeCompleteCallback(&callback_),
- WEBRTC_VIDEO_CODEC_OK);
- }
- // This class requires pointer stability and thus not copyable nor movable.
- TestAv1Encoder(const TestAv1Encoder&) = delete;
- TestAv1Encoder& operator=(const TestAv1Encoder&) = delete;
-
- void EncodeAndAppend(const VideoFrame& frame, std::vector<Encoded>* encoded) {
- callback_.SetEncodeStorage(encoded);
- std::vector<VideoFrameType> frame_types = {
- VideoFrameType::kVideoFrameDelta};
- EXPECT_EQ(encoder_->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
- // Prefer to crash checking nullptr rather than writing to random memory.
- callback_.SetEncodeStorage(nullptr);
- }
-
- private:
- class EncoderCallback : public EncodedImageCallback {
- public:
- void SetEncodeStorage(std::vector<Encoded>* storage) { storage_ = storage; }
-
- private:
- Result OnEncodedImage(
- const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* /*fragmentation*/) override {
- RTC_CHECK(storage_);
- storage_->push_back({encoded_image, *codec_specific_info});
- return Result(Result::Error::OK);
- }
-
- std::vector<Encoded>* storage_ = nullptr;
- };
-
- EncoderCallback callback_;
- std::unique_ptr<VideoEncoder> encoder_;
-};
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.maxFramerate = kFramerate;
+ codec_settings.maxBitrate = 1000;
+ codec_settings.qpMax = 63;
+ return codec_settings;
+}
+VideoEncoder::Settings DefaultEncoderSettings() {
+ return VideoEncoder::Settings(
+ VideoEncoder::Capabilities(/*loss_notification=*/false),
+ /*number_of_cores=*/1, /*max_payload_size=*/1200);
+}
class TestAv1Decoder {
public:
- TestAv1Decoder() {
- decoder_ = CreateLibaomAv1Decoder();
+ explicit TestAv1Decoder(int decoder_id)
+ : decoder_id_(decoder_id), decoder_(CreateLibaomAv1Decoder()) {
if (decoder_ == nullptr) {
- ADD_FAILURE() << "Failed to create a decoder";
+ ADD_FAILURE() << "Failed to create a decoder#" << decoder_id_;
return;
}
EXPECT_EQ(decoder_->InitDecode(/*codec_settings=*/nullptr,
@@ -116,20 +88,17 @@ class TestAv1Decoder {
void Decode(int64_t frame_id, const EncodedImage& image) {
ASSERT_THAT(decoder_, NotNull());
- requested_ids_.push_back(frame_id);
int32_t error = decoder_->Decode(image, /*missing_frames=*/false,
/*render_time_ms=*/image.capture_time_ms_);
if (error != WEBRTC_VIDEO_CODEC_OK) {
ADD_FAILURE() << "Failed to decode frame id " << frame_id
- << " with error code " << error;
+ << " with error code " << error << " by decoder#"
+ << decoder_id_;
return;
}
decoded_ids_.push_back(frame_id);
}
- const std::vector<int64_t>& requested_frame_ids() const {
- return requested_ids_;
- }
const std::vector<int64_t>& decoded_frame_ids() const { return decoded_ids_; }
size_t num_output_frames() const { return callback_.num_called(); }
@@ -156,51 +125,208 @@ class TestAv1Decoder {
int num_called_ = 0;
};
- std::vector<int64_t> requested_ids_;
+ const int decoder_id_;
std::vector<int64_t> decoded_ids_;
DecoderCallback callback_;
- std::unique_ptr<VideoDecoder> decoder_;
+ const std::unique_ptr<VideoDecoder> decoder_;
};
-std::vector<VideoFrame> GenerateFrames(size_t num_frames) {
- std::vector<VideoFrame> frames;
- frames.reserve(num_frames);
-
- auto input_frame_generator = test::CreateSquareFrameGenerator(
- kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
- absl::nullopt);
- uint32_t timestamp = 1000;
- for (size_t i = 0; i < num_frames; ++i) {
- frames.push_back(
- VideoFrame::Builder()
- .set_video_frame_buffer(input_frame_generator->NextFrame().buffer)
- .set_timestamp_rtp(timestamp += kRtpTicksPerSecond / kFramerate)
- .build());
- }
- return frames;
-}
-
TEST(LibaomAv1Test, EncodeDecode) {
- TestAv1Decoder decoder;
- TestAv1Encoder encoder;
+ TestAv1Decoder decoder(0);
+ std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
- std::vector<TestAv1Encoder::Encoded> encoded_frames;
- for (const VideoFrame& frame : GenerateFrames(/*num_frames=*/4)) {
- encoder.EncodeAndAppend(frame, &encoded_frames);
- }
- for (size_t frame_idx = 0; frame_idx < encoded_frames.size(); ++frame_idx) {
- decoder.Decode(static_cast<int64_t>(frame_idx),
- encoded_frames[frame_idx].encoded_image);
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(4).Encode();
+ for (size_t frame_id = 0; frame_id < encoded_frames.size(); ++frame_id) {
+ decoder.Decode(static_cast<int64_t>(frame_id),
+ encoded_frames[frame_id].encoded_image);
}
// Check encoder produced some frames for decoder to decode.
ASSERT_THAT(encoded_frames, Not(IsEmpty()));
// Check decoder found all of them valid.
- EXPECT_THAT(decoder.decoded_frame_ids(),
- ElementsAreArray(decoder.requested_frame_ids()));
+ EXPECT_THAT(decoder.decoded_frame_ids(), SizeIs(encoded_frames.size()));
// Check each of them produced an output frame.
EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size());
}
+struct LayerId {
+ friend bool operator==(const LayerId& lhs, const LayerId& rhs) {
+ return std::tie(lhs.spatial_id, lhs.temporal_id) ==
+ std::tie(rhs.spatial_id, rhs.temporal_id);
+ }
+ friend bool operator<(const LayerId& lhs, const LayerId& rhs) {
+ return std::tie(lhs.spatial_id, lhs.temporal_id) <
+ std::tie(rhs.spatial_id, rhs.temporal_id);
+ }
+ friend std::ostream& operator<<(std::ostream& s, const LayerId& layer) {
+ return s << "S" << layer.spatial_id << "T" << layer.temporal_id;
+ }
+
+ int spatial_id = 0;
+ int temporal_id = 0;
+};
+
+struct SvcTestParam {
+ std::string name;
+ int num_frames_to_generate;
+ std::map<LayerId, DataRate> configured_bitrates;
+};
+
+class LibaomAv1SvcTest : public ::testing::TestWithParam<SvcTestParam> {};
+
+TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) {
+ std::unique_ptr<ScalableVideoController> svc_controller =
+ CreateScalabilityStructure(GetParam().name);
+ size_t num_decode_targets =
+ svc_controller->DependencyStructure().num_decode_targets;
+
+ std::unique_ptr<VideoEncoder> encoder =
+ CreateLibaomAv1Encoder(std::move(svc_controller));
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(GetParam().num_frames_to_generate)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(
+ encoded_frames,
+ Each(Truly([&](const EncodedVideoFrameProducer::EncodedFrame& frame) {
+ return frame.codec_specific_info.generic_frame_info &&
+ frame.codec_specific_info.generic_frame_info
+ ->decode_target_indications.size() == num_decode_targets;
+ })));
+
+ for (size_t dt = 0; dt < num_decode_targets; ++dt) {
+ TestAv1Decoder decoder(dt);
+ std::vector<int64_t> requested_ids;
+ for (int64_t frame_id = 0;
+ frame_id < static_cast<int64_t>(encoded_frames.size()); ++frame_id) {
+ const EncodedVideoFrameProducer::EncodedFrame& frame =
+ encoded_frames[frame_id];
+ if (frame.codec_specific_info.generic_frame_info
+ ->decode_target_indications[dt] !=
+ DecodeTargetIndication::kNotPresent) {
+ requested_ids.push_back(frame_id);
+ decoder.Decode(frame_id, frame.encoded_image);
+ }
+ }
+
+ ASSERT_THAT(requested_ids, SizeIs(Ge(2u)));
+ // Check decoder found all of them valid.
+ EXPECT_THAT(decoder.decoded_frame_ids(), ContainerEq(requested_ids))
+ << "Decoder#" << dt;
+ // Check each of them produced an output frame.
+ EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size())
+ << "Decoder#" << dt;
+ }
+}
+
+MATCHER(SameLayerIdAndBitrateIsNear, "") {
+ // First check if layer id is the same.
+ return std::get<0>(arg).first == std::get<1>(arg).first &&
+ // check measured bitrate is not much lower than requested.
+ std::get<0>(arg).second >= std::get<1>(arg).second * 0.8 &&
+ // check measured bitrate is not much larger than requested.
+ std::get<0>(arg).second <= std::get<1>(arg).second * 1.1;
+}
+
+TEST_P(LibaomAv1SvcTest, SetRatesMatchMeasuredBitrate) {
+ const SvcTestParam param = GetParam();
+ if (param.configured_bitrates.empty()) {
+ // Rates are not configured for this particular structure, skip the test.
+ return;
+ }
+ constexpr TimeDelta kDuration = TimeDelta::Seconds(5);
+
+ VideoBitrateAllocation allocation;
+ for (const auto& kv : param.configured_bitrates) {
+ allocation.SetBitrate(kv.first.spatial_id, kv.first.temporal_id,
+ kv.second.bps());
+ }
+
+ std::unique_ptr<VideoEncoder> encoder =
+ CreateLibaomAv1Encoder(CreateScalabilityStructure(param.name));
+ ASSERT_TRUE(encoder);
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.maxBitrate = allocation.get_sum_kbps();
+ codec_settings.maxFramerate = 30;
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ encoder->SetRates(VideoEncoder::RateControlParameters(
+ allocation, codec_settings.maxFramerate));
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(codec_settings.maxFramerate * kDuration.seconds())
+ .SetResolution({codec_settings.width, codec_settings.height})
+ .SetFramerateFps(codec_settings.maxFramerate)
+ .Encode();
+
+ // Calculate size of each layer.
+ std::map<LayerId, DataSize> layer_size;
+ for (const auto& frame : encoded_frames) {
+ ASSERT_TRUE(frame.codec_specific_info.generic_frame_info);
+ const auto& layer = *frame.codec_specific_info.generic_frame_info;
+ LayerId layer_id = {layer.spatial_id, layer.temporal_id};
+ // This is almost same as
+ // layer_size[layer_id] += DataSize::Bytes(frame.encoded_image.size());
+ // but avoids calling deleted default constructor for DataSize.
+ layer_size.emplace(layer_id, DataSize::Zero()).first->second +=
+ DataSize::Bytes(frame.encoded_image.size());
+ }
+ // Convert size of the layer into bitrate of that layer.
+ std::vector<std::pair<LayerId, DataRate>> measured_bitrates;
+ for (const auto& kv : layer_size) {
+ measured_bitrates.emplace_back(kv.first, kv.second / kDuration);
+ }
+ EXPECT_THAT(measured_bitrates, Pointwise(SameLayerIdAndBitrateIsNear(),
+ param.configured_bitrates));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ Svc,
+ LibaomAv1SvcTest,
+ Values(SvcTestParam{"NONE", /*num_frames_to_generate=*/4},
+ SvcTestParam{"L1T2",
+ /*num_frames_to_generate=*/4,
+ /*configured_bitrates=*/
+ {{{0, 0}, DataRate::KilobitsPerSec(60)},
+ {{0, 1}, DataRate::KilobitsPerSec(40)}}},
+ SvcTestParam{"L1T3", /*num_frames_to_generate=*/8},
+ SvcTestParam{"L2T1",
+ /*num_frames_to_generate=*/3,
+ /*configured_bitrates=*/
+ {{{0, 0}, DataRate::KilobitsPerSec(30)},
+ {{1, 0}, DataRate::KilobitsPerSec(70)}}},
+ SvcTestParam{"L2T1h",
+ /*num_frames_to_generate=*/3,
+ /*configured_bitrates=*/
+ {{{0, 0}, DataRate::KilobitsPerSec(30)},
+ {{1, 0}, DataRate::KilobitsPerSec(70)}}},
+ SvcTestParam{"L2T1_KEY", /*num_frames_to_generate=*/3},
+ SvcTestParam{"L3T1", /*num_frames_to_generate=*/3},
+ SvcTestParam{"L3T3", /*num_frames_to_generate=*/8},
+ SvcTestParam{"S2T1", /*num_frames_to_generate=*/3},
+ SvcTestParam{"L2T2", /*num_frames_to_generate=*/4},
+ SvcTestParam{"L2T2_KEY", /*num_frames_to_generate=*/4},
+ SvcTestParam{"L2T2_KEY_SHIFT",
+ /*num_frames_to_generate=*/4,
+ /*configured_bitrates=*/
+ {{{0, 0}, DataRate::KilobitsPerSec(70)},
+ {{0, 1}, DataRate::KilobitsPerSec(30)},
+ {{1, 0}, DataRate::KilobitsPerSec(110)},
+ {{1, 1}, DataRate::KilobitsPerSec(80)}}}),
+ [](const testing::TestParamInfo<SvcTestParam>& info) {
+ return info.param.name;
+ });
+
} // namespace
} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc b/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc
new file mode 100644
index 0000000000..ae4c879224
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][2] = {
+ {kSwitch, kSwitch}, // KeyFrame
+ {kNotPresent, kDiscardable}, // DeltaFrame T1
+ {kSwitch, kSwitch}, // DeltaFrame T0
+};
+
+} // namespace
+
+ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL1T2::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 1;
+ structure.decode_target_protected_by_chain = {0, 0};
+ structure.templates.resize(3);
+ structure.templates[0].T(0).Dtis("SS").ChainDiffs({0});
+ structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2});
+ structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL1T2::NextFrameConfig(bool restart) {
+ if (!active_decode_targets_[0]) {
+ RTC_LOG(LS_WARNING) << "No bitrate allocated for temporal layer 0, yet "
+ "frame is requested. No frame will be encoded.";
+ return {};
+ }
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ } else if (!active_decode_targets_[1]) {
+ next_pattern_ = kDeltaFrameT0;
+ }
+ std::vector<LayerFrameConfig> result(1);
+
+ switch (next_pattern_) {
+ case kKeyFrame:
+ result[0].Id(0).T(0).Keyframe().Update(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ result[0].Id(1).T(1).Reference(0);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ result[0].Id(2).T(0).ReferenceAndUpdate(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL1T2::OnEncodeDone(
+ LayerFrameConfig config) {
+ // Encoder may have generated a keyframe even when not asked for it. Treat
+ // such frame same as requested keyframe, in particular restart the sequence.
+ if (config.IsKeyframe()) {
+ config = NextFrameConfig(/*restart=*/true).front();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.TemporalId() == 0};
+ frame_info->active_decode_targets = active_decode_targets_;
+ return frame_info;
+}
+
+void ScalabilityStructureL1T2::OnRatesUpdated(
+ const VideoBitrateAllocation& bitrates) {
+ if (bitrates.GetBitrate(0, 0) == 0) {
+ // It is unclear what frame can be produced when base layer is disabled,
+ // so mark all decode targets as inactive to produce no frames.
+ active_decode_targets_.reset();
+ return;
+ }
+ active_decode_targets_.set(0, true);
+ active_decode_targets_.set(1, bitrates.GetBitrate(0, 1) > 0);
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l1t2.h b/modules/video_coding/codecs/av1/scalability_structure_l1t2.h
new file mode 100644
index 0000000000..55a9e8bbb0
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l1t2.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
+
+#include <bitset>
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+class ScalabilityStructureL1T2 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL1T2() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT1,
+ kDeltaFrameT0,
+ };
+
+ FramePattern next_pattern_ = kKeyFrame;
+ std::bitset<32> active_decode_targets_ = 0b11;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc b/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc
new file mode 100644
index 0000000000..a04a4262ed
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][3] = {
+ {kSwitch, kSwitch, kSwitch}, // T0
+ {kNotPresent, kDiscardable, kSwitch}, // T1
+ {kNotPresent, kNotPresent, kDiscardable}, // T2
+};
+
+} // namespace
+
+ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL1T3::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 3;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 3;
+ structure.num_chains = 1;
+ structure.decode_target_protected_by_chain = {0, 0, 0};
+ structure.templates.resize(5);
+ structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0});
+ structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4});
+ structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2});
+ structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1});
+ structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL1T3::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ }
+ std::vector<LayerFrameConfig> config(1);
+
+ switch (next_pattern_) {
+ case kKeyFrame:
+ config[0].T(0).Keyframe().Update(0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ case kDeltaFrameT2A:
+ config[0].T(2).Reference(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ config[0].T(1).Reference(0).Update(1);
+ next_pattern_ = kDeltaFrameT2B;
+ break;
+ case kDeltaFrameT2B:
+ config[0].T(2).Reference(1);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ config[0].T(0).ReferenceAndUpdate(0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL1T3::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.TemporalId() < 0 ||
+ config.TemporalId() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected temporal id " << config.TemporalId();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(
+ std::begin(kDtis[config.TemporalId()]),
+ std::end(kDtis[config.TemporalId()]));
+ frame_info->part_of_chain = {config.TemporalId() == 0};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l1t3.h b/modules/video_coding/codecs/av1/scalability_structure_l1t3.h
new file mode 100644
index 0000000000..562d0f2a50
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l1t3.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// T2 0 0 0 0
+// | / | /
+// T1 / 0 / 0 ...
+// |_/ |_/
+// T0 0-------0------
+// Time-> 0 1 2 3 4 5 6 7
+class ScalabilityStructureL1T3 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL1T3() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT2A,
+ kDeltaFrameT1,
+ kDeltaFrameT2B,
+ kDeltaFrameT0,
+ };
+
+ FramePattern next_pattern_ = kKeyFrame;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc
new file mode 100644
index 0000000000..2070a4c9bb
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[4][2] = {
+ {kSwitch, kSwitch}, // Key, S0
+ {kNotPresent, kSwitch}, // Key, S1
+ {kSwitch, kRequired}, // Delta, S0
+ {kNotPresent, kRequired}, // Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-R").ChainDiffs({1, 1}).FrameDiffs({2, 1});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T1::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+ // Buffer0 keeps latest S0 frame, Buffer1 keeps latest S1 frame.
+ if (restart || keyframe_) {
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).Reference(0).Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].Id(2).S(0).ReferenceAndUpdate(0);
+ result[1].Id(3).S(1).Reference(0).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0, true};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1.h b/modules/video_coding/codecs/av1/scalability_structure_l2t1.h
new file mode 100644
index 0000000000..0f53602604
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// | | | ...
+// S0 0--0--0-
+class ScalabilityStructureL2T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ LayerFrameConfig KeyFrameConfig() const;
+
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc
new file mode 100644
index 0000000000..ab76f1f7ef
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][2] = {
+ {kSwitch, kSwitch}, // Key, S0
+ {kSwitch, kNotPresent}, // Delta, S0
+ {kNotPresent, kSwitch}, // Key and Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T1Key::~ScalabilityStructureL2T1Key() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T1Key::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T1Key::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T1Key::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T1Key::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame.
+ if (restart || keyframe_) {
+ result[0] = KeyFrameConfig();
+ result[1].Id(2).S(1).Reference(0).Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].Id(1).S(0).ReferenceAndUpdate(0);
+ result[1].Id(2).S(1).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T1Key::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h b/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h
new file mode 100644
index 0000000000..c1d8c8947f
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// | ...
+// S0 0--0--0-
+class ScalabilityStructureL2T1Key : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T1Key() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ LayerFrameConfig KeyFrameConfig() const;
+
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc
new file mode 100644
index 0000000000..7b05c92cf6
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1h.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+ScalabilityStructureL2T1h::~ScalabilityStructureL2T1h() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T1h::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ // 1.5:1 scaling, see https://w3c.github.io/webrtc-svc/#scalabilitymodes*
+ result.scaling_factor_num[0] = 2;
+ result.scaling_factor_den[0] = 3;
+ return result;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h b/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h
new file mode 100644
index 0000000000..ec419d9c31
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_
+
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+class ScalabilityStructureL2T1h : public ScalabilityStructureL2T1 {
+ public:
+ ~ScalabilityStructureL2T1h() override;
+
+ StreamLayersConfig StreamConfig() const override;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc
new file mode 100644
index 0000000000..3da41832ab
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+// decode targets: S0T0, S0T1, S1T0, S1T1
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1
+ {kNotPresent, kDiscardable, kNotPresent, kRequired}, // kDeltaT1, S0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1
+ {kSwitch, kSwitch, kRequired, kRequired}, // kDeltaT0, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(6);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4});
+ templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1});
+ templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ // Buffer2 keeps latest S0T1 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ case kDeltaT1:
+ result[0].Id(2).S(0).T(1).Reference(0).Update(2);
+ result[1].Id(3).S(1).T(1).Reference(2).Reference(1);
+ next_pattern_ = kDeltaT0;
+ break;
+ case kDeltaT0:
+ result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(5).S(1).T(0).Reference(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0, true};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2.h b/modules/video_coding/codecs/av1/scalability_structure_l2t2.h
new file mode 100644
index 0000000000..dbf5036c1f
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// /| /| /
+// S1T0 0-+-0-+-0
+// | | | | | ...
+// S0T1 | 0 | 0 |
+// |/ |/ |/
+// S0T0 0---0---0--
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDeltaT1,
+ kDeltaT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc
new file mode 100644
index 0000000000..a59ef1a97d
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+// decode targets: S0T0, S0T1, S1T0, S1T1
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1
+ {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDeltaT1, S0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1
+ {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDeltaT0, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T2Key::~ScalabilityStructureL2T2Key() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2Key::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(6);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4});
+ templates[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4});
+ templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2Key::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2Key::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ case kDeltaT1:
+ result[0].Id(2).S(0).T(1).Reference(0);
+ result[1].Id(3).S(1).T(1).Reference(1);
+ next_pattern_ = kDeltaT0;
+ break;
+ case kDeltaT0:
+ result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2Key::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h
new file mode 100644
index 0000000000..9adfcbcd58
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// / / /
+// S1T0 0---0---0
+// | ...
+// S0T1 | 0 0
+// |/ / /
+// S0T0 0---0---0
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2Key : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2Key() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDeltaT1,
+ kDeltaT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc
new file mode 100644
index 0000000000..d205b4a1c6
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0T0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1T0
+ {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDelta0, S0T0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDelta0, S1T1
+ {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDelta1, S0T1
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDelta1, S1T0
+};
+
+} // namespace
+
+ScalabilityStructureL2T2KeyShift::~ScalabilityStructureL2T2KeyShift() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2KeyShift::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2KeyShift::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(7);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[2].S(0).T(0).Dtis("SS--").ChainDiffs({4, 1}).FrameDiffs({4});
+ templates[3].S(0).T(1).Dtis("-D--").ChainDiffs({2, 3}).FrameDiffs({2});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[5].S(1).T(0).Dtis("--SS").ChainDiffs({3, 4}).FrameDiffs({4});
+ templates[6].S(1).T(1).Dtis("---D").ChainDiffs({1, 2}).FrameDiffs({2});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2KeyShift::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2KeyShift::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDelta0;
+ break;
+ case kDelta0:
+ result[0].Id(2).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(3).S(1).T(1).Reference(1);
+ next_pattern_ = kDelta1;
+ break;
+ case kDelta1:
+ result[0].Id(4).S(0).T(1).Reference(0);
+ result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDelta0;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2KeyShift::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h
new file mode 100644
index 0000000000..1b18bd7c17
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// / / /
+// S1T0 0---0---0
+// | ...
+// S0T1 | 0 0
+// | / /
+// S0T0 0-0---0--
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2KeyShift : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2KeyShift() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDelta0,
+ kDelta1,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc b/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc
new file mode 100644
index 0000000000..51eb00352c
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[5][3] = {
+ {kSwitch, kSwitch, kSwitch}, // Key, S0
+ {kNotPresent, kSwitch, kSwitch}, // Key, S1
+ {kNotPresent, kNotPresent, kSwitch}, // Key and Delta, S2
+ {kSwitch, kRequired, kRequired}, // Delta, S0
+ {kNotPresent, kSwitch, kRequired}, // Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL3T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 3;
+ result.num_temporal_layers = 1;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 4;
+ result.scaling_factor_num[1] = 1;
+ result.scaling_factor_den[1] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 3;
+ structure.num_chains = 3;
+ structure.decode_target_protected_by_chain = {0, 1, 2};
+ auto& templates = structure.templates;
+ templates.resize(6);
+ templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3});
+ templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0});
+ templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1});
+ templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1});
+ templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1});
+ templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL3T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> config(3);
+
+ // Buffer i keeps latest frame for spatial layer i
+ if (restart || keyframe_) {
+ config[0].Id(0).S(0).Keyframe().Update(0);
+ config[1].Id(1).S(1).Update(1).Reference(0);
+ config[2].Id(2).S(2).Update(2).Reference(1);
+ keyframe_ = false;
+ } else {
+ config[0].Id(3).S(0).ReferenceAndUpdate(0);
+ config[1].Id(4).S(1).ReferenceAndUpdate(1).Reference(0);
+ config[2].Id(2).S(2).ReferenceAndUpdate(2).Reference(1);
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL3T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe() && config.Id() != 0) {
+ // Encoder generated a key frame without asking to.
+ if (config.SpatialId() > 0) {
+ RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId()
+ << " for key frame.";
+ }
+ config = LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0, config.SpatialId() <= 1,
+ true};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l3t1.h b/modules/video_coding/codecs/av1/scalability_structure_l3t1.h
new file mode 100644
index 0000000000..404860d08f
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l3t1.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S2 0-0-0-
+// | | |
+// S1 0-0-0-...
+// | | |
+// S0 0-0-0-
+// Time-> 0 1 2
+class ScalabilityStructureL3T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL3T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc b/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc
new file mode 100644
index 0000000000..14e261f74c
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc
@@ -0,0 +1,224 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[12][9] = {
+ // Key, S0
+ {kSwitch, kSwitch, kSwitch, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Key, S1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Key, S2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Delta, S0T2
+ {kNotPresent, kNotPresent, kDiscardable, // S0
+ kNotPresent, kNotPresent, kRequired, // S1
+ kNotPresent, kNotPresent, kRequired}, // S2
+ // Delta, S1T2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kDiscardable, // S1
+ kNotPresent, kNotPresent, kRequired}, // S2
+ // Delta, S2T2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kNotPresent, kNotPresent, kDiscardable}, // S2
+ // Delta, S0T1
+ {kNotPresent, kDiscardable, kSwitch, // S0
+ kNotPresent, kRequired, kRequired, // S1
+ kNotPresent, kRequired, kRequired}, // S2
+ // Delta, S1T1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kDiscardable, kSwitch, // S1
+ kNotPresent, kRequired, kRequired}, // S2
+ // Delta, S2T1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kNotPresent, kDiscardable, kSwitch}, // S2
+ // Delta, S0T0
+ {kSwitch, kSwitch, kSwitch, // S0
+ kRequired, kRequired, kRequired, // S1
+ kRequired, kRequired, kRequired}, // S2
+ // Delta, S1T0
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kRequired, kRequired, kRequired}, // S2
+ // Delta, S2T0
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+};
+
+} // namespace
+
+ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL3T3::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 3;
+ result.num_temporal_layers = 3;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 4;
+ result.scaling_factor_num[1] = 1;
+ result.scaling_factor_den[1] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 9;
+ structure.num_chains = 3;
+ structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2};
+ auto& t = structure.templates;
+ t.resize(15);
+ // Templates are shown in the order frames following them appear in the
+ // stream, but in `structure.templates` array templates are sorted by
+ // (`spatial_id`, `temporal_id`) since that is a dependency descriptor
+ // requirement. Indexes are written in hex for nicer alignment.
+ t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0});
+ t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1});
+ t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1});
+ t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3});
+ t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1});
+ t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1});
+ t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6});
+ t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1});
+ t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1});
+ t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3});
+ t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1});
+ t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1});
+ t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12});
+ t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1});
+ t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL3T3::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).T(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL3T3::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ }
+ std::vector<LayerFrameConfig> config(3);
+
+ // For this structure name each of 8 buffers after the layer of the frame that
+ // buffer keeps.
+ static constexpr int kS0T0 = 0;
+ static constexpr int kS1T0 = 1;
+ static constexpr int kS2T0 = 2;
+ static constexpr int kS0T1 = 3;
+ static constexpr int kS1T1 = 4;
+ static constexpr int kS2T1 = 5;
+ static constexpr int kS0T2 = 6;
+ static constexpr int kS1T2 = 7;
+ switch (next_pattern_) {
+ case kKeyFrame:
+ config[0].Id(0).S(0).T(0).Keyframe().Update(kS0T0);
+ config[1].Id(1).S(1).T(0).Update(kS1T0).Reference(kS0T0);
+ config[2].Id(2).S(2).T(0).Update(kS2T0).Reference(kS1T0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ case kDeltaFrameT2A:
+ config[0].Id(3).S(0).T(2).Reference(kS0T0).Update(kS0T2);
+ config[1].Id(4).S(1).T(2).Reference(kS1T0).Reference(kS0T2).Update(kS1T2);
+ config[2].Id(5).S(2).T(2).Reference(kS2T0).Reference(kS1T2);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ config[0].Id(6).S(0).T(1).Reference(kS0T0).Update(kS0T1);
+ config[1].Id(7).S(1).T(1).Reference(kS1T0).Reference(kS0T1).Update(kS1T1);
+ config[2].Id(8).S(2).T(1).Reference(kS2T0).Reference(kS1T1).Update(kS2T1);
+ next_pattern_ = kDeltaFrameT2B;
+ break;
+ case kDeltaFrameT2B:
+ config[0].Id(3).S(0).T(2).Reference(kS0T1).Update(kS0T2);
+ config[1].Id(4).S(1).T(2).Reference(kS1T1).Reference(kS0T2).Update(kS1T2);
+ config[2].Id(5).S(2).T(2).Reference(kS2T1).Reference(kS1T2);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ config[0].Id(9).S(0).T(0).ReferenceAndUpdate(kS0T0);
+ config[1].Id(10).S(1).T(0).ReferenceAndUpdate(kS1T0).Reference(kS0T0);
+ config[2].Id(11).S(2).T(0).ReferenceAndUpdate(kS2T0).Reference(kS1T0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL3T3::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe() && config.Id() != 0) {
+ // Encoder generated a key frame without asking to.
+ if (config.SpatialId() > 0) {
+ RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId()
+ << " for key frame.";
+ }
+ config = LayerFrameConfig()
+ .Keyframe()
+ .Id(0)
+ .S(0)
+ .T(0)
+ .Update(0)
+ .Update(1)
+ .Update(2)
+ .Update(3)
+ .Update(4)
+ .Update(5)
+ .Update(6)
+ .Update(7);
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() <= 1, true};
+ } else {
+ frame_info->part_of_chain = {false, false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_l3t3.h b/modules/video_coding/codecs/av1/scalability_structure_l3t3.h
new file mode 100644
index 0000000000..363f07e015
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_l3t3.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc
+class ScalabilityStructureL3T3 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL3T3() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT2A,
+ kDeltaFrameT1,
+ kDeltaFrameT2B,
+ kDeltaFrameT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKeyFrame;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc b/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc
new file mode 100644
index 0000000000..182891fa50
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[2][2] = {
+ {kSwitch, kNotPresent}, // S0
+ {kNotPresent, kSwitch}, // S1
+};
+
+} // namespace
+
+ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureS2T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ result.scaling_factor_num[0] = 1;
+ result.scaling_factor_den[0] = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureS2T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+ // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame.
+ if (restart || keyframe_) {
+ result[0].S(0).Keyframe().Update(0);
+ result[1].S(1).Keyframe().Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].S(0).ReferenceAndUpdate(0);
+ result[1].S(1).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureS2T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.SpatialId() < 0 ||
+ config.SpatialId() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected spatial id " << config.SpatialId();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(
+ std::begin(kDtis[config.SpatialId()]),
+ std::end(kDtis[config.SpatialId()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalability_structure_s2t1.h b/modules/video_coding/codecs/av1/scalability_structure_s2t1.h
new file mode 100644
index 0000000000..06a99775c4
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_s2t1.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// ...
+// S0 0--0--0-
+class ScalabilityStructureS2T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureS2T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
diff --git a/modules/video_coding/codecs/av1/scalability_structure_unittest.cc b/modules/video_coding/codecs/av1/scalability_structure_unittest.cc
new file mode 100644
index 0000000000..4d0e283fdd
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalability_structure_unittest.cc
@@ -0,0 +1,295 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <ostream>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_frame_type.h"
+#include "modules/video_coding/chain_diff_calculator.h"
+#include "modules/video_coding/codecs/av1/create_scalability_structure.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/frame_dependencies_calculator.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::AllOf;
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::Field;
+using ::testing::Ge;
+using ::testing::IsEmpty;
+using ::testing::Le;
+using ::testing::Lt;
+using ::testing::Not;
+using ::testing::SizeIs;
+using ::testing::TestWithParam;
+using ::testing::Values;
+
+struct SvcTestParam {
+ friend std::ostream& operator<<(std::ostream& os, const SvcTestParam& param) {
+ return os << param.name;
+ }
+
+ std::string name;
+ int num_temporal_units;
+};
+
+class ScalabilityStructureTest : public TestWithParam<SvcTestParam> {
+ public:
+ std::vector<GenericFrameInfo> GenerateAllFrames() {
+ std::vector<GenericFrameInfo> frames;
+
+ FrameDependenciesCalculator frame_deps_calculator;
+ ChainDiffCalculator chain_diff_calculator;
+ std::unique_ptr<ScalableVideoController> structure_controller =
+ CreateScalabilityStructure(GetParam().name);
+ FrameDependencyStructure structure =
+ structure_controller->DependencyStructure();
+ for (int i = 0; i < GetParam().num_temporal_units; ++i) {
+ for (auto& layer_frame :
+ structure_controller->NextFrameConfig(/*reset=*/false)) {
+ int64_t frame_id = static_cast<int64_t>(frames.size());
+ bool is_keyframe = layer_frame.IsKeyframe();
+ absl::optional<GenericFrameInfo> frame_info =
+ structure_controller->OnEncodeDone(std::move(layer_frame));
+ EXPECT_TRUE(frame_info.has_value());
+ if (is_keyframe) {
+ chain_diff_calculator.Reset(frame_info->part_of_chain);
+ }
+ frame_info->chain_diffs =
+ chain_diff_calculator.From(frame_id, frame_info->part_of_chain);
+ for (int64_t base_frame_id : frame_deps_calculator.FromBuffersUsage(
+ is_keyframe ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta,
+ frame_id, frame_info->encoder_buffers)) {
+ EXPECT_LT(base_frame_id, frame_id);
+ EXPECT_GE(base_frame_id, 0);
+ frame_info->frame_diffs.push_back(frame_id - base_frame_id);
+ }
+
+ frames.push_back(*std::move(frame_info));
+ }
+ }
+ return frames;
+ }
+};
+
+TEST_P(ScalabilityStructureTest,
+ NumberOfDecodeTargetsAndChainsAreInRangeAndConsistent) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ EXPECT_GT(structure.num_decode_targets, 0);
+ EXPECT_LE(structure.num_decode_targets,
+ DependencyDescriptor::kMaxDecodeTargets);
+ EXPECT_GE(structure.num_chains, 0);
+ EXPECT_LE(structure.num_chains, structure.num_decode_targets);
+ if (structure.num_chains == 0) {
+ EXPECT_THAT(structure.decode_target_protected_by_chain, IsEmpty());
+ } else {
+ EXPECT_THAT(structure.decode_target_protected_by_chain,
+ AllOf(SizeIs(structure.num_decode_targets), Each(Ge(0)),
+ Each(Lt(structure.num_chains))));
+ }
+ EXPECT_THAT(structure.templates,
+ SizeIs(Lt(size_t{DependencyDescriptor::kMaxTemplates})));
+}
+
+TEST_P(ScalabilityStructureTest, TemplatesAreSortedByLayerId) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ ASSERT_THAT(structure.templates, Not(IsEmpty()));
+ const auto& first_templates = structure.templates.front();
+ EXPECT_EQ(first_templates.spatial_id, 0);
+ EXPECT_EQ(first_templates.temporal_id, 0);
+ for (size_t i = 1; i < structure.templates.size(); ++i) {
+ const auto& prev_template = structure.templates[i - 1];
+ const auto& next_template = structure.templates[i];
+ if (next_template.spatial_id == prev_template.spatial_id &&
+ next_template.temporal_id == prev_template.temporal_id) {
+ // Same layer, next_layer_idc == 0
+ } else if (next_template.spatial_id == prev_template.spatial_id &&
+ next_template.temporal_id == prev_template.temporal_id + 1) {
+ // Next temporal layer, next_layer_idc == 1
+ } else if (next_template.spatial_id == prev_template.spatial_id + 1 &&
+ next_template.temporal_id == 0) {
+ // Next spatial layer, next_layer_idc == 2
+ } else {
+ // everything else is invalid.
+ ADD_FAILURE() << "Invalid templates order. Template #" << i
+ << " with layer (" << next_template.spatial_id << ","
+ << next_template.temporal_id
+ << ") follows template with layer ("
+ << prev_template.spatial_id << ","
+ << prev_template.temporal_id << ").";
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, TemplatesMatchNumberOfDecodeTargetsAndChains) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ EXPECT_THAT(
+ structure.templates,
+ Each(AllOf(Field(&FrameDependencyTemplate::decode_target_indications,
+ SizeIs(structure.num_decode_targets)),
+ Field(&FrameDependencyTemplate::chain_diffs,
+ SizeIs(structure.num_chains)))));
+}
+
+TEST_P(ScalabilityStructureTest, FrameInfoMatchesFrameDependencyStructure) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ for (size_t frame_id = 0; frame_id < frame_infos.size(); ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ EXPECT_GE(frame.spatial_id, 0) << " for frame " << frame_id;
+ EXPECT_GE(frame.temporal_id, 0) << " for frame " << frame_id;
+ EXPECT_THAT(frame.decode_target_indications,
+ SizeIs(structure.num_decode_targets))
+ << " for frame " << frame_id;
+ EXPECT_THAT(frame.part_of_chain, SizeIs(structure.num_chains))
+ << " for frame " << frame_id;
+ }
+}
+
+TEST_P(ScalabilityStructureTest, ThereIsAPerfectTemplateForEachFrame) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ for (size_t frame_id = 0; frame_id < frame_infos.size(); ++frame_id) {
+ EXPECT_THAT(structure.templates, Contains(frame_infos[frame_id]))
+ << " for frame " << frame_id;
+ }
+}
+
+TEST_P(ScalabilityStructureTest, FrameDependsOnSameOrLowerLayer) {
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ for (int frame_diff : frame.frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ const auto& base_frame = frame_infos[base_frame_id];
+ EXPECT_GE(frame.spatial_id, base_frame.spatial_id)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id;
+ EXPECT_GE(frame.temporal_id, base_frame.temporal_id)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id;
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, NoFrameDependsOnDiscardableOrNotPresent) {
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+
+ for (int dt = 0; dt < structure.num_decode_targets; ++dt) {
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ if (frame.decode_target_indications[dt] ==
+ DecodeTargetIndication::kNotPresent) {
+ continue;
+ }
+ for (int frame_diff : frame.frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ const auto& base_frame = frame_infos[base_frame_id];
+ EXPECT_NE(base_frame.decode_target_indications[dt],
+ DecodeTargetIndication::kNotPresent)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id
+ << " that is not part of decode target#" << dt;
+ EXPECT_NE(base_frame.decode_target_indications[dt],
+ DecodeTargetIndication::kDiscardable)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id
+ << " that is discardable for decode target#" << dt;
+ }
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, NoFrameDependsThroughSwitchIndication) {
+ FrameDependencyStructure structure =
+ CreateScalabilityStructure(GetParam().name)->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+ std::vector<std::set<int64_t>> full_deps(num_frames);
+
+ // For each frame calculate set of all frames it depends on, both directly and
+ // indirectly.
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ std::set<int64_t> all_base_frames;
+ for (int frame_diff : frame_infos[frame_id].frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ all_base_frames.insert(base_frame_id);
+ const auto& indirect = full_deps[base_frame_id];
+ all_base_frames.insert(indirect.begin(), indirect.end());
+ }
+ full_deps[frame_id] = std::move(all_base_frames);
+ }
+
+ // Now check the switch indication: frames after the switch indication mustn't
+ // depend on any addition frames before the switch indications.
+ for (int dt = 0; dt < structure.num_decode_targets; ++dt) {
+ for (int64_t switch_frame_id = 0; switch_frame_id < num_frames;
+ ++switch_frame_id) {
+ if (frame_infos[switch_frame_id].decode_target_indications[dt] !=
+ DecodeTargetIndication::kSwitch) {
+ continue;
+ }
+ for (int64_t later_frame_id = switch_frame_id + 1;
+ later_frame_id < num_frames; ++later_frame_id) {
+ if (frame_infos[later_frame_id].decode_target_indications[dt] ==
+ DecodeTargetIndication::kNotPresent) {
+ continue;
+ }
+ for (int frame_diff : frame_infos[later_frame_id].frame_diffs) {
+ int64_t early_frame_id = later_frame_id - frame_diff;
+ if (early_frame_id < switch_frame_id) {
+ EXPECT_THAT(full_deps[switch_frame_id], Contains(early_frame_id))
+ << "For decode target #" << dt << " frame " << later_frame_id
+ << " depends on the frame " << early_frame_id
+ << " that switch indication frame " << switch_frame_id
+ << " doesn't directly on indirectly depend on.";
+ }
+ }
+ }
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ Svc,
+ ScalabilityStructureTest,
+ Values(SvcTestParam{"L1T2", /*num_temporal_units=*/4},
+ SvcTestParam{"L1T3", /*num_temporal_units=*/8},
+ SvcTestParam{"L2T1", /*num_temporal_units=*/3},
+ SvcTestParam{"L2T1_KEY", /*num_temporal_units=*/3},
+ SvcTestParam{"L3T1", /*num_temporal_units=*/3},
+ SvcTestParam{"L3T3", /*num_temporal_units=*/8},
+ SvcTestParam{"S2T1", /*num_temporal_units=*/3},
+ SvcTestParam{"L2T2", /*num_temporal_units=*/4},
+ SvcTestParam{"L2T2_KEY", /*num_temporal_units=*/4},
+ SvcTestParam{"L2T2_KEY_SHIFT", /*num_temporal_units=*/4}),
+ [](const testing::TestParamInfo<SvcTestParam>& info) {
+ return info.param.name;
+ });
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalable_video_controller.h b/modules/video_coding/codecs/av1/scalable_video_controller.h
new file mode 100644
index 0000000000..0167820798
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalable_video_controller.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
+
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+
+namespace webrtc {
+
+// Controls how video should be encoded to be scalable. Outputs results as
+// buffer usage configuration for encoder and enough details to communicate the
+// scalability structure via dependency descriptor rtp header extension.
+class ScalableVideoController {
+ public:
+ struct StreamLayersConfig {
+ int num_spatial_layers = 1;
+ int num_temporal_layers = 1;
+ // Spatial layers scaling. Frames with spatial_id = i expected to be encoded
+ // with original_resolution * scaling_factor_num[i] / scaling_factor_den[i].
+ int scaling_factor_num[DependencyDescriptor::kMaxSpatialIds] = {1, 1, 1, 1};
+ int scaling_factor_den[DependencyDescriptor::kMaxSpatialIds] = {1, 1, 1, 1};
+ };
+ class LayerFrameConfig {
+ public:
+ // Builders/setters.
+ LayerFrameConfig& Id(int value);
+ LayerFrameConfig& Keyframe();
+ LayerFrameConfig& S(int value);
+ LayerFrameConfig& T(int value);
+ LayerFrameConfig& Reference(int buffer_id);
+ LayerFrameConfig& Update(int buffer_id);
+ LayerFrameConfig& ReferenceAndUpdate(int buffer_id);
+
+ // Getters.
+ int Id() const { return id_; }
+ bool IsKeyframe() const { return is_keyframe_; }
+ int SpatialId() const { return spatial_id_; }
+ int TemporalId() const { return temporal_id_; }
+ const absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers>& Buffers()
+ const {
+ return buffers_;
+ }
+
+ private:
+ // Id to match configuration returned by NextFrameConfig with
+ // (possibly modified) configuration passed back via OnEncoderDone.
+ // The meaning of the id is an implementation detail of
+ // the ScalableVideoController.
+ int id_ = 0;
+
+ // Indication frame should be encoded as a key frame. In particular when
+ // `is_keyframe=true` property `CodecBufferUsage::referenced` should be
+ // ignored and treated as false.
+ bool is_keyframe_ = false;
+
+ int spatial_id_ = 0;
+ int temporal_id_ = 0;
+ // Describes how encoder which buffers encoder allowed to reference and
+ // which buffers encoder should update.
+ absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers> buffers_;
+ };
+
+ virtual ~ScalableVideoController() = default;
+
+ // Returns video structure description for encoder to configure itself.
+ virtual StreamLayersConfig StreamConfig() const = 0;
+
+ // Returns video structure description in format compatible with
+ // dependency descriptor rtp header extension.
+ virtual FrameDependencyStructure DependencyStructure() const = 0;
+
+ // Notifies Controller with updated bitrates per layer. In particular notifies
+ // when certain layers should be disabled.
+ // Controller shouldn't produce LayerFrameConfig for disabled layers.
+ // TODO(bugs.webrtc.org/11404): Make pure virtual when implemented by all
+ // structures.
+ virtual void OnRatesUpdated(const VideoBitrateAllocation& bitrates) {}
+
+ // When `restart` is true, first `LayerFrameConfig` should have `is_keyframe`
+ // set to true.
+ // Returned vector shouldn't be empty.
+ virtual std::vector<LayerFrameConfig> NextFrameConfig(bool restart) = 0;
+
+ // Returns configuration to pass to EncoderCallback.
+ virtual absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) = 0;
+};
+
+// Below are implementation details.
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Id(int value) {
+ id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Keyframe() {
+ is_keyframe_ = true;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::S(int value) {
+ spatial_id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::T(int value) {
+ temporal_id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Reference(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/false);
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Update(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/false, /*updated=*/true);
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::ReferenceAndUpdate(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/true);
+ return *this;
+}
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
diff --git a/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc b/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc
new file mode 100644
index 0000000000..0d211fb911
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+ScalableVideoControllerNoLayering::~ScalableVideoControllerNoLayering() =
+ default;
+
+ScalableVideoController::StreamLayersConfig
+ScalableVideoControllerNoLayering::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure
+ScalableVideoControllerNoLayering::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 1;
+ FrameDependencyTemplate a_template;
+ a_template.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ structure.templates.push_back(a_template);
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalableVideoControllerNoLayering::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(1);
+ if (restart || start_) {
+ result[0].Id(0).Keyframe().Update(0);
+ } else {
+ result[0].Id(0).ReferenceAndUpdate(0);
+ }
+ start_ = false;
+ return result;
+}
+
+absl::optional<GenericFrameInfo>
+ScalableVideoControllerNoLayering::OnEncodeDone(LayerFrameConfig config) {
+ RTC_DCHECK_EQ(config.Id(), 0);
+ absl::optional<GenericFrameInfo> frame_info(absl::in_place);
+ frame_info->encoder_buffers = config.Buffers();
+ if (config.IsKeyframe()) {
+ for (auto& buffer : frame_info->encoder_buffers) {
+ buffer.referenced = false;
+ }
+ }
+ frame_info->decode_target_indications = {DecodeTargetIndication::kSwitch};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h b/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h
new file mode 100644
index 0000000000..ad730989af
--- /dev/null
+++ b/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+class ScalableVideoControllerNoLayering : public ScalableVideoController {
+ public:
+ ~ScalableVideoControllerNoLayering() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool start_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index 33efa648ba..9e32c68f76 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -32,7 +32,6 @@ extern "C" {
#include "common_video/include/video_frame_buffer.h"
#include "modules/video_coding/codecs/h264/h264_color_space.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/field_trial.h"
diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
index 9e718303b7..92a4c88cef 100644
--- a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
+++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
@@ -21,7 +21,7 @@
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
#include "modules/video_coding/include/video_codec_interface.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -71,7 +71,7 @@ class MultiplexEncoderAdapter : public VideoEncoder {
EncodedImageCallback* encoded_complete_callback_;
std::map<uint32_t /* timestamp */, MultiplexImage> stashed_images_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
uint16_t picture_index_ = 0;
std::vector<uint8_t> multiplex_dummy_planes_;
@@ -79,7 +79,7 @@ class MultiplexEncoderAdapter : public VideoEncoder {
int key_frame_interval_;
EncodedImage combined_image_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const bool supports_augmented_data_;
int augmenting_data_size_ = 0;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
index 4a913fe502..9f9f39ce05 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
@@ -16,6 +16,7 @@
#include <vector>
#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_codec.h"
namespace webrtc {
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index 492ff19ffa..13f177cdad 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -180,7 +180,7 @@ int MultiplexEncoderAdapter::Encode(
}
{
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
stashed_images_.emplace(
std::piecewise_construct,
std::forward_as_tuple(input_image.timestamp()),
@@ -273,7 +273,7 @@ int MultiplexEncoderAdapter::Release() {
}
encoders_.clear();
adapter_callbacks_.clear();
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
stashed_images_.clear();
return WEBRTC_VIDEO_CODEC_OK;
@@ -298,7 +298,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage(
// If we don't already own the buffer, make a copy.
image_component.encoded_image.Retain();
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& stashed_image_itr =
stashed_images_.find(encodedImage.Timestamp());
const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1);
diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc
new file mode 100644
index 0000000000..7dc387b857
--- /dev/null
+++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
+
+#include <memory>
+#include <vector>
+
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_type.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+class EncoderCallback : public EncodedImageCallback {
+ public:
+ explicit EncoderCallback(
+ std::vector<EncodedVideoFrameProducer::EncodedFrame>& output_frames)
+ : output_frames_(output_frames) {}
+
+ private:
+ Result OnEncodedImage(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* /*fragmentation*/) override {
+ output_frames_.push_back({encoded_image, *codec_specific_info});
+ return Result(Result::Error::OK);
+ }
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame>& output_frames_;
+};
+
+} // namespace
+
+std::vector<EncodedVideoFrameProducer::EncodedFrame>
+EncodedVideoFrameProducer::Encode() {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_buffer_generator =
+ test::CreateSquareFrameGenerator(
+ resolution_.Width(), resolution_.Height(),
+ test::FrameGeneratorInterface::OutputType::kI420, absl::nullopt);
+
+ std::vector<EncodedFrame> encoded_frames;
+ EncoderCallback encoder_callback(encoded_frames);
+ RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(&encoder_callback),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ uint32_t rtp_tick = 90000 / framerate_fps_;
+ std::vector<VideoFrameType> frame_types = {VideoFrameType::kVideoFrameDelta};
+ for (int i = 0; i < num_input_frames_; ++i) {
+ VideoFrame frame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer)
+ .set_timestamp_rtp(rtp_timestamp_)
+ .build();
+ rtp_timestamp_ += rtp_tick;
+ RTC_CHECK_EQ(encoder_.Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
+ }
+
+ RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(nullptr),
+ WEBRTC_VIDEO_CODEC_OK);
+ return encoded_frames;
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/modules/video_coding/codecs/test/encoded_video_frame_producer.h
new file mode 100644
index 0000000000..1b1b9018f9
--- /dev/null
+++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
+#define MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+
+// Wrapper around VideoEncoder::Encode for convenient input (generates frames)
+// and output (returns encoded frames instead of passing them to callback)
+class EncodedVideoFrameProducer {
+ public:
+ struct EncodedFrame {
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_specific_info;
+ };
+
+ // `encoder` should be initialized, but shouldn't have `EncoderCallback` set.
+ explicit EncodedVideoFrameProducer(VideoEncoder& encoder)
+ : encoder_(encoder) {}
+ EncodedVideoFrameProducer(const EncodedVideoFrameProducer&) = delete;
+ EncodedVideoFrameProducer& operator=(const EncodedVideoFrameProducer&) =
+ delete;
+
+ // Number of the input frames to pass to the encoder.
+ EncodedVideoFrameProducer& SetNumInputFrames(int value);
+ // Resolution of the input frames.
+ EncodedVideoFrameProducer& SetResolution(RenderResolution value);
+
+ EncodedVideoFrameProducer& SetFramerateFps(int value);
+
+ // Generates input video frames and encodes them with `encoder` provided in
+ // the constructor. Returns frame passed to the `OnEncodedImage` by wraping
+ // `EncodedImageCallback` underneath.
+ std::vector<EncodedFrame> Encode();
+
+ private:
+ VideoEncoder& encoder_;
+
+ uint32_t rtp_timestamp_ = 1000;
+ int num_input_frames_ = 1;
+ int framerate_fps_ = 30;
+ RenderResolution resolution_ = {320, 180};
+};
+
+inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetNumInputFrames(
+ int value) {
+ RTC_DCHECK_GT(value, 0);
+ num_input_frames_ = value;
+ return *this;
+}
+
+inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetResolution(
+ RenderResolution value) {
+ resolution_ = value;
+ return *this;
+}
+
+inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetFramerateFps(
+ int value) {
+ RTC_DCHECK_GT(value, 0);
+ framerate_fps_ = value;
+ return *this;
+}
+
+} // namespace webrtc
+#endif // MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
diff --git a/modules/video_coding/codecs/test/video_codec_unittest.cc b/modules/video_coding/codecs/test/video_codec_unittest.cc
index c6cf1add94..94806b844e 100644
--- a/modules/video_coding/codecs/test/video_codec_unittest.cc
+++ b/modules/video_coding/codecs/test/video_codec_unittest.cc
@@ -37,7 +37,7 @@ VideoCodecUnitTest::FakeEncodeCompleteCallback::OnEncodedImage(
const EncodedImage& frame,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
- rtc::CritScope lock(&test_->encoded_frame_section_);
+ MutexLock lock(&test_->encoded_frame_section_);
test_->encoded_frames_.push_back(frame);
RTC_DCHECK(codec_specific_info);
test_->codec_specific_infos_.push_back(*codec_specific_info);
@@ -58,7 +58,7 @@ void VideoCodecUnitTest::FakeDecodeCompleteCallback::Decoded(
VideoFrame& frame,
absl::optional<int32_t> decode_time_ms,
absl::optional<uint8_t> qp) {
- rtc::CritScope lock(&test_->decoded_frame_section_);
+ MutexLock lock(&test_->decoded_frame_section_);
test_->decoded_frame_.emplace(frame);
test_->decoded_qp_ = qp;
test_->decoded_frame_event_.Set();
@@ -126,7 +126,7 @@ bool VideoCodecUnitTest::WaitForEncodedFrame(
}
void VideoCodecUnitTest::SetWaitForEncodedFramesThreshold(size_t num_frames) {
- rtc::CritScope lock(&encoded_frame_section_);
+ MutexLock lock(&encoded_frame_section_);
wait_for_encoded_frames_threshold_ = num_frames;
}
@@ -136,7 +136,7 @@ bool VideoCodecUnitTest::WaitForEncodedFrames(
EXPECT_TRUE(encoded_frame_event_.Wait(kEncodeTimeoutMs))
<< "Timed out while waiting for encoded frame.";
// This becomes unsafe if there are multiple threads waiting for frames.
- rtc::CritScope lock(&encoded_frame_section_);
+ MutexLock lock(&encoded_frame_section_);
EXPECT_FALSE(encoded_frames_.empty());
EXPECT_FALSE(codec_specific_infos_.empty());
EXPECT_EQ(encoded_frames_.size(), codec_specific_infos_.size());
@@ -157,7 +157,7 @@ bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr<VideoFrame>* frame,
bool ret = decoded_frame_event_.Wait(kDecodeTimeoutMs);
EXPECT_TRUE(ret) << "Timed out while waiting for a decoded frame.";
// This becomes unsafe if there are multiple threads waiting for frames.
- rtc::CritScope lock(&decoded_frame_section_);
+ MutexLock lock(&decoded_frame_section_);
EXPECT_TRUE(decoded_frame_);
if (decoded_frame_) {
frame->reset(new VideoFrame(std::move(*decoded_frame_)));
@@ -170,7 +170,7 @@ bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr<VideoFrame>* frame,
}
size_t VideoCodecUnitTest::GetNumEncodedFrames() {
- rtc::CritScope lock(&encoded_frame_section_);
+ MutexLock lock(&encoded_frame_section_);
return encoded_frames_.size();
}
diff --git a/modules/video_coding/codecs/test/video_codec_unittest.h b/modules/video_coding/codecs/test/video_codec_unittest.h
index 1ce37a7ed5..c10eec4cb4 100644
--- a/modules/video_coding/codecs/test/video_codec_unittest.h
+++ b/modules/video_coding/codecs/test/video_codec_unittest.h
@@ -20,8 +20,8 @@
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "test/gtest.h"
@@ -108,7 +108,7 @@ class VideoCodecUnitTest : public ::testing::Test {
FakeDecodeCompleteCallback decode_complete_callback_;
rtc::Event encoded_frame_event_;
- rtc::CriticalSection encoded_frame_section_;
+ Mutex encoded_frame_section_;
size_t wait_for_encoded_frames_threshold_;
std::vector<EncodedImage> encoded_frames_
RTC_GUARDED_BY(encoded_frame_section_);
@@ -116,7 +116,7 @@ class VideoCodecUnitTest : public ::testing::Test {
RTC_GUARDED_BY(encoded_frame_section_);
rtc::Event decoded_frame_event_;
- rtc::CriticalSection decoded_frame_section_;
+ Mutex decoded_frame_section_;
absl::optional<VideoFrame> decoded_frame_
RTC_GUARDED_BY(decoded_frame_section_);
absl::optional<uint8_t> decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_);
diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
index 7e92b360bd..990db54321 100644
--- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
+++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
@@ -205,6 +205,9 @@ void VideoCodecTestFixtureImpl::Config::SetCodecSettings(
codec_settings.VP9()->numberOfSpatialLayers =
static_cast<uint8_t>(num_spatial_layers);
break;
+ case kVideoCodecAV1:
+ codec_settings.qpMax = 63;
+ break;
case kVideoCodecH264:
codec_settings.H264()->frameDroppingOn = frame_dropper_on;
codec_settings.H264()->keyFrameInterval = kBaseKeyFrameInterval;
diff --git a/modules/video_coding/codecs/test/videocodec_test_libaom.cc b/modules/video_coding/codecs/test/videocodec_test_libaom.cc
new file mode 100644
index 0000000000..45730aa09e
--- /dev/null
+++ b/modules/video_coding/codecs/test/videocodec_test_libaom.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "api/test/create_videocodec_test_fixture.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "media/base/media_constants.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Test clips settings.
+constexpr int kCifWidth = 352;
+constexpr int kCifHeight = 288;
+constexpr int kNumFramesLong = 300;
+
+VideoCodecTestFixture::Config CreateConfig(std::string filename) {
+ VideoCodecTestFixture::Config config;
+ config.filename = filename;
+ config.filepath = ResourcePath(config.filename, "yuv");
+ config.num_frames = kNumFramesLong;
+ config.use_single_core = true;
+ return config;
+}
+
+TEST(VideoCodecTestLibaom, HighBitrateAV1) {
+ auto config = CreateConfig("foreman_cif");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kCifWidth, kCifHeight);
+ config.num_frames = kNumFramesLong;
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{500, 30, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {12, 1, 0, 1, 0.3, 0.1, 0, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{37, 34, 0.94, 0.92}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+
+TEST(VideoCodecTestLibaom, VeryLowBitrateAV1) {
+ auto config = CreateConfig("foreman_cif");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kCifWidth, kCifHeight);
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{50, 30, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {15, 8, 75, 2, 2, 2, 2, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{28, 25, 0.70, 0.62}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+
+#if !defined(WEBRTC_ANDROID)
+constexpr int kHdWidth = 1280;
+constexpr int kHdHeight = 720;
+TEST(VideoCodecTestLibaom, HdAV1) {
+ auto config = CreateConfig("ConferenceMotion_1280_720_50");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kHdWidth, kHdHeight);
+ config.num_frames = kNumFramesLong;
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{1000, 50, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {13, 3, 0, 1, 0.3, 0.1, 0, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{36, 32, 0.93, 0.87}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+#endif
+
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index 83ea450d88..b5652593ae 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -608,58 +608,52 @@ FrameDependencyStructure DefaultTemporalLayers::GetTemplateStructure(
FrameDependencyStructure template_structure;
template_structure.num_decode_targets = num_layers;
- using Builder = GenericFrameInfo::Builder;
switch (num_layers) {
case 1: {
- template_structure.templates = {
- Builder().T(0).Dtis("S").Build(),
- Builder().T(0).Dtis("S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(2);
+ template_structure.templates[0].T(0).Dtis("S");
+ template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1});
return template_structure;
}
case 2: {
- template_structure.templates = {
- Builder().T(0).Dtis("SS").Build(),
- Builder().T(0).Dtis("SS").Fdiffs({2}).Build(),
- Builder().T(0).Dtis("SR").Fdiffs({2}).Build(),
- Builder().T(1).Dtis("-S").Fdiffs({1}).Build(),
- Builder().T(1).Dtis("-D").Fdiffs({1, 2}).Build(),
- };
+ template_structure.templates.resize(5);
+ template_structure.templates[0].T(0).Dtis("SS");
+ template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({2});
+ template_structure.templates[2].T(0).Dtis("SR").FrameDiffs({2});
+ template_structure.templates[3].T(1).Dtis("-S").FrameDiffs({1});
+ template_structure.templates[4].T(1).Dtis("-D").FrameDiffs({2, 1});
return template_structure;
}
case 3: {
if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) {
- template_structure.templates = {
- Builder().T(0).Dtis("SSS").Build(),
- Builder().T(0).Dtis("SSS").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-DR").Fdiffs({2}).Build(),
- Builder().T(2).Dtis("--S").Fdiffs({1}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1, 2}).Build(),
- };
+ template_structure.templates.resize(5);
+ template_structure.templates[0].T(0).Dtis("SSS");
+ template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4});
+ template_structure.templates[2].T(1).Dtis("-DR").FrameDiffs({2});
+ template_structure.templates[3].T(2).Dtis("--S").FrameDiffs({1});
+ template_structure.templates[4].T(2).Dtis("--D").FrameDiffs({2, 1});
} else {
- template_structure.templates = {
- Builder().T(0).Dtis("SSS").Build(),
- Builder().T(0).Dtis("SSS").Fdiffs({4}).Build(),
- Builder().T(0).Dtis("SRR").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-SS").Fdiffs({2}).Build(),
- Builder().T(1).Dtis("-DS").Fdiffs({2, 4}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1, 3}).Build(),
- };
+ template_structure.templates.resize(7);
+ template_structure.templates[0].T(0).Dtis("SSS");
+ template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4});
+ template_structure.templates[2].T(0).Dtis("SRR").FrameDiffs({4});
+ template_structure.templates[3].T(1).Dtis("-SS").FrameDiffs({2});
+ template_structure.templates[4].T(1).Dtis("-DS").FrameDiffs({4, 2});
+ template_structure.templates[5].T(2).Dtis("--D").FrameDiffs({1});
+ template_structure.templates[6].T(2).Dtis("--D").FrameDiffs({3, 1});
}
return template_structure;
}
case 4: {
- template_structure.templates = {
- Builder().T(0).Dtis("SSSS").Build(),
- Builder().T(0).Dtis("SSSS").Fdiffs({8}).Build(),
- Builder().T(1).Dtis("-SRR").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-SRR").Fdiffs({4, 8}).Build(),
- Builder().T(2).Dtis("--SR").Fdiffs({2}).Build(),
- Builder().T(2).Dtis("--SR").Fdiffs({2, 4}).Build(),
- Builder().T(3).Dtis("---D").Fdiffs({1}).Build(),
- Builder().T(3).Dtis("---D").Fdiffs({1, 3}).Build(),
- };
+ template_structure.templates.resize(8);
+ template_structure.templates[0].T(0).Dtis("SSSS");
+ template_structure.templates[1].T(0).Dtis("SSSS").FrameDiffs({8});
+ template_structure.templates[2].T(1).Dtis("-SRR").FrameDiffs({4});
+ template_structure.templates[3].T(1).Dtis("-SRR").FrameDiffs({4, 8});
+ template_structure.templates[4].T(2).Dtis("--SR").FrameDiffs({2});
+ template_structure.templates[5].T(2).Dtis("--SR").FrameDiffs({2, 4});
+ template_structure.templates[6].T(3).Dtis("---D").FrameDiffs({1});
+ template_structure.templates[7].T(3).Dtis("---D").FrameDiffs({1, 3});
return template_structure;
}
default:
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.h b/modules/video_coding/codecs/vp8/default_temporal_layers.h
index 29cfcf0489..d127d8056d 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers.h
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers.h
@@ -75,7 +75,7 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController {
DependencyInfo(absl::string_view indication_symbols,
Vp8FrameConfig frame_config)
: decode_target_indications(
- GenericFrameInfo::DecodeTargetInfo(indication_symbols)),
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols)),
frame_config(frame_config) {}
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
index a3ee2c0c41..d86d8767c5 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -44,26 +44,48 @@ constexpr int kVp8ErrorPropagationTh = 30;
constexpr long kDecodeDeadlineRealtime = 1; // NOLINT
const char kVp8PostProcArmFieldTrial[] = "WebRTC-VP8-Postproc-Config-Arm";
+const char kVp8PostProcFieldTrial[] = "WebRTC-VP8-Postproc-Config";
-void GetPostProcParamsFromFieldTrialGroup(
- LibvpxVp8Decoder::DeblockParams* deblock_params) {
- std::string group =
- webrtc::field_trial::FindFullName(kVp8PostProcArmFieldTrial);
- if (group.empty())
- return;
+#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
+ defined(WEBRTC_ANDROID)
+constexpr bool kIsArm = true;
+#else
+constexpr bool kIsArm = false;
+#endif
+
+absl::optional<LibvpxVp8Decoder::DeblockParams> DefaultDeblockParams() {
+ if (kIsArm) {
+ // For ARM, this is only called when deblocking is explicitly enabled, and
+ // the default strength is set by the ctor.
+ return LibvpxVp8Decoder::DeblockParams();
+ }
+ // For non-arm, don't use the explicit deblocking settings by default.
+ return absl::nullopt;
+}
+
+absl::optional<LibvpxVp8Decoder::DeblockParams>
+GetPostProcParamsFromFieldTrialGroup() {
+ std::string group = webrtc::field_trial::FindFullName(
+ kIsArm ? kVp8PostProcArmFieldTrial : kVp8PostProcFieldTrial);
+ if (group.empty()) {
+ return DefaultDeblockParams();
+ }
LibvpxVp8Decoder::DeblockParams params;
if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &params.max_level,
- &params.min_qp, &params.degrade_qp) != 3)
- return;
+ &params.min_qp, &params.degrade_qp) != 3) {
+ return DefaultDeblockParams();
+ }
- if (params.max_level < 0 || params.max_level > 16)
- return;
+ if (params.max_level < 0 || params.max_level > 16) {
+ return DefaultDeblockParams();
+ }
- if (params.min_qp < 0 || params.degrade_qp <= params.min_qp)
- return;
+ if (params.min_qp < 0 || params.degrade_qp <= params.min_qp) {
+ return DefaultDeblockParams();
+ }
- *deblock_params = params;
+ return params;
}
} // namespace
@@ -97,8 +119,9 @@ class LibvpxVp8Decoder::QpSmoother {
};
LibvpxVp8Decoder::LibvpxVp8Decoder()
- : use_postproc_arm_(
- webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)),
+ : use_postproc_(
+ kIsArm ? webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)
+ : true),
buffer_pool_(false, 300 /* max_number_of_buffers*/),
decode_complete_callback_(NULL),
inited_(false),
@@ -107,10 +130,9 @@ LibvpxVp8Decoder::LibvpxVp8Decoder()
last_frame_width_(0),
last_frame_height_(0),
key_frame_required_(true),
- qp_smoother_(use_postproc_arm_ ? new QpSmoother() : nullptr) {
- if (use_postproc_arm_)
- GetPostProcParamsFromFieldTrialGroup(&deblock_);
-}
+ deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup()
+ : absl::nullopt),
+ qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {}
LibvpxVp8Decoder::~LibvpxVp8Decoder() {
inited_ = true; // in order to do the actual release
@@ -131,12 +153,7 @@ int LibvpxVp8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) {
cfg.threads = 1;
cfg.h = cfg.w = 0; // set after decode
-#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
- defined(WEBRTC_ANDROID)
- vpx_codec_flags_t flags = use_postproc_arm_ ? VPX_CODEC_USE_POSTPROC : 0;
-#else
- vpx_codec_flags_t flags = VPX_CODEC_USE_POSTPROC;
-#endif
+ vpx_codec_flags_t flags = use_postproc_ ? VPX_CODEC_USE_POSTPROC : 0;
if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
delete decoder_;
@@ -174,43 +191,47 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
}
// Post process configurations.
-#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
- defined(WEBRTC_ANDROID)
- if (use_postproc_arm_) {
+ if (use_postproc_) {
vp8_postproc_cfg_t ppcfg;
+ // MFQE enabled to reduce key frame popping.
ppcfg.post_proc_flag = VP8_MFQE;
- // For low resolutions, use stronger deblocking filter.
- int last_width_x_height = last_frame_width_ * last_frame_height_;
- if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) {
- // Enable the deblock and demacroblocker based on qp thresholds.
- RTC_DCHECK(qp_smoother_);
- int qp = qp_smoother_->GetAvg();
- if (qp > deblock_.min_qp) {
- int level = deblock_.max_level;
- if (qp < deblock_.degrade_qp) {
- // Use lower level.
- level = deblock_.max_level * (qp - deblock_.min_qp) /
- (deblock_.degrade_qp - deblock_.min_qp);
+
+ if (kIsArm) {
+ RTC_DCHECK(deblock_params_.has_value());
+ }
+ if (deblock_params_.has_value()) {
+ // For low resolutions, use stronger deblocking filter.
+ int last_width_x_height = last_frame_width_ * last_frame_height_;
+ if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) {
+ // Enable the deblock and demacroblocker based on qp thresholds.
+ RTC_DCHECK(qp_smoother_);
+ int qp = qp_smoother_->GetAvg();
+ if (qp > deblock_params_->min_qp) {
+ int level = deblock_params_->max_level;
+ if (qp < deblock_params_->degrade_qp) {
+ // Use lower level.
+ level = deblock_params_->max_level *
+ (qp - deblock_params_->min_qp) /
+ (deblock_params_->degrade_qp - deblock_params_->min_qp);
+ }
+ // Deblocking level only affects VP8_DEMACROBLOCK.
+ ppcfg.deblocking_level = std::max(level, 1);
+ ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK;
}
- // Deblocking level only affects VP8_DEMACROBLOCK.
- ppcfg.deblocking_level = std::max(level, 1);
- ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK;
}
+ } else {
+ // Non-arm with no explicit deblock params set.
+ ppcfg.post_proc_flag |= VP8_DEBLOCK;
+ // For VGA resolutions and lower, enable the demacroblocker postproc.
+ if (last_frame_width_ * last_frame_height_ <= 640 * 360) {
+ ppcfg.post_proc_flag |= VP8_DEMACROBLOCK;
+ }
+ // Strength of deblocking filter. Valid range:[0,16]
+ ppcfg.deblocking_level = 3;
}
+
vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
}
-#else
- vp8_postproc_cfg_t ppcfg;
- // MFQE enabled to reduce key frame popping.
- ppcfg.post_proc_flag = VP8_MFQE | VP8_DEBLOCK;
- // For VGA resolutions and lower, enable the demacroblocker postproc.
- if (last_frame_width_ * last_frame_height_ <= 640 * 360) {
- ppcfg.post_proc_flag |= VP8_DEMACROBLOCK;
- }
- // Strength of deblocking filter. Valid range:[0,16]
- ppcfg.deblocking_level = 3;
- vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
-#endif
// Always start with a complete key frame.
if (key_frame_required_) {
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
index d9bfee81c1..2a0c5f2c5b 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "absl/types/optional.h"
#include "api/video/encoded_image.h"
#include "api/video_codecs/video_decoder.h"
#include "common_video/include/i420_buffer_pool.h"
@@ -51,7 +52,7 @@ class LibvpxVp8Decoder : public VideoDecoder {
uint32_t timeStamp,
int qp,
const webrtc::ColorSpace* explicit_color_space);
- const bool use_postproc_arm_;
+ const bool use_postproc_;
I420BufferPool buffer_pool_;
DecodedImageCallback* decode_complete_callback_;
@@ -61,7 +62,7 @@ class LibvpxVp8Decoder : public VideoDecoder {
int last_frame_width_;
int last_frame_height_;
bool key_frame_required_;
- DeblockParams deblock_;
+ const absl::optional<DeblockParams> deblock_params_;
const std::unique_ptr<QpSmoother> qp_smoother_;
};
diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.cc b/modules/video_coding/codecs/vp8/screenshare_layers.cc
index 01858c6ee9..caccb4246c 100644
--- a/modules/video_coding/codecs/vp8/screenshare_layers.cc
+++ b/modules/video_coding/codecs/vp8/screenshare_layers.cc
@@ -36,6 +36,7 @@ constexpr int kMinTimeBetweenSyncs = kOneSecond90Khz * 2;
constexpr int kMaxTimeBetweenSyncs = kOneSecond90Khz * 4;
constexpr int kQpDeltaThresholdForSync = 8;
constexpr int kMinBitrateKbpsForQpBoost = 500;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
} // namespace
const double ScreenshareLayers::kMaxTL0FpsReduction = 2.5;
@@ -319,8 +320,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index,
if (number_of_temporal_layers_ == 1) {
vp8_info.temporalIdx = kNoTemporalIdx;
vp8_info.layerSync = false;
- generic_frame_info.decode_target_indications =
- GenericFrameInfo::DecodeTargetInfo("S");
+ generic_frame_info.decode_target_indications = {kSwitch};
generic_frame_info.encoder_buffers.emplace_back(
0, /*referenced=*/!is_keyframe, /*updated=*/true);
} else {
@@ -344,8 +344,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index,
active_layer_ = 1;
info->template_structure =
GetTemplateStructure(number_of_temporal_layers_);
- generic_frame_info.decode_target_indications =
- GenericFrameInfo::DecodeTargetInfo("SS");
+ generic_frame_info.decode_target_indications = {kSwitch, kSwitch};
} else if (active_layer_ >= 0 && layers_[active_layer_].state ==
TemporalLayer::State::kKeyFrame) {
layers_[active_layer_].state = TemporalLayer::State::kNormal;
@@ -429,21 +428,18 @@ FrameDependencyStructure ScreenshareLayers::GetTemplateStructure(
FrameDependencyStructure template_structure;
template_structure.num_decode_targets = num_layers;
- using Builder = GenericFrameInfo::Builder;
switch (num_layers) {
case 1: {
- template_structure.templates = {
- Builder().T(0).Dtis("S").Build(),
- Builder().T(0).Dtis("S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(2);
+ template_structure.templates[0].T(0).Dtis("S");
+ template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1});
return template_structure;
}
case 2: {
- template_structure.templates = {
- Builder().T(0).Dtis("SS").Build(),
- Builder().T(0).Dtis("SS").Fdiffs({1}).Build(),
- Builder().T(1).Dtis("-S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(3);
+ template_structure.templates[0].T(0).Dtis("SS");
+ template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({1});
+ template_structure.templates[2].T(1).Dtis("-S").FrameDiffs({1});
return template_structure;
}
default:
diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.h b/modules/video_coding/codecs/vp8/screenshare_layers.h
index 5270ffe81c..39477f12f1 100644
--- a/modules/video_coding/codecs/vp8/screenshare_layers.h
+++ b/modules/video_coding/codecs/vp8/screenshare_layers.h
@@ -78,7 +78,7 @@ class ScreenshareLayers final : public Vp8FrameBufferController {
DependencyInfo(absl::string_view indication_symbols,
Vp8FrameConfig frame_config)
: decode_target_indications(
- GenericFrameInfo::DecodeTargetInfo(indication_symbols)),
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols)),
frame_config(frame_config) {}
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
diff --git a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h b/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
index dcff1e6a18..1ac927d29f 100644
--- a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
+++ b/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
@@ -19,74 +19,86 @@ namespace webrtc {
class MockLibvpxVp8Interface : public LibvpxInterface {
public:
- MOCK_CONST_METHOD5(img_alloc,
- vpx_image_t*(vpx_image_t*,
- vpx_img_fmt_t,
- unsigned int,
- unsigned int,
- unsigned int));
- MOCK_CONST_METHOD6(img_wrap,
- vpx_image_t*(vpx_image_t*,
- vpx_img_fmt_t,
- unsigned int,
- unsigned int,
- unsigned int,
- unsigned char*));
- MOCK_CONST_METHOD1(img_free, void(vpx_image_t* img));
- MOCK_CONST_METHOD2(codec_enc_config_set,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- const vpx_codec_enc_cfg_t*));
- MOCK_CONST_METHOD3(codec_enc_config_default,
- vpx_codec_err_t(vpx_codec_iface_t*,
- vpx_codec_enc_cfg_t*,
- unsigned int));
- MOCK_CONST_METHOD4(codec_enc_init,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vpx_codec_iface_t*,
- const vpx_codec_enc_cfg_t*,
- vpx_codec_flags_t));
- MOCK_CONST_METHOD6(codec_enc_init_multi,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vpx_codec_iface_t*,
- vpx_codec_enc_cfg_t*,
- int,
- vpx_codec_flags_t,
- vpx_rational_t*));
- MOCK_CONST_METHOD1(codec_destroy, vpx_codec_err_t(vpx_codec_ctx_t*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- uint32_t));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- int));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- int*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_roi_map*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_active_map*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_scaling_mode*));
- MOCK_CONST_METHOD6(codec_encode,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- const vpx_image_t*,
- vpx_codec_pts_t,
- uint64_t,
- vpx_enc_frame_flags_t,
- uint64_t));
- MOCK_CONST_METHOD2(codec_get_cx_data,
- const vpx_codec_cx_pkt_t*(vpx_codec_ctx_t*,
- vpx_codec_iter_t*));
+ MOCK_METHOD(
+ vpx_image_t*,
+ img_alloc,
+ (vpx_image_t*, vpx_img_fmt_t, unsigned int, unsigned int, unsigned int),
+ (const, override));
+ MOCK_METHOD(vpx_image_t*,
+ img_wrap,
+ (vpx_image_t*,
+ vpx_img_fmt_t,
+ unsigned int,
+ unsigned int,
+ unsigned int,
+ unsigned char*),
+ (const, override));
+ MOCK_METHOD(void, img_free, (vpx_image_t * img), (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_config_set,
+ (vpx_codec_ctx_t*, const vpx_codec_enc_cfg_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_config_default,
+ (vpx_codec_iface_t*, vpx_codec_enc_cfg_t*, unsigned int),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_init,
+ (vpx_codec_ctx_t*,
+ vpx_codec_iface_t*,
+ const vpx_codec_enc_cfg_t*,
+ vpx_codec_flags_t),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_init_multi,
+ (vpx_codec_ctx_t*,
+ vpx_codec_iface_t*,
+ vpx_codec_enc_cfg_t*,
+ int,
+ vpx_codec_flags_t,
+ vpx_rational_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_destroy,
+ (vpx_codec_ctx_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, uint32_t),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, int),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, int*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_roi_map*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_active_map*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_scaling_mode*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_encode,
+ (vpx_codec_ctx_t*,
+ const vpx_image_t*,
+ vpx_codec_pts_t,
+ uint64_t,
+ vpx_enc_frame_flags_t,
+ uint64_t),
+ (const, override));
+ MOCK_METHOD(const vpx_codec_cx_pkt_t*,
+ codec_get_cx_data,
+ (vpx_codec_ctx_t*, vpx_codec_iter_t*),
+ (const, override));
};
} // namespace webrtc
diff --git a/modules/video_coding/codecs/vp9/include/vp9.h b/modules/video_coding/codecs/vp9/include/vp9.h
index 8091cacec9..7cf1c2ebd1 100644
--- a/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/modules/video_coding/codecs/vp9/include/vp9.h
@@ -25,6 +25,10 @@ namespace webrtc {
// negotiate in SDP, in order of preference.
std::vector<SdpVideoFormat> SupportedVP9Codecs();
+// Returns a vector with all supported internal VP9 decode profiles in order of
+// preference. These will be availble for receive-only connections.
+std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs();
+
class VP9Encoder : public VideoEncoder {
public:
// Deprecated. Returns default implementation using VP9 Profile 0.
diff --git a/modules/video_coding/codecs/vp9/include/vp9_globals.h b/modules/video_coding/codecs/vp9/include/vp9_globals.h
index c6853127ac..6f9d09933f 100644
--- a/modules/video_coding/codecs/vp9/include/vp9_globals.h
+++ b/modules/video_coding/codecs/vp9/include/vp9_globals.h
@@ -30,8 +30,8 @@ const size_t kMaxVp9RefPics = 3;
const size_t kMaxVp9FramesInGof = 0xFF; // 8 bits
const size_t kMaxVp9NumberOfSpatialLayers = 8;
-const size_t kMinVp9SpatialLayerWidth = 320;
-const size_t kMinVp9SpatialLayerHeight = 180;
+const size_t kMinVp9SpatialLayerWidth = 240;
+const size_t kMinVp9SpatialLayerHeight = 135;
enum TemporalStructureMode {
kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
diff --git a/modules/video_coding/codecs/vp9/svc_config.cc b/modules/video_coding/codecs/vp9/svc_config.cc
index e5d88bce21..cc7743ad25 100644
--- a/modules/video_coding/codecs/vp9/svc_config.cc
+++ b/modules/video_coding/codecs/vp9/svc_config.cc
@@ -16,6 +16,7 @@
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
namespace webrtc {
@@ -74,11 +75,23 @@ std::vector<SpatialLayer> ConfigureSvcNormalVideo(size_t input_width,
const size_t num_layers_fit_vert = static_cast<size_t>(
std::floor(1 + std::max(0.0f, std::log2(1.0f * input_height /
kMinVp9SpatialLayerHeight))));
- num_spatial_layers =
- std::min({num_spatial_layers, num_layers_fit_horz, num_layers_fit_vert});
+ const size_t limited_num_spatial_layers =
+ std::min(num_layers_fit_horz, num_layers_fit_vert);
+ if (limited_num_spatial_layers < num_spatial_layers) {
+ RTC_LOG(LS_WARNING) << "Reducing number of spatial layers from "
+ << num_spatial_layers << " to "
+ << limited_num_spatial_layers
+ << " due to low input resolution.";
+ num_spatial_layers = limited_num_spatial_layers;
+ }
// First active layer must be configured.
num_spatial_layers = std::max(num_spatial_layers, first_active_layer + 1);
+ // Ensure top layer is even enough.
+ int required_divisiblity = 1 << (num_spatial_layers - first_active_layer - 1);
+ input_width = input_width - input_width % required_divisiblity;
+ input_height = input_height - input_height % required_divisiblity;
+
for (size_t sl_idx = first_active_layer; sl_idx < num_spatial_layers;
++sl_idx) {
SpatialLayer spatial_layer = {0};
@@ -108,6 +121,19 @@ std::vector<SpatialLayer> ConfigureSvcNormalVideo(size_t input_width,
spatial_layers.push_back(spatial_layer);
}
+ // A workaround for sitiation when single HD layer is left with minBitrate
+ // about 500kbps. This would mean that there will always be at least 500kbps
+ // allocated to video regardless of how low is the actual BWE.
+ // Also, boost maxBitrate for the first layer to account for lost ability to
+ // predict from previous layers.
+ if (first_active_layer > 0) {
+ spatial_layers[0].minBitrate = kMinVp9SvcBitrateKbps;
+ // TODO(ilnik): tune this value or come up with a different formula to
+ // ensure that all singlecast configurations look good and not too much
+ // bitrate is added.
+ spatial_layers[0].maxBitrate *= 1.1;
+ }
+
return spatial_layers;
}
diff --git a/modules/video_coding/codecs/vp9/svc_config_unittest.cc b/modules/video_coding/codecs/vp9/svc_config_unittest.cc
index abc67a22ff..1891628921 100644
--- a/modules/video_coding/codecs/vp9/svc_config_unittest.cc
+++ b/modules/video_coding/codecs/vp9/svc_config_unittest.cc
@@ -41,6 +41,32 @@ TEST(SvcConfig, AlwaysSendsAtLeastOneLayer) {
EXPECT_EQ(spatial_layers.back().width, kMinVp9SpatialLayerWidth);
}
+TEST(SvcConfig, EnforcesMinimalRequiredParity) {
+ const size_t max_num_spatial_layers = 3;
+ const size_t kOddSize = 1023;
+
+ std::vector<SpatialLayer> spatial_layers =
+ GetSvcConfig(kOddSize, kOddSize, 30,
+ /*first_active_layer=*/1, max_num_spatial_layers, 1, false);
+ // Since there are 2 layers total (1, 2), divisiblity by 2 is required.
+ EXPECT_EQ(spatial_layers.back().width, kOddSize - 1);
+ EXPECT_EQ(spatial_layers.back().width, kOddSize - 1);
+
+ spatial_layers =
+ GetSvcConfig(kOddSize, kOddSize, 30,
+ /*first_active_layer=*/0, max_num_spatial_layers, 1, false);
+ // Since there are 3 layers total (0, 1, 2), divisiblity by 4 is required.
+ EXPECT_EQ(spatial_layers.back().width, kOddSize - 3);
+ EXPECT_EQ(spatial_layers.back().width, kOddSize - 3);
+
+ spatial_layers =
+ GetSvcConfig(kOddSize, kOddSize, 30,
+ /*first_active_layer=*/2, max_num_spatial_layers, 1, false);
+ // Since there is only 1 layer active (2), divisiblity by 1 is required.
+ EXPECT_EQ(spatial_layers.back().width, kOddSize);
+ EXPECT_EQ(spatial_layers.back().width, kOddSize);
+}
+
TEST(SvcConfig, SkipsInactiveLayers) {
const size_t num_spatial_layers = 4;
const size_t first_active_layer = 2;
diff --git a/modules/video_coding/codecs/vp9/svc_rate_allocator.cc b/modules/video_coding/codecs/vp9/svc_rate_allocator.cc
index cc9a0d8997..25bca63c0e 100644
--- a/modules/video_coding/codecs/vp9/svc_rate_allocator.cc
+++ b/modules/video_coding/codecs/vp9/svc_rate_allocator.cc
@@ -140,7 +140,8 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec,
}
}
upper_bound += DataRate::KilobitsPerSec(
- codec.spatialLayers[num_active_layers - 1].minBitrate);
+ codec.spatialLayers[first_active_layer + num_active_layers - 1]
+ .minBitrate);
// Do a binary search until upper and lower bound is the highest bitrate for
// |num_active_layers| - 1 layers and lowest bitrate for |num_active_layers|
diff --git a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
index d40cf23257..4d5b8497d1 100644
--- a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
@@ -16,6 +16,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/vp9_profile.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/codecs/vp9/svc_config.h"
@@ -25,20 +26,33 @@
#include "test/video_codec_settings.h"
namespace webrtc {
+namespace {
using ::testing::ElementsAreArray;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAreArray;
using EncoderInfo = webrtc::VideoEncoder::EncoderInfo;
using FramerateFractions =
absl::InlinedVector<uint8_t, webrtc::kMaxTemporalStreams>;
-namespace {
-const size_t kWidth = 1280;
-const size_t kHeight = 720;
+constexpr size_t kWidth = 1280;
+constexpr size_t kHeight = 720;
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
/*number_of_cores=*/1,
/*max_payload_size=*/0);
+
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ webrtc::test::CodecSettings(kVideoCodecVP9, &codec_settings);
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.VP9()->numberOfTemporalLayers = 1;
+ codec_settings.VP9()->numberOfSpatialLayers = 1;
+ return codec_settings;
+}
+
} // namespace
class TestVp9Impl : public VideoCodecUnitTest {
@@ -59,53 +73,6 @@ class TestVp9Impl : public VideoCodecUnitTest {
codec_settings->VP9()->numberOfSpatialLayers = 1;
}
- void ExpectFrameWith(uint8_t temporal_idx) {
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
- EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP9.temporal_idx);
- }
-
- void ExpectFrameWith(size_t num_spatial_layers,
- uint8_t temporal_idx,
- bool temporal_up_switch,
- uint8_t num_ref_pics,
- const std::vector<uint8_t>& p_diff) {
- std::vector<EncodedImage> encoded_frame;
- std::vector<CodecSpecificInfo> codec_specific;
- ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific));
- for (size_t spatial_idx = 0; spatial_idx < num_spatial_layers;
- ++spatial_idx) {
- const CodecSpecificInfoVP9& vp9 =
- codec_specific[spatial_idx].codecSpecific.VP9;
- if (vp9.temporal_idx == kNoTemporalIdx) {
- EXPECT_EQ(temporal_idx, 0);
- } else {
- EXPECT_EQ(vp9.temporal_idx, temporal_idx);
- }
- if (num_spatial_layers == 1) {
- EXPECT_FALSE(encoded_frame[spatial_idx].SpatialIndex());
- } else {
- EXPECT_EQ(encoded_frame[spatial_idx].SpatialIndex(),
- static_cast<int>(spatial_idx));
- }
- EXPECT_EQ(vp9.temporal_up_switch, temporal_up_switch);
-
- // Ensure there are no duplicates in reference list.
- std::vector<uint8_t> vp9_p_diff(vp9.p_diff,
- vp9.p_diff + vp9.num_ref_pics);
- std::sort(vp9_p_diff.begin(), vp9_p_diff.end());
- EXPECT_EQ(std::unique(vp9_p_diff.begin(), vp9_p_diff.end()),
- vp9_p_diff.end());
-
- for (size_t ref_pic_num = 0; ref_pic_num < num_ref_pics; ++ref_pic_num) {
- EXPECT_NE(
- std::find(p_diff.begin(), p_diff.end(), vp9.p_diff[ref_pic_num]),
- p_diff.end());
- }
- }
- }
-
void ConfigureSvc(size_t num_spatial_layers, size_t num_temporal_layers = 1) {
codec_settings_.VP9()->numberOfSpatialLayers =
static_cast<unsigned char>(num_spatial_layers);
@@ -187,57 +154,61 @@ TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) {
EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
}
-TEST_F(TestVp9Impl, ParserQpEqualsEncodedQp) {
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-
+TEST(Vp9ImplTest, ParserQpEqualsEncodedQp) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ encoder->InitEncode(&codec_settings, kSettings);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(1)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+ ASSERT_THAT(frames, SizeIs(1));
+ const auto& encoded_frame = frames.front().encoded_image;
int qp = 0;
ASSERT_TRUE(vp9::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
EXPECT_EQ(encoded_frame.qp_, qp);
}
-TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) {
- // Override default settings.
- codec_settings_.VP9()->numberOfTemporalLayers = 2;
+TEST(Vp9ImplTest, EncoderWith2TemporalLayers) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->numberOfTemporalLayers = 2;
// Tl0PidIdx is only used in non-flexible mode.
- codec_settings_.VP9()->flexibleMode = false;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- // Temporal layer 0.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
- EXPECT_EQ(0, codec_specific_info.codecSpecific.VP9.temporal_idx);
-
- // Temporal layer 1.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(1);
-
- // Temporal layer 0.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(0);
-
- // Temporal layer 1.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(1);
+ codec_settings.VP9()->flexibleMode = false;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(4)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(frames, SizeIs(4));
+ EXPECT_EQ(frames[0].codec_specific_info.codecSpecific.VP9.temporal_idx, 0);
+ EXPECT_EQ(frames[1].codec_specific_info.codecSpecific.VP9.temporal_idx, 1);
+ EXPECT_EQ(frames[2].codec_specific_info.codecSpecific.VP9.temporal_idx, 0);
+ EXPECT_EQ(frames[3].codec_specific_info.codecSpecific.VP9.temporal_idx, 1);
}
-TEST_F(TestVp9Impl, EncoderWith2SpatialLayers) {
- codec_settings_.VP9()->numberOfSpatialLayers = 2;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- SetWaitForEncodedFramesThreshold(2);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- std::vector<EncodedImage> encoded_frame;
- std::vector<CodecSpecificInfo> codec_info;
- ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_info));
- EXPECT_EQ(encoded_frame[0].SpatialIndex(), 0);
- EXPECT_EQ(encoded_frame[1].SpatialIndex(), 1);
+TEST(Vp9ImplTest, EncoderWith2SpatialLayers) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->numberOfSpatialLayers = 2;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(1)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(frames, SizeIs(2));
+ EXPECT_EQ(frames[0].encoded_image.SpatialIndex(), 0);
+ EXPECT_EQ(frames[1].encoded_image.SpatialIndex(), 1);
}
TEST_F(TestVp9Impl, EncoderExplicitLayering) {
@@ -1421,29 +1392,34 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) {
::testing::ElementsAreArray(expected_fps_allocation));
}
-class TestVp9ImplWithLayering
- : public TestVp9Impl,
- public ::testing::WithParamInterface<::testing::tuple<uint8_t, uint8_t>> {
+class Vp9ImplWithLayeringTest
+ : public ::testing::TestWithParam<std::tuple<int, int, bool>> {
protected:
- TestVp9ImplWithLayering()
- : num_spatial_layers_(::testing::get<0>(GetParam())),
- num_temporal_layers_(::testing::get<1>(GetParam())) {}
+ Vp9ImplWithLayeringTest()
+ : num_spatial_layers_(std::get<0>(GetParam())),
+ num_temporal_layers_(std::get<1>(GetParam())),
+ override_field_trials_(std::get<2>(GetParam())
+ ? "WebRTC-Vp9ExternalRefCtrl/Enabled/"
+ : "") {}
const uint8_t num_spatial_layers_;
const uint8_t num_temporal_layers_;
+ const test::ScopedFieldTrials override_field_trials_;
};
-TEST_P(TestVp9ImplWithLayering, FlexibleMode) {
+TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) {
// In flexible mode encoder wrapper obtains actual list of references from
// encoder and writes it into RTP payload descriptor. Check that reference
// list in payload descriptor matches the predefined one, which is used
// in non-flexible mode.
- codec_settings_.VP9()->flexibleMode = true;
- codec_settings_.VP9()->frameDroppingOn = false;
- codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
- codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->flexibleMode = true;
+ codec_settings.VP9()->frameDroppingOn = false;
+ codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers_;
+ codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers_;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
GofInfoVP9 gof;
if (num_temporal_layers_ == 1) {
@@ -1456,65 +1432,48 @@ TEST_P(TestVp9ImplWithLayering, FlexibleMode) {
// Encode at least (num_frames_in_gof + 1) frames to verify references
// of non-key frame with gof_idx = 0.
- for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1;
- ++frame_num) {
- SetWaitForEncodedFramesThreshold(num_spatial_layers_);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->Encode(NextInputFrame(), nullptr));
-
- const bool is_key_frame = frame_num == 0;
- const size_t gof_idx = frame_num % gof.num_frames_in_gof;
- const std::vector<uint8_t> p_diff(std::begin(gof.pid_diff[gof_idx]),
- std::end(gof.pid_diff[gof_idx]));
-
- ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx],
- gof.temporal_up_switch[gof_idx],
- is_key_frame ? 0 : gof.num_ref_pics[gof_idx], p_diff);
- }
-}
-
-TEST_P(TestVp9ImplWithLayering, ExternalRefControl) {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Vp9ExternalRefCtrl/Enabled/");
- codec_settings_.VP9()->flexibleMode = true;
- codec_settings_.VP9()->frameDroppingOn = false;
- codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
- codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- GofInfoVP9 gof;
- if (num_temporal_layers_ == 1) {
- gof.SetGofInfoVP9(kTemporalStructureMode1);
- } else if (num_temporal_layers_ == 2) {
- gof.SetGofInfoVP9(kTemporalStructureMode2);
- } else if (num_temporal_layers_ == 3) {
- gof.SetGofInfoVP9(kTemporalStructureMode3);
- }
-
- // Encode at least (num_frames_in_gof + 1) frames to verify references
- // of non-key frame with gof_idx = 0.
- for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1;
- ++frame_num) {
- SetWaitForEncodedFramesThreshold(num_spatial_layers_);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->Encode(NextInputFrame(), nullptr));
-
- const bool is_key_frame = frame_num == 0;
- const size_t gof_idx = frame_num % gof.num_frames_in_gof;
- const std::vector<uint8_t> p_diff(std::begin(gof.pid_diff[gof_idx]),
- std::end(gof.pid_diff[gof_idx]));
-
- ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx],
- gof.temporal_up_switch[gof_idx],
- is_key_frame ? 0 : gof.num_ref_pics[gof_idx], p_diff);
+ int num_input_frames = gof.num_frames_in_gof + 1;
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(num_input_frames)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+ ASSERT_THAT(frames, SizeIs(num_input_frames * num_spatial_layers_));
+
+ for (size_t i = 0; i < frames.size(); ++i) {
+ const EncodedVideoFrameProducer::EncodedFrame& frame = frames[i];
+ const size_t picture_idx = i / num_spatial_layers_;
+ const size_t gof_idx = picture_idx % gof.num_frames_in_gof;
+
+ const CodecSpecificInfoVP9& vp9 =
+ frame.codec_specific_info.codecSpecific.VP9;
+ EXPECT_EQ(frame.encoded_image.SpatialIndex(),
+ num_spatial_layers_ == 1
+ ? absl::nullopt
+ : absl::optional<int>(i % num_spatial_layers_))
+ << "Frame " << i;
+ EXPECT_EQ(vp9.temporal_idx, num_temporal_layers_ == 1
+ ? kNoTemporalIdx
+ : gof.temporal_idx[gof_idx])
+ << "Frame " << i;
+ EXPECT_EQ(vp9.temporal_up_switch, gof.temporal_up_switch[gof_idx])
+ << "Frame " << i;
+ if (picture_idx == 0) {
+ EXPECT_EQ(vp9.num_ref_pics, 0) << "Frame " << i;
+ } else {
+ EXPECT_THAT(rtc::MakeArrayView(vp9.p_diff, vp9.num_ref_pics),
+ UnorderedElementsAreArray(gof.pid_diff[gof_idx],
+ gof.num_ref_pics[gof_idx]))
+ << "Frame " << i;
+ }
}
}
INSTANTIATE_TEST_SUITE_P(All,
- TestVp9ImplWithLayering,
+ Vp9ImplWithLayeringTest,
::testing::Combine(::testing::Values(1, 2, 3),
- ::testing::Values(1, 2, 3)));
+ ::testing::Values(1, 2, 3),
+ ::testing::Bool()));
class TestVp9ImplFrameDropping : public TestVp9Impl {
protected:
@@ -1774,4 +1733,12 @@ TEST_F(TestVp9Impl, ReenablingUpperLayerAfterKFWithInterlayerPredIsEnabled) {
EXPECT_EQ(encoded_frames[0]._frameType, VideoFrameType::kVideoFrameDelta);
}
+TEST_F(TestVp9Impl, HandlesEmptyInitDecode) {
+ std::unique_ptr<VideoDecoder> decoder = CreateDecoder();
+ // Check that nullptr settings are ok for decoder.
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ decoder->InitDecode(/*codec_settings=*/nullptr, 1));
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder->Release());
+}
+
} // namespace webrtc
diff --git a/modules/video_coding/codecs/vp9/vp9.cc b/modules/video_coding/codecs/vp9/vp9.cc
index 527bce7729..9b0585c059 100644
--- a/modules/video_coding/codecs/vp9/vp9.cc
+++ b/modules/video_coding/codecs/vp9/vp9.cc
@@ -39,6 +39,22 @@ std::vector<SdpVideoFormat> SupportedVP9Codecs() {
cricket::kVp9CodecName,
{{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}));
}
+
+ return supported_formats;
+#else
+ return std::vector<SdpVideoFormat>();
+#endif
+}
+
+std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() {
+#ifdef RTC_ENABLE_VP9
+ std::vector<SdpVideoFormat> supported_formats = SupportedVP9Codecs();
+ // The WebRTC internal decoder supports VP9 profile 1. However, there's
+ // currently no way of sending VP9 profile 1 using the internal encoder.
+ // It would require extended support for I444, I422, and I440 buffers.
+ supported_formats.push_back(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
return supported_formats;
#else
return std::vector<SdpVideoFormat>();
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
index 551ace22a2..4d0a6983ac 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
@@ -58,7 +58,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) {
RTC_DCHECK_GT(min_size, 0);
rtc::scoped_refptr<Vp9FrameBuffer> available_buffer = nullptr;
{
- rtc::CritScope cs(&buffers_lock_);
+ MutexLock lock(&buffers_lock_);
// Do we have a buffer we can recycle?
for (const auto& buffer : allocated_buffers_) {
if (buffer->HasOneRef()) {
@@ -91,7 +91,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) {
int Vp9FrameBufferPool::GetNumBuffersInUse() const {
int num_buffers_in_use = 0;
- rtc::CritScope cs(&buffers_lock_);
+ MutexLock lock(&buffers_lock_);
for (const auto& buffer : allocated_buffers_) {
if (!buffer->HasOneRef())
++num_buffers_in_use;
@@ -100,7 +100,7 @@ int Vp9FrameBufferPool::GetNumBuffersInUse() const {
}
bool Vp9FrameBufferPool::Resize(size_t max_number_of_buffers) {
- rtc::CritScope cs(&buffers_lock_);
+ MutexLock lock(&buffers_lock_);
size_t used_buffers_count = 0;
for (const auto& buffer : allocated_buffers_) {
// If the buffer is in use, the ref count will be >= 2, one from the list we
@@ -130,7 +130,7 @@ bool Vp9FrameBufferPool::Resize(size_t max_number_of_buffers) {
}
void Vp9FrameBufferPool::ClearPool() {
- rtc::CritScope cs(&buffers_lock_);
+ MutexLock lock(&buffers_lock_);
allocated_buffers_.clear();
}
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
index 02d2b26273..d37a9fc0e2 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
@@ -18,8 +18,8 @@
#include "api/scoped_refptr.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ref_count.h"
+#include "rtc_base/synchronization/mutex.h"
struct vpx_codec_ctx;
struct vpx_codec_frame_buffer;
@@ -119,7 +119,7 @@ class Vp9FrameBufferPool {
private:
// Protects |allocated_buffers_|.
- rtc::CriticalSection buffers_lock_;
+ mutable Mutex buffers_lock_;
// All buffers, in use or ready to be recycled.
std::vector<rtc::scoped_refptr<Vp9FrameBuffer>> allocated_buffers_
RTC_GUARDED_BY(buffers_lock_);
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index 46f72b6e02..5b1efd4ec1 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -25,6 +25,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/keep_ref_until_done.h"
@@ -45,14 +46,21 @@ namespace {
uint8_t kRefBufIdx[4] = {0, 0, 0, 1};
uint8_t kUpdBufIdx[4] = {0, 0, 1, 0};
-int kMaxNumTiles4kVideo = 8;
-
// Maximum allowed PID difference for differnet per-layer frame-rate case.
const int kMaxAllowedPidDiff = 30;
constexpr double kLowRateFactor = 1.0;
constexpr double kHighRateFactor = 2.0;
+// TODO(ilink): Tune these thresholds further.
+// Selected using ConverenceMotion_1280_720_50.yuv clip.
+// No toggling observed on any link capacity from 100-2000kbps.
+// HD was reached consistently when link capacity was 1500kbps.
+// Set resolutions are a bit more conservative than svc_config.cc sets, e.g.
+// for 300kbps resolution converged to 270p instead of 360p.
+constexpr int kLowVp9QpThreshold = 149;
+constexpr int kHighVp9QpThreshold = 205;
+
// These settings correspond to the settings in vpx_codec_enc_cfg.
struct Vp9RateSettings {
uint32_t rc_undershoot_pct;
@@ -249,6 +257,8 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec)
"WebRTC-VP9VariableFramerateScreenshare")),
variable_framerate_controller_(
variable_framerate_experiment_.framerate_limit),
+ quality_scaler_experiment_(
+ ParseQualityScalerConfig("WebRTC-VP9QualityScaler")),
num_steady_state_frames_(0),
config_changed_(true) {
codec_ = {};
@@ -399,7 +409,6 @@ bool VP9EncoderImpl::SetSvcRates(
expect_no_more_active_layers = seen_active_layer;
}
}
- RTC_DCHECK_GT(num_active_spatial_layers_, 0);
if (higher_layers_enabled && !force_key_frame_) {
// Prohibit drop of all layers for the next frame, so newly enabled
@@ -517,6 +526,11 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
config_->g_profile = 0;
config_->g_input_bit_depth = 8;
break;
+ case VP9Profile::kProfile1:
+ // Encoding of profile 1 is not implemented. It would require extended
+ // support for I444, I422, and I440 buffers.
+ RTC_NOTREACHED();
+ break;
case VP9Profile::kProfile2:
img_fmt = VPX_IMG_FMT_I42016;
bits_for_storage = 16;
@@ -562,7 +576,13 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
// put some key-frames at will even in VPX_KF_DISABLED kf_mode.
config_->kf_max_dist = inst->VP9().keyFrameInterval;
config_->kf_min_dist = config_->kf_max_dist;
- config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0;
+ if (quality_scaler_experiment_.enabled) {
+ // In that experiment webrtc wide quality scaler is used instead of libvpx
+ // internal scaler.
+ config_->rc_resize_allowed = 0;
+ } else {
+ config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0;
+ }
// Determine number of threads based on the image size and #cores.
config_->g_threads =
NumberOfThreads(config_->g_w, config_->g_h, settings.number_of_cores);
@@ -587,13 +607,6 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
(num_spatial_layers_ > 1 &&
codec_.mode == VideoCodecMode::kScreensharing) ||
inter_layer_pred_ == InterLayerPredMode::kOn;
- // TODO(ilnik): Remove this workaround once external reference control works
- // nicely with simulcast SVC mode.
- // Simlucast SVC mode is currently only used in some tests and is impossible
- // to trigger for users without using some field trials.
- if (inter_layer_pred_ == InterLayerPredMode::kOff) {
- external_ref_control_ = false;
- }
if (num_temporal_layers_ == 1) {
gof_.SetGofInfoVP9(kTemporalStructureMode1);
@@ -768,7 +781,13 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
// Screenshare dropping mode: drop a layer only together with all lower
// layers. This ensures that drops on lower layers won't reduce frame-rate
// for higher layers and reference structure is RTP-compatible.
+#if 0
+ // CONSTRAINED_FROM_ABOVE_DROP is not defined in the available version of
+ // libvpx
svc_drop_frame_.framedrop_mode = CONSTRAINED_FROM_ABOVE_DROP;
+#else
+ abort();
+#endif
svc_drop_frame_.max_consec_drop = 5;
for (size_t i = 0; i < num_spatial_layers_; ++i) {
svc_drop_frame_.framedrop_thresh[i] = config_->rc_dropframe_thresh;
@@ -981,6 +1000,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
break;
}
+ case VP9Profile::kProfile1: {
+ RTC_NOTREACHED();
+ break;
+ }
case VP9Profile::kProfile2: {
// We can inject kI010 frames directly for encode. All other formats
// should be converted to it.
@@ -1554,7 +1577,12 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const {
EncoderInfo info;
info.supports_native_handle = false;
info.implementation_name = "libvpx";
- info.scaling_settings = VideoEncoder::ScalingSettings::kOff;
+ if (quality_scaler_experiment_.enabled) {
+ info.scaling_settings = VideoEncoder::ScalingSettings(
+ quality_scaler_experiment_.low_qp, quality_scaler_experiment_.high_qp);
+ } else {
+ info.scaling_settings = VideoEncoder::ScalingSettings::kOff;
+ }
info.has_trusted_rate_controller = trusted_rate_controller_;
info.is_hardware_accelerated = false;
info.has_internal_source = false;
@@ -1627,6 +1655,24 @@ VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) {
return config;
}
+// static
+VP9EncoderImpl::QualityScalerExperiment
+VP9EncoderImpl::ParseQualityScalerConfig(std::string group_name) {
+ FieldTrialFlag disabled = FieldTrialFlag("Disabled");
+ FieldTrialParameter<int> low_qp("low_qp", kLowVp9QpThreshold);
+ FieldTrialParameter<int> high_qp("hihg_qp", kHighVp9QpThreshold);
+ ParseFieldTrial({&disabled, &low_qp, &high_qp},
+ field_trial::FindFullName(group_name));
+ QualityScalerExperiment config;
+ config.enabled = !disabled.Get();
+ RTC_LOG(LS_INFO) << "Webrtc quality scaler for vp9 is "
+ << (config.enabled ? "enabled." : "disabled");
+ config.low_qp = low_qp.Get();
+ config.high_qp = high_qp.Get();
+
+ return config;
+}
+
VP9DecoderImpl::VP9DecoderImpl()
: decode_complete_callback_(nullptr),
inited_(false),
@@ -1666,14 +1712,32 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
// errors earlier than the multi-threads version.
// - Make peak CPU usage under control (not depending on input)
cfg.threads = 1;
- (void)kMaxNumTiles4kVideo; // unused
#else
- // We want to use multithreading when decoding high resolution videos. But,
- // since we don't know resolution of input stream at this stage, we always
- // enable it.
- cfg.threads = std::min(number_of_cores, kMaxNumTiles4kVideo);
+ if (!inst) {
+ // No config provided - don't know resolution to decode yet.
+ // Set thread count to one in the meantime.
+ cfg.threads = 1;
+ } else {
+ // We want to use multithreading when decoding high resolution videos. But
+ // not too many in order to avoid overhead when many stream are decoded
+ // concurrently.
+ // Set 2 thread as target for 1280x720 pixel count, and then scale up
+ // linearly from there - but cap at physical core count.
+ // For common resolutions this results in:
+ // 1 for 360p
+ // 2 for 720p
+ // 4 for 1080p
+ // 8 for 1440p
+ // 18 for 4K
+ int num_threads =
+ std::max(1, 2 * (inst->width * inst->height) / (1280 * 720));
+ cfg.threads = std::min(number_of_cores, num_threads);
+ current_codec_ = *inst;
+ }
#endif
+ num_cores_ = number_of_cores;
+
vpx_codec_flags_t flags = 0;
if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) {
return WEBRTC_VIDEO_CODEC_MEMORY;
@@ -1691,6 +1755,15 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
}
+
+ vpx_codec_err_t status =
+ vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1);
+ if (status != VPX_CODEC_OK) {
+ RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. "
+ << vpx_codec_error(decoder_);
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -1703,6 +1776,29 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
if (decode_complete_callback_ == nullptr) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
+
+ if (input_image._frameType == VideoFrameType::kVideoFrameKey) {
+ absl::optional<vp9::FrameInfo> frame_info =
+ vp9::ParseIntraFrameInfo(input_image.data(), input_image.size());
+ if (frame_info) {
+ if (frame_info->frame_width != current_codec_.width ||
+ frame_info->frame_height != current_codec_.height) {
+ // Resolution has changed, tear down and re-init a new decoder in
+ // order to get correct sizing.
+ Release();
+ current_codec_.width = frame_info->frame_width;
+ current_codec_.height = frame_info->frame_height;
+ int reinit_status = InitDecode(&current_codec_, num_cores_);
+ if (reinit_status != WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "Failed to re-init decoder.";
+ return reinit_status;
+ }
+ }
+ } else {
+ RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame.";
+ }
+ }
+
// Always start with a complete key frame.
if (key_frame_required_) {
if (input_image._frameType != VideoFrameType::kVideoFrameKey)
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/vp9_impl.h
index 2126044dcc..fae94c752b 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -175,6 +175,15 @@ class VP9EncoderImpl : public VP9Encoder {
static VariableFramerateExperiment ParseVariableFramerateConfig(
std::string group_name);
FramerateController variable_framerate_controller_;
+
+ const struct QualityScalerExperiment {
+ int low_qp;
+ int high_qp;
+ bool enabled;
+ } quality_scaler_experiment_;
+ static QualityScalerExperiment ParseQualityScalerConfig(
+ std::string group_name);
+
int num_steady_state_frames_;
// Only set config when this flag is set.
bool config_changed_;
@@ -210,6 +219,8 @@ class VP9DecoderImpl : public VP9Decoder {
bool inited_;
vpx_codec_ctx_t* decoder_;
bool key_frame_required_;
+ VideoCodec current_codec_;
+ int num_cores_;
};
} // namespace webrtc
diff --git a/modules/video_coding/decoder_database.cc b/modules/video_coding/decoder_database.cc
index c203721e5d..38a18baa6d 100644
--- a/modules/video_coding/decoder_database.cc
+++ b/modules/video_coding/decoder_database.cc
@@ -169,8 +169,10 @@ std::unique_ptr<VCMGenericDecoder> VCMDecoderDataBase::CreateAndInitDecoder(
decoder_item->settings->width = frame.EncodedImage()._encodedWidth;
decoder_item->settings->height = frame.EncodedImage()._encodedHeight;
}
- if (ptr_decoder->InitDecode(decoder_item->settings.get(),
- decoder_item->number_of_cores) < 0) {
+ int err = ptr_decoder->InitDecode(decoder_item->settings.get(),
+ decoder_item->number_of_cores);
+ if (err < 0) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize decoder. Error code: " << err;
return nullptr;
}
memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec));
diff --git a/modules/video_coding/deprecated/BUILD.gn b/modules/video_coding/deprecated/BUILD.gn
new file mode 100644
index 0000000000..fd3a5fa5fc
--- /dev/null
+++ b/modules/video_coding/deprecated/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("nack_module") {
+ sources = [
+ "nack_module.cc",
+ "nack_module.h",
+ ]
+
+ deps = [
+ "..:nack_module",
+ "../..:module_api",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:deprecation",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/experiments:field_trial_parser",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers",
+ "../../../system_wrappers:field_trial",
+ "../../utility",
+ ]
+}
diff --git a/modules/video_coding/nack_module.cc b/modules/video_coding/deprecated/nack_module.cc
index 838af1548b..f8cfd3440b 100644
--- a/modules/video_coding/nack_module.cc
+++ b/modules/video_coding/deprecated/nack_module.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/video_coding/nack_module.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include <algorithm>
#include <limits>
@@ -45,25 +45,25 @@ int64_t GetSendNackDelay() {
}
} // namespace
-NackModule::NackInfo::NackInfo()
+DEPRECATED_NackModule::NackInfo::NackInfo()
: seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {}
-NackModule::NackInfo::NackInfo(uint16_t seq_num,
- uint16_t send_at_seq_num,
- int64_t created_at_time)
+DEPRECATED_NackModule::NackInfo::NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time)
: seq_num(seq_num),
send_at_seq_num(send_at_seq_num),
created_at_time(created_at_time),
sent_at_time(-1),
retries(0) {}
-NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry,
- TimeDelta max_rtt,
- double base)
+DEPRECATED_NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry,
+ TimeDelta max_rtt,
+ double base)
: min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {}
-absl::optional<NackModule::BackoffSettings>
-NackModule::BackoffSettings::ParseFromFieldTrials() {
+absl::optional<DEPRECATED_NackModule::BackoffSettings>
+DEPRECATED_NackModule::BackoffSettings::ParseFromFieldTrials() {
// Matches magic number in RTPSender::OnReceivedNack().
const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5);
// Upper bound on link-delay considered for exponential backoff.
@@ -82,15 +82,16 @@ NackModule::BackoffSettings::ParseFromFieldTrials() {
field_trial::FindFullName("WebRTC-ExponentialNackBackoff"));
if (enabled) {
- return NackModule::BackoffSettings(min_retry.Get(), max_rtt.Get(),
- base.Get());
+ return DEPRECATED_NackModule::BackoffSettings(min_retry.Get(),
+ max_rtt.Get(), base.Get());
}
return absl::nullopt;
}
-NackModule::NackModule(Clock* clock,
- NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender)
+DEPRECATED_NackModule::DEPRECATED_NackModule(
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender)
: clock_(clock),
nack_sender_(nack_sender),
keyframe_request_sender_(keyframe_request_sender),
@@ -106,14 +107,15 @@ NackModule::NackModule(Clock* clock,
RTC_DCHECK(keyframe_request_sender_);
}
-int NackModule::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) {
+int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe) {
return OnReceivedPacket(seq_num, is_keyframe, false);
}
-int NackModule::OnReceivedPacket(uint16_t seq_num,
- bool is_keyframe,
- bool is_recovered) {
- rtc::CritScope lock(&crit_);
+int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe,
+ bool is_recovered) {
+ MutexLock lock(&mutex_);
// TODO(philipel): When the packet includes information whether it is
// retransmitted or not, use that value instead. For
// now set it to true, which will cause the reordering
@@ -181,8 +183,8 @@ int NackModule::OnReceivedPacket(uint16_t seq_num,
return 0;
}
-void NackModule::ClearUpTo(uint16_t seq_num) {
- rtc::CritScope lock(&crit_);
+void DEPRECATED_NackModule::ClearUpTo(uint16_t seq_num) {
+ MutexLock lock(&mutex_);
nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num));
keyframe_list_.erase(keyframe_list_.begin(),
keyframe_list_.lower_bound(seq_num));
@@ -190,28 +192,28 @@ void NackModule::ClearUpTo(uint16_t seq_num) {
recovered_list_.lower_bound(seq_num));
}
-void NackModule::UpdateRtt(int64_t rtt_ms) {
- rtc::CritScope lock(&crit_);
+void DEPRECATED_NackModule::UpdateRtt(int64_t rtt_ms) {
+ MutexLock lock(&mutex_);
rtt_ms_ = rtt_ms;
}
-void NackModule::Clear() {
- rtc::CritScope lock(&crit_);
+void DEPRECATED_NackModule::Clear() {
+ MutexLock lock(&mutex_);
nack_list_.clear();
keyframe_list_.clear();
recovered_list_.clear();
}
-int64_t NackModule::TimeUntilNextProcess() {
+int64_t DEPRECATED_NackModule::TimeUntilNextProcess() {
return std::max<int64_t>(next_process_time_ms_ - clock_->TimeInMilliseconds(),
0);
}
-void NackModule::Process() {
+void DEPRECATED_NackModule::Process() {
if (nack_sender_) {
std::vector<uint16_t> nack_batch;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
nack_batch = GetNackBatch(kTimeOnly);
}
@@ -236,7 +238,7 @@ void NackModule::Process() {
}
}
-bool NackModule::RemovePacketsUntilKeyFrame() {
+bool DEPRECATED_NackModule::RemovePacketsUntilKeyFrame() {
while (!keyframe_list_.empty()) {
auto it = nack_list_.lower_bound(*keyframe_list_.begin());
@@ -254,8 +256,8 @@ bool NackModule::RemovePacketsUntilKeyFrame() {
return false;
}
-void NackModule::AddPacketsToNack(uint16_t seq_num_start,
- uint16_t seq_num_end) {
+void DEPRECATED_NackModule::AddPacketsToNack(uint16_t seq_num_start,
+ uint16_t seq_num_end) {
// Remove old packets.
auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge);
nack_list_.erase(nack_list_.begin(), it);
@@ -289,7 +291,8 @@ void NackModule::AddPacketsToNack(uint16_t seq_num_start,
}
}
-std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
+std::vector<uint16_t> DEPRECATED_NackModule::GetNackBatch(
+ NackFilterOptions options) {
bool consider_seq_num = options != kTimeOnly;
bool consider_timestamp = options != kSeqNumOnly;
Timestamp now = clock_->CurrentTime();
@@ -334,13 +337,13 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
return nack_batch;
}
-void NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
+void DEPRECATED_NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
reordering_histogram_.Add(diff);
}
-int NackModule::WaitNumberOfPackets(float probability) const {
+int DEPRECATED_NackModule::WaitNumberOfPackets(float probability) const {
if (reordering_histogram_.NumValues() == 0)
return 0;
return reordering_histogram_.InverseCdf(probability);
diff --git a/modules/video_coding/nack_module.h b/modules/video_coding/deprecated/nack_module.h
index d4f705b351..f9580ae80c 100644
--- a/modules/video_coding/nack_module.h
+++ b/modules/video_coding/deprecated/nack_module.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef MODULES_VIDEO_CODING_NACK_MODULE_H_
-#define MODULES_VIDEO_CODING_NACK_MODULE_H_
+#ifndef MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
+#define MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
#include <stdint.h>
@@ -21,18 +21,19 @@
#include "modules/include/module.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/histogram.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecation.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
-class NackModule : public Module {
+class DEPRECATED_NackModule : public Module {
public:
- NackModule(Clock* clock,
- NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender);
+ DEPRECATED_NackModule(Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender);
int OnReceivedPacket(uint16_t seq_num, bool is_keyframe);
int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered);
@@ -79,24 +80,24 @@ class NackModule : public Module {
};
void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Removes packets from the nack list until the next keyframe. Returns true
// if packets were removed.
- bool RemovePacketsUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool RemovePacketsUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
std::vector<uint16_t> GetNackBatch(NackFilterOptions options)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Update the reordering distribution.
void UpdateReorderingStatistics(uint16_t seq_num)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns how many packets we have to wait in order to receive the packet
// with probability |probabilty| or higher.
int WaitNumberOfPackets(float probability) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- rtc::CriticalSection crit_;
+ Mutex mutex_;
Clock* const clock_;
NackSender* const nack_sender_;
KeyFrameRequestSender* const keyframe_request_sender_;
@@ -105,15 +106,15 @@ class NackModule : public Module {
// known thread (e.g. see |initialized_|). Those probably do not need
// synchronized access.
std::map<uint16_t, NackInfo, DescendingSeqNumComp<uint16_t>> nack_list_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
std::set<uint16_t, DescendingSeqNumComp<uint16_t>> keyframe_list_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
std::set<uint16_t, DescendingSeqNumComp<uint16_t>> recovered_list_
- RTC_GUARDED_BY(crit_);
- video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(crit_);
- bool initialized_ RTC_GUARDED_BY(crit_);
- int64_t rtt_ms_ RTC_GUARDED_BY(crit_);
- uint16_t newest_seq_num_ RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
+ video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(mutex_);
+ bool initialized_ RTC_GUARDED_BY(mutex_);
+ int64_t rtt_ms_ RTC_GUARDED_BY(mutex_);
+ uint16_t newest_seq_num_ RTC_GUARDED_BY(mutex_);
// Only touched on the process thread.
int64_t next_process_time_ms_;
@@ -124,6 +125,8 @@ class NackModule : public Module {
const absl::optional<BackoffSettings> backoff_settings_;
};
+using NackModule = RTC_DEPRECATED DEPRECATED_NackModule;
+
} // namespace webrtc
-#endif // MODULES_VIDEO_CODING_NACK_MODULE_H_
+#endif // MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
diff --git a/modules/video_coding/encoded_frame.cc b/modules/video_coding/encoded_frame.cc
index 1e9e374c64..3de62da9f5 100644
--- a/modules/video_coding/encoded_frame.cc
+++ b/modules/video_coding/encoded_frame.cc
@@ -135,20 +135,6 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
}
case kVideoCodecH264: {
_codecSpecificInfo.codecType = kVideoCodecH264;
-
- // The following H264 codec specific data are not used elsewhere.
- // Instead they are read directly from the frame marking extension.
- // These codec specific data structures should be removed
- // when frame marking is used.
- _codecSpecificInfo.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
- if (header->frame_marking.temporal_id != kNoTemporalIdx) {
- _codecSpecificInfo.codecSpecific.H264.temporal_idx =
- header->frame_marking.temporal_id;
- _codecSpecificInfo.codecSpecific.H264.base_layer_sync =
- header->frame_marking.base_layer_sync;
- _codecSpecificInfo.codecSpecific.H264.idr_frame =
- header->frame_marking.independent_frame;
- }
break;
}
default: {
diff --git a/modules/video_coding/fec_controller_default.cc b/modules/video_coding/fec_controller_default.cc
index 97919f5315..827c853541 100644
--- a/modules/video_coding/fec_controller_default.cc
+++ b/modules/video_coding/fec_controller_default.cc
@@ -20,7 +20,6 @@
#include "system_wrappers/include/field_trial.h"
namespace webrtc {
-using rtc::CritScope;
const float kProtectionOverheadRateThreshold = 0.5;
@@ -54,7 +53,7 @@ void FecControllerDefault::SetEncodingData(size_t width,
size_t height,
size_t num_temporal_layers,
size_t max_payload_size) {
- CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
loss_prot_logic_->UpdateFrameSize(width, height);
loss_prot_logic_->UpdateNumLayers(num_temporal_layers);
max_payload_size_ = max_payload_size;
@@ -94,7 +93,7 @@ uint32_t FecControllerDefault::UpdateFecRates(
FecProtectionParams delta_fec_params;
FecProtectionParams key_fec_params;
{
- CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
loss_prot_logic_->UpdateRtt(round_trip_time_ms);
// Update frame rate for the loss protection logic class: frame rate should
@@ -175,7 +174,7 @@ void FecControllerDefault::SetProtectionMethod(bool enable_fec,
} else if (enable_fec) {
method = media_optimization::kFec;
}
- CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
loss_prot_logic_->SetMethod(method);
}
@@ -183,7 +182,7 @@ void FecControllerDefault::UpdateWithEncodedData(
const size_t encoded_image_length,
const VideoFrameType encoded_image_frametype) {
const size_t encoded_length = encoded_image_length;
- CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
if (encoded_length > 0) {
const bool delta_frame =
encoded_image_frametype != VideoFrameType::kVideoFrameKey;
diff --git a/modules/video_coding/fec_controller_default.h b/modules/video_coding/fec_controller_default.h
index 02c0ec0d37..6b9e8eb8e5 100644
--- a/modules/video_coding/fec_controller_default.h
+++ b/modules/video_coding/fec_controller_default.h
@@ -20,7 +20,7 @@
#include "api/fec_controller.h"
#include "modules/video_coding/media_opt_util.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -54,10 +54,10 @@ class FecControllerDefault : public FecController {
enum { kBitrateAverageWinMs = 1000 };
Clock* const clock_;
VCMProtectionCallback* protection_callback_;
- rtc::CriticalSection crit_sect_;
+ Mutex mutex_;
std::unique_ptr<media_optimization::VCMLossProtectionLogic> loss_prot_logic_
- RTC_GUARDED_BY(crit_sect_);
- size_t max_payload_size_ RTC_GUARDED_BY(crit_sect_);
+ RTC_GUARDED_BY(mutex_);
+ size_t max_payload_size_ RTC_GUARDED_BY(mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerDefault);
const float overhead_threshold_;
};
diff --git a/modules/video_coding/frame_buffer2.cc b/modules/video_coding/frame_buffer2.cc
index 944f97bf87..88ac09c496 100644
--- a/modules/video_coding/frame_buffer2.cc
+++ b/modules/video_coding/frame_buffer2.cc
@@ -63,20 +63,26 @@ FrameBuffer::FrameBuffer(Clock* clock,
last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
add_rtt_to_playout_delay_(
webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
- rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {}
+ rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {
+ callback_checker_.Detach();
+}
-FrameBuffer::~FrameBuffer() {}
+FrameBuffer::~FrameBuffer() {
+ RTC_DCHECK_RUN_ON(&construction_checker_);
+}
void FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
bool keyframe_required,
rtc::TaskQueue* callback_queue,
std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
- RTC_DCHECK_RUN_ON(callback_queue);
+ RTC_DCHECK_RUN_ON(&callback_checker_);
+ RTC_DCHECK(callback_queue->IsCurrent());
TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
int64_t latest_return_time_ms =
clock_->TimeInMilliseconds() + max_wait_time_ms;
- rtc::CritScope lock(&crit_);
+
+ MutexLock lock(&mutex_);
if (stopped_) {
return;
}
@@ -93,9 +99,10 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() {
int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
callback_task_ = RepeatingTaskHandle::DelayedStart(
callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] {
+ RTC_DCHECK_RUN_ON(&callback_checker_);
// If this task has not been cancelled, we did not get any new frames
// while waiting. Continue with frame delivery.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!frames_to_decode_.empty()) {
// We have frames, deliver!
frame_handler_(absl::WrapUnique(GetNextFrame()), kFrameFound);
@@ -211,6 +218,7 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
}
EncodedFrame* FrameBuffer::GetNextFrame() {
+ RTC_DCHECK_RUN_ON(&callback_checker_);
int64_t now_ms = clock_->TimeInMilliseconds();
// TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
std::vector<EncodedFrame*> frames_out;
@@ -321,30 +329,33 @@ bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
protection_mode_ = mode;
}
void FrameBuffer::Start() {
TRACE_EVENT0("webrtc", "FrameBuffer::Start");
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stopped_ = false;
}
void FrameBuffer::Stop() {
TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
+ if (stopped_)
+ return;
stopped_ = true;
+
CancelCallback();
}
void FrameBuffer::Clear() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ClearFramesAndHistory();
}
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
jitter_estimator_.UpdateRtt(rtt_ms);
}
@@ -366,9 +377,11 @@ bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
}
void FrameBuffer::CancelCallback() {
+ // Called from the callback queue or from within Stop().
frame_handler_ = {};
callback_task_.Stop();
callback_queue_ = nullptr;
+ callback_checker_.Detach();
}
bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) {
@@ -418,7 +431,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
RTC_DCHECK(frame);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
const VideoLayerFrameId& id = frame->id;
int64_t last_continuous_picture_id =
@@ -516,7 +529,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
// to return from NextFrame.
if (callback_queue_) {
callback_queue_->PostTask([this] {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!callback_task_.Running())
return;
RTC_CHECK(frame_handler_);
diff --git a/modules/video_coding/frame_buffer2.h b/modules/video_coding/frame_buffer2.h
index 51f3820d31..7909000a22 100644
--- a/modules/video_coding/frame_buffer2.h
+++ b/modules/video_coding/frame_buffer2.h
@@ -24,10 +24,11 @@
#include "modules/video_coding/jitter_estimator.h"
#include "modules/video_coding/utility/decoded_frames_history.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/experiments/rtt_mult_experiment.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
@@ -117,40 +118,40 @@ class FrameBuffer {
// Check that the references of |frame| are valid.
bool ValidReferences(const EncodedFrame& frame) const;
- int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
- EncodedFrame* GetNextFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ EncodedFrame* GetNextFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- void StartWaitForNextFrameOnQueue() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void CancelCallback() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void StartWaitForNextFrameOnQueue() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void CancelCallback() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Update all directly dependent and indirectly dependent frames and mark
// them as continuous if all their references has been fulfilled.
void PropagateContinuity(FrameMap::iterator start)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Marks the frame as decoded and updates all directly dependent frames.
void PropagateDecodability(const FrameInfo& info)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Update the corresponding FrameInfo of |frame| and all FrameInfos that
// |frame| references.
// Return false if |frame| will never be decodable, true otherwise.
bool UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
FrameMap::iterator info)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- void UpdateJitterDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void UpdateJitterDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- void UpdateTimingFrameInfo() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void UpdateTimingFrameInfo() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Checks if the superframe, which current frame belongs to, is complete.
bool IsCompleteSuperFrame(const EncodedFrame& frame)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// The cleaner solution would be to have the NextFrame function return a
// vector of frames, but until the decoding pipeline can support decoding
@@ -159,30 +160,33 @@ class FrameBuffer {
EncodedFrame* CombineAndDeleteFrames(
const std::vector<EncodedFrame*>& frames) const;
+ SequenceChecker construction_checker_;
+ SequenceChecker callback_checker_;
+
// Stores only undecoded frames.
- FrameMap frames_ RTC_GUARDED_BY(crit_);
- DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(crit_);
+ FrameMap frames_ RTC_GUARDED_BY(mutex_);
+ DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(mutex_);
- rtc::CriticalSection crit_;
+ Mutex mutex_;
Clock* const clock_;
- rtc::TaskQueue* callback_queue_ RTC_GUARDED_BY(crit_);
- RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(crit_);
+ rtc::TaskQueue* callback_queue_ RTC_GUARDED_BY(mutex_);
+ RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(mutex_);
std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)>
- frame_handler_ RTC_GUARDED_BY(crit_);
- int64_t latest_return_time_ms_ RTC_GUARDED_BY(crit_);
- bool keyframe_required_ RTC_GUARDED_BY(crit_);
+ frame_handler_ RTC_GUARDED_BY(mutex_);
+ int64_t latest_return_time_ms_ RTC_GUARDED_BY(mutex_);
+ bool keyframe_required_ RTC_GUARDED_BY(mutex_);
- VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(crit_);
- VCMTiming* const timing_ RTC_GUARDED_BY(crit_);
- VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(crit_);
+ VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(mutex_);
+ VCMTiming* const timing_ RTC_GUARDED_BY(mutex_);
+ VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(mutex_);
absl::optional<VideoLayerFrameId> last_continuous_frame_
- RTC_GUARDED_BY(crit_);
- std::vector<FrameMap::iterator> frames_to_decode_ RTC_GUARDED_BY(crit_);
- bool stopped_ RTC_GUARDED_BY(crit_);
- VCMVideoProtection protection_mode_ RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
+ std::vector<FrameMap::iterator> frames_to_decode_ RTC_GUARDED_BY(mutex_);
+ bool stopped_ RTC_GUARDED_BY(mutex_);
+ VCMVideoProtection protection_mode_ RTC_GUARDED_BY(mutex_);
VCMReceiveStatisticsCallback* const stats_callback_;
- int64_t last_log_non_decoded_ms_ RTC_GUARDED_BY(crit_);
+ int64_t last_log_non_decoded_ms_ RTC_GUARDED_BY(mutex_);
const bool add_rtt_to_playout_delay_;
diff --git a/modules/video_coding/frame_buffer2_unittest.cc b/modules/video_coding/frame_buffer2_unittest.cc
index b4d663ee06..2de3f3362b 100644
--- a/modules/video_coding/frame_buffer2_unittest.cc
+++ b/modules/video_coding/frame_buffer2_unittest.cc
@@ -108,21 +108,26 @@ class FrameObjectFake : public EncodedFrame {
class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
public:
- MOCK_METHOD3(OnCompleteFrame,
- void(bool is_keyframe,
- size_t size_bytes,
- VideoContentType content_type));
- MOCK_METHOD1(OnDroppedFrames, void(uint32_t frames_dropped));
- MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets));
- MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts));
- MOCK_METHOD6(OnFrameBufferTimingsUpdated,
- void(int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms));
- MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
+ MOCK_METHOD(void,
+ OnCompleteFrame,
+ (bool is_keyframe,
+ size_t size_bytes,
+ VideoContentType content_type),
+ (override));
+ MOCK_METHOD(void, OnDroppedFrames, (uint32_t frames_dropped), (override));
+ MOCK_METHOD(void,
+ OnFrameBufferTimingsUpdated,
+ (int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnTimingFrameInfoUpdated,
+ (const TimingFrameInfo& info),
+ (override));
};
class TestFrameBuffer2 : public ::testing::Test {
diff --git a/modules/video_coding/frame_object.cc b/modules/video_coding/frame_object.cc
index cb83999c94..bdb43f7992 100644
--- a/modules/video_coding/frame_object.cc
+++ b/modules/video_coding/frame_object.cc
@@ -17,7 +17,6 @@
#include "api/video/encoded_image.h"
#include "api/video/video_timing.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
namespace webrtc {
namespace video_coding {
@@ -129,9 +128,5 @@ const RTPVideoHeader& RtpFrameObject::GetRtpVideoHeader() const {
return rtp_video_header_;
}
-const FrameMarking& RtpFrameObject::GetFrameMarking() const {
- return rtp_video_header_.frame_marking;
-}
-
} // namespace video_coding
} // namespace webrtc
diff --git a/modules/video_coding/frame_object.h b/modules/video_coding/frame_object.h
index f7988763d3..831b444df4 100644
--- a/modules/video_coding/frame_object.h
+++ b/modules/video_coding/frame_object.h
@@ -47,7 +47,6 @@ class RtpFrameObject : public EncodedFrame {
int64_t RenderTime() const override;
bool delayed_by_retransmission() const override;
const RTPVideoHeader& GetRtpVideoHeader() const;
- const FrameMarking& GetFrameMarking() const;
private:
RTPVideoHeader rtp_video_header_;
diff --git a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc
index ca9b5e2d47..50ecd8da8d 100644
--- a/modules/video_coding/generic_decoder.cc
+++ b/modules/video_coding/generic_decoder.cc
@@ -86,7 +86,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
// callbacks from one call to Decode().
VCMFrameInformation* frameInfo;
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
frameInfo = _timestampMap.Pop(decodedImage.timestamp());
}
@@ -172,12 +172,12 @@ void VCMDecodedFrameCallback::OnDecoderImplementationName(
void VCMDecodedFrameCallback::Map(uint32_t timestamp,
VCMFrameInformation* frameInfo) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
_timestampMap.Add(timestamp, frameInfo);
}
int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (_timestampMap.Pop(timestamp) == NULL) {
return VCM_GENERAL_ERROR;
}
@@ -211,7 +211,10 @@ int32_t VCMGenericDecoder::InitDecode(const VideoCodec* settings,
TRACE_EVENT0("webrtc", "VCMGenericDecoder::InitDecode");
_codecType = settings->codecType;
- return decoder_->InitDecode(settings, numberOfCores);
+ int err = decoder_->InitDecode(settings, numberOfCores);
+ implementation_name_ = decoder_->ImplementationName();
+ RTC_LOG(LS_INFO) << "Decoder implementation: " << implementation_name_;
+ return err;
}
int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) {
@@ -239,8 +242,13 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) {
_nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
int32_t ret = decoder_->Decode(frame.EncodedImage(), frame.MissingFrame(),
frame.RenderTimeMs());
-
- _callback->OnDecoderImplementationName(decoder_->ImplementationName());
+ const char* new_implementation_name = decoder_->ImplementationName();
+ if (new_implementation_name != implementation_name_) {
+ implementation_name_ = new_implementation_name;
+ RTC_LOG(LS_INFO) << "Changed decoder implementation to: "
+ << new_implementation_name;
+ }
+ _callback->OnDecoderImplementationName(implementation_name_.c_str());
if (ret < WEBRTC_VIDEO_CODEC_OK) {
RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp "
<< frame.Timestamp() << ", error code: " << ret;
diff --git a/modules/video_coding/generic_decoder.h b/modules/video_coding/generic_decoder.h
index 4b4d83ecd5..b89d3f4368 100644
--- a/modules/video_coding/generic_decoder.h
+++ b/modules/video_coding/generic_decoder.h
@@ -12,14 +12,15 @@
#define MODULES_VIDEO_CODING_GENERIC_DECODER_H_
#include <memory>
+#include <string>
#include "api/units/time_delta.h"
#include "modules/video_coding/encoded_frame.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/timestamp_map.h"
#include "modules/video_coding/timing.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
namespace webrtc {
@@ -69,7 +70,7 @@ class VCMDecodedFrameCallback : public DecodedImageCallback {
// from the same thread, and therfore a lock is not required to access it.
VCMReceiveCallback* _receiveCallback = nullptr;
VCMTiming* _timing;
- rtc::CriticalSection lock_;
+ Mutex lock_;
VCMTimestampMap _timestampMap RTC_GUARDED_BY(lock_);
int64_t ntp_offset_;
// Set by the field trial WebRTC-SlowDownDecoder to simulate a slow decoder.
@@ -112,6 +113,7 @@ class VCMGenericDecoder {
VideoCodecType _codecType;
const bool _isExternal;
VideoContentType _last_keyframe_content_type;
+ std::string implementation_name_;
};
} // namespace webrtc
diff --git a/modules/video_coding/generic_decoder_unittest.cc b/modules/video_coding/generic_decoder_unittest.cc
index 3e07a2a81c..dbceb187be 100644
--- a/modules/video_coding/generic_decoder_unittest.cc
+++ b/modules/video_coding/generic_decoder_unittest.cc
@@ -16,8 +16,8 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "common_video/test/utilities.h"
#include "modules/video_coding/timing.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
#include "test/fake_decoder.h"
#include "test/gmock.h"
@@ -33,7 +33,7 @@ class ReceiveCallback : public VCMReceiveCallback {
int32_t decode_time_ms,
VideoContentType content_type) override {
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
last_frame_ = videoFrame;
}
received_frame_event_.Set();
@@ -41,13 +41,13 @@ class ReceiveCallback : public VCMReceiveCallback {
}
absl::optional<VideoFrame> GetLastFrame() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return last_frame_;
}
absl::optional<VideoFrame> WaitForFrame(int64_t wait_ms) {
if (received_frame_event_.Wait(wait_ms)) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return last_frame_;
} else {
return absl::nullopt;
@@ -55,7 +55,7 @@ class ReceiveCallback : public VCMReceiveCallback {
}
private:
- rtc::CriticalSection lock_;
+ Mutex lock_;
rtc::Event received_frame_event_;
absl::optional<VideoFrame> last_frame_ RTC_GUARDED_BY(lock_);
};
diff --git a/modules/video_coding/h264_sps_pps_tracker.cc b/modules/video_coding/h264_sps_pps_tracker.cc
index 3965b28e8e..4becdb7608 100644
--- a/modules/video_coding/h264_sps_pps_tracker.cc
+++ b/modules/video_coding/h264_sps_pps_tracker.cc
@@ -49,6 +49,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
RTPVideoHeader* video_header) {
RTC_DCHECK(video_header);
RTC_DCHECK(video_header->codec == kVideoCodecH264);
+ RTC_DCHECK_GT(bitstream.size(), 0);
auto& h264_header =
absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
@@ -128,7 +129,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
if (h264_header.packetization_type == kH264StapA) {
const uint8_t* nalu_ptr = bitstream.data() + 1;
- while (nalu_ptr < bitstream.data() + bitstream.size()) {
+ while (nalu_ptr < bitstream.data() + bitstream.size() - 1) {
RTC_DCHECK(video_header->is_first_packet_in_frame);
required_size += sizeof(start_code_h264);
@@ -180,7 +181,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
// Copy the rest of the bitstream and insert start codes.
if (h264_header.packetization_type == kH264StapA) {
const uint8_t* nalu_ptr = bitstream.data() + 1;
- while (nalu_ptr < bitstream.data() + bitstream.size()) {
+ while (nalu_ptr < bitstream.data() + bitstream.size() - 1) {
fixed.bitstream.AppendData(start_code_h264);
// The first two bytes describe the length of a segment.
diff --git a/modules/video_coding/jitter_buffer.cc b/modules/video_coding/jitter_buffer.cc
index 0873285f39..9d2d3a2d1d 100644
--- a/modules/video_coding/jitter_buffer.cc
+++ b/modules/video_coding/jitter_buffer.cc
@@ -153,7 +153,7 @@ VCMJitterBuffer::~VCMJitterBuffer() {
}
void VCMJitterBuffer::Start() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
running_ = true;
num_consecutive_old_packets_ = 0;
@@ -172,7 +172,7 @@ void VCMJitterBuffer::Start() {
}
void VCMJitterBuffer::Stop() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
running_ = false;
last_decoded_state_.Reset();
@@ -181,12 +181,12 @@ void VCMJitterBuffer::Stop() {
}
bool VCMJitterBuffer::Running() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return running_;
}
void VCMJitterBuffer::Flush() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
decodable_frames_.Reset(&free_frames_);
incomplete_frames_.Reset(&free_frames_);
last_decoded_state_.Reset(); // TODO(mikhal): sync reset.
@@ -202,21 +202,20 @@ void VCMJitterBuffer::Flush() {
}
int VCMJitterBuffer::num_packets() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return num_packets_;
}
int VCMJitterBuffer::num_duplicated_packets() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return num_duplicated_packets_;
}
// Returns immediately or a |max_wait_time_ms| ms event hang waiting for a
// complete frame, |max_wait_time_ms| decided by caller.
VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) {
- crit_sect_.Enter();
+ MutexLock lock(&mutex_);
if (!running_) {
- crit_sect_.Leave();
return nullptr;
}
CleanUpOldOrEmptyFrames();
@@ -227,14 +226,13 @@ VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) {
clock_->TimeInMilliseconds() + max_wait_time_ms;
int64_t wait_time_ms = max_wait_time_ms;
while (wait_time_ms > 0) {
- crit_sect_.Leave();
+ mutex_.Unlock();
const EventTypeWrapper ret =
frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
- crit_sect_.Enter();
+ mutex_.Lock();
if (ret == kEventSignaled) {
// Are we shutting down the jitter buffer?
if (!running_) {
- crit_sect_.Leave();
return nullptr;
}
// Finding oldest frame ready for decoder.
@@ -252,16 +250,13 @@ VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) {
}
if (decodable_frames_.empty() ||
decodable_frames_.Front()->GetState() != kStateComplete) {
- crit_sect_.Leave();
return nullptr;
}
- VCMEncodedFrame* encoded_frame = decodable_frames_.Front();
- crit_sect_.Leave();
- return encoded_frame;
+ return decodable_frames_.Front();
}
VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!running_) {
return NULL;
}
@@ -313,7 +308,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
// frames from within the jitter buffer.
void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
RTC_CHECK(frame != nullptr);
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
RecycleFrameBuffer(frame_buffer);
}
@@ -354,7 +349,7 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
bool* retransmitted) const {
assert(retransmitted);
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
const VCMFrameBuffer* frame_buffer =
static_cast<const VCMFrameBuffer*>(frame);
*retransmitted = (frame_buffer->GetNackCount() > 0);
@@ -363,7 +358,7 @@ int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
bool* retransmitted) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
++num_packets_;
// Does this packet belong to an old frame?
@@ -577,7 +572,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
}
uint32_t VCMJitterBuffer::EstimatedJitterMs() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
const double rtt_mult = 1.0f;
return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt);
}
@@ -585,7 +580,7 @@ uint32_t VCMJitterBuffer::EstimatedJitterMs() {
void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
int max_packet_age_to_nack,
int max_incomplete_time_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
assert(max_packet_age_to_nack >= 0);
assert(max_incomplete_time_ms_ >= 0);
max_nack_list_size_ = max_nack_list_size;
@@ -616,7 +611,7 @@ uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
}
std::vector<uint16_t> VCMJitterBuffer::GetNackList(bool* request_key_frame) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
*request_key_frame = false;
if (last_decoded_state_.in_initial_state()) {
VCMFrameBuffer* next_frame = NextFrame();
@@ -827,7 +822,7 @@ void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
}
}
-// Must be called under the critical section |crit_sect_|.
+// Must be called under the critical section |mutex_|.
void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
&free_frames_);
@@ -838,13 +833,13 @@ void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
}
}
-// Must be called from within |crit_sect_|.
+// Must be called from within |mutex_|.
bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
return missing_sequence_numbers_.find(packet.seqNum) !=
missing_sequence_numbers_.end();
}
-// Must be called under the critical section |crit_sect_|. Should never be
+// Must be called under the critical section |mutex_|. Should never be
// called with retransmitted frames, they must be filtered out before this
// function is called.
void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
@@ -856,7 +851,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
sample.frame_size, incomplete_frame);
}
-// Must be called under the critical section crit_sect_. Should never be
+// Must be called under the critical section mutex_. Should never be
// called with retransmitted frames, they must be filtered out before this
// function is called.
void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
@@ -870,7 +865,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
frame.size(), incomplete_frame);
}
-// Must be called under the critical section |crit_sect_|. Should never be
+// Must be called under the critical section |mutex_|. Should never be
// called with retransmitted frames, they must be filtered out before this
// function is called.
void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms,
diff --git a/modules/video_coding/jitter_buffer.h b/modules/video_coding/jitter_buffer.h
index 2505845d4f..b15ca75ffa 100644
--- a/modules/video_coding/jitter_buffer.h
+++ b/modules/video_coding/jitter_buffer.h
@@ -28,7 +28,7 @@
#include "modules/video_coding/jitter_buffer_common.h"
#include "modules/video_coding/jitter_estimator.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -143,66 +143,66 @@ class VCMJitterBuffer {
VCMFrameBufferEnum GetFrame(const VCMPacket& packet,
VCMFrameBuffer** frame,
FrameList** frame_list)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns true if |frame| is continuous in |decoding_state|, not taking
// decodable frames into account.
bool IsContinuousInState(const VCMFrameBuffer& frame,
const VCMDecodingState& decoding_state) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns true if |frame| is continuous in the |last_decoded_state_|, taking
// all decodable frames into account.
bool IsContinuous(const VCMFrameBuffer& frame) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Looks for frames in |incomplete_frames_| which are continuous in the
// provided |decoded_state|. Starts the search from the timestamp of
// |decoded_state|.
void FindAndInsertContinuousFramesWithState(
const VCMDecodingState& decoded_state)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Looks for frames in |incomplete_frames_| which are continuous in
// |last_decoded_state_| taking all decodable frames into account. Starts
// the search from |new_frame|.
void FindAndInsertContinuousFrames(const VCMFrameBuffer& new_frame)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- VCMFrameBuffer* NextFrame() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ VCMFrameBuffer* NextFrame() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns true if the NACK list was updated to cover sequence numbers up to
// |sequence_number|. If false a key frame is needed to get into a state where
// we can continue decoding.
bool UpdateNackList(uint16_t sequence_number)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
bool TooLargeNackList() const;
// Returns true if the NACK list was reduced without problem. If false a key
// frame is needed to get into a state where we can continue decoding.
- bool HandleTooLargeNackList() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool HandleTooLargeNackList() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
bool MissingTooOldPacket(uint16_t latest_sequence_number) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns true if the too old packets was successfully removed from the NACK
// list. If false, a key frame is needed to get into a state where we can
// continue decoding.
bool HandleTooOldPackets(uint16_t latest_sequence_number)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Drops all packets in the NACK list up until |last_decoded_sequence_number|.
void DropPacketsFromNackList(uint16_t last_decoded_sequence_number);
// Gets an empty frame, creating a new frame if necessary (i.e. increases
// jitter buffer size).
- VCMFrameBuffer* GetEmptyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ VCMFrameBuffer* GetEmptyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Attempts to increase the size of the jitter buffer. Returns true on
// success, false otherwise.
- bool TryToIncreaseJitterBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool TryToIncreaseJitterBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Recycles oldest frames until a key frame is found. Used if jitter buffer is
// completely full. Returns true if a key frame was found.
- bool RecycleFramesUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool RecycleFramesUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Update rolling average of packets per frame.
void UpdateAveragePacketsPerFrame(int current_number_packets_);
// Cleans the frame list in the JB from old/empty frames.
// Should only be called prior to actual use.
- void CleanUpOldOrEmptyFrames() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ void CleanUpOldOrEmptyFrames() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Returns true if |packet| is likely to have been retransmitted.
bool IsPacketRetransmitted(const VCMPacket& packet) const;
@@ -217,35 +217,34 @@ class VCMJitterBuffer {
unsigned int frame_size,
bool incomplete_frame);
- int NonContinuousOrIncompleteDuration()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ int NonContinuousOrIncompleteDuration() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const;
// Reset frame buffer and return it to free_frames_.
void RecycleFrameBuffer(VCMFrameBuffer* frame)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* clock_;
// If we are running (have started) or not.
bool running_;
- rtc::CriticalSection crit_sect_;
+ mutable Mutex mutex_;
// Event to signal when we have a frame ready for decoder.
std::unique_ptr<EventWrapper> frame_event_;
// Number of allocated frames.
int max_number_of_frames_;
- UnorderedFrameList free_frames_ RTC_GUARDED_BY(crit_sect_);
- FrameList decodable_frames_ RTC_GUARDED_BY(crit_sect_);
- FrameList incomplete_frames_ RTC_GUARDED_BY(crit_sect_);
- VCMDecodingState last_decoded_state_ RTC_GUARDED_BY(crit_sect_);
+ UnorderedFrameList free_frames_ RTC_GUARDED_BY(mutex_);
+ FrameList decodable_frames_ RTC_GUARDED_BY(mutex_);
+ FrameList incomplete_frames_ RTC_GUARDED_BY(mutex_);
+ VCMDecodingState last_decoded_state_ RTC_GUARDED_BY(mutex_);
bool first_packet_since_reset_;
// Number of packets in a row that have been too old.
int num_consecutive_old_packets_;
// Number of packets received.
- int num_packets_ RTC_GUARDED_BY(crit_sect_);
+ int num_packets_ RTC_GUARDED_BY(mutex_);
// Number of duplicated packets received.
- int num_duplicated_packets_ RTC_GUARDED_BY(crit_sect_);
+ int num_duplicated_packets_ RTC_GUARDED_BY(mutex_);
// Jitter estimation.
// Filter for estimating jitter.
diff --git a/modules/video_coding/jitter_estimator.cc b/modules/video_coding/jitter_estimator.cc
index cd505835d1..44e2a9811e 100644
--- a/modules/video_coding/jitter_estimator.cc
+++ b/modules/video_coding/jitter_estimator.cc
@@ -23,6 +23,7 @@
#include "rtc_base/experiments/jitter_upper_bound_experiment.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
namespace {
@@ -50,6 +51,8 @@ VCMJitterEstimator::VCMJitterEstimator(Clock* clock)
time_deviation_upper_bound_(
JitterUpperBoundExperiment::GetUpperBoundSigmas().value_or(
kDefaultMaxTimestampDeviationInSigmas)),
+ enable_reduced_delay_(
+ !field_trial::IsEnabled("WebRTC-ReducedJitterDelayKillSwitch")),
clock_(clock) {
Reset();
}
@@ -395,22 +398,25 @@ int VCMJitterEstimator::GetJitterEstimate(
}
}
- static const double kJitterScaleLowThreshold = 5.0;
- static const double kJitterScaleHighThreshold = 10.0;
- double fps = GetFrameRate();
- // Ignore jitter for very low fps streams.
- if (fps < kJitterScaleLowThreshold) {
- if (fps == 0.0) {
- return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
+ if (enable_reduced_delay_) {
+ static const double kJitterScaleLowThreshold = 5.0;
+ static const double kJitterScaleHighThreshold = 10.0;
+ double fps = GetFrameRate();
+ // Ignore jitter for very low fps streams.
+ if (fps < kJitterScaleLowThreshold) {
+ if (fps == 0.0) {
+ return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
+ }
+ return 0;
}
- return 0;
- }
- // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
- // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
- if (fps < kJitterScaleHighThreshold) {
- jitterMS = (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
- (fps - kJitterScaleLowThreshold) * jitterMS;
+ // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
+ // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
+ if (fps < kJitterScaleHighThreshold) {
+ jitterMS =
+ (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
+ (fps - kJitterScaleLowThreshold) * jitterMS;
+ }
}
return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
diff --git a/modules/video_coding/jitter_estimator.h b/modules/video_coding/jitter_estimator.h
index d9798b40a1..1d69b95769 100644
--- a/modules/video_coding/jitter_estimator.h
+++ b/modules/video_coding/jitter_estimator.h
@@ -150,6 +150,7 @@ class VCMJitterEstimator {
rtc::RollingAccumulator<uint64_t> fps_counter_;
const double time_deviation_upper_bound_;
+ const bool enable_reduced_delay_;
Clock* clock_;
};
diff --git a/modules/video_coding/jitter_estimator_tests.cc b/modules/video_coding/jitter_estimator_tests.cc
index 1ad9abb56f..14baae7e81 100644
--- a/modules/video_coding/jitter_estimator_tests.cc
+++ b/modules/video_coding/jitter_estimator_tests.cc
@@ -72,6 +72,22 @@ TEST_F(TestVCMJitterEstimator, TestLowRate) {
}
}
+TEST_F(TestVCMJitterEstimator, TestLowRateDisabled) {
+ test::ScopedFieldTrials field_trials(
+ "WebRTC-ReducedJitterDelayKillSwitch/Enabled/");
+ SetUp();
+
+ ValueGenerator gen(10);
+ uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5;
+ for (int i = 0; i < 60; ++i) {
+ estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize());
+ AdvanceClock(time_delta_us);
+ if (i > 2)
+ EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt), 0);
+ gen.Advance();
+ }
+}
+
TEST_F(TestVCMJitterEstimator, TestUpperBound) {
struct TestContext {
TestContext()
diff --git a/modules/video_coding/nack_module2.cc b/modules/video_coding/nack_module2.cc
new file mode 100644
index 0000000000..8a3a731ed0
--- /dev/null
+++ b/modules/video_coding/nack_module2.cc
@@ -0,0 +1,343 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/nack_module2.h"
+
+#include <algorithm>
+#include <limits>
+
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/task_queue.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+
+namespace {
+const int kMaxPacketAge = 10000;
+const int kMaxNackPackets = 1000;
+const int kDefaultRttMs = 100;
+const int kMaxNackRetries = 10;
+const int kMaxReorderedPackets = 128;
+const int kNumReorderingBuckets = 10;
+const int kDefaultSendNackDelayMs = 0;
+
+int64_t GetSendNackDelay() {
+ int64_t delay_ms = strtol(
+ webrtc::field_trial::FindFullName("WebRTC-SendNackDelayMs").c_str(),
+ nullptr, 10);
+ if (delay_ms > 0 && delay_ms <= 20) {
+ RTC_LOG(LS_INFO) << "SendNackDelay is set to " << delay_ms;
+ return delay_ms;
+ }
+ return kDefaultSendNackDelayMs;
+}
+} // namespace
+
+constexpr TimeDelta NackModule2::kUpdateInterval;
+
+NackModule2::NackInfo::NackInfo()
+ : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {}
+
+NackModule2::NackInfo::NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time)
+ : seq_num(seq_num),
+ send_at_seq_num(send_at_seq_num),
+ created_at_time(created_at_time),
+ sent_at_time(-1),
+ retries(0) {}
+
+NackModule2::BackoffSettings::BackoffSettings(TimeDelta min_retry,
+ TimeDelta max_rtt,
+ double base)
+ : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {}
+
+absl::optional<NackModule2::BackoffSettings>
+NackModule2::BackoffSettings::ParseFromFieldTrials() {
+ // Matches magic number in RTPSender::OnReceivedNack().
+ const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5);
+ // Upper bound on link-delay considered for exponential backoff.
+ // Selected so that cumulative delay with 1.25 base and 10 retries ends up
+ // below 3s, since above that there will be a FIR generated instead.
+ const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160);
+ // Default base for exponential backoff, adds 25% RTT delay for each retry.
+ const double kDefaultBase = 1.25;
+
+ FieldTrialParameter<bool> enabled("enabled", false);
+ FieldTrialParameter<TimeDelta> min_retry("min_retry",
+ kDefaultMinRetryInterval);
+ FieldTrialParameter<TimeDelta> max_rtt("max_rtt", kDefaultMaxRtt);
+ FieldTrialParameter<double> base("base", kDefaultBase);
+ ParseFieldTrial({&enabled, &min_retry, &max_rtt, &base},
+ field_trial::FindFullName("WebRTC-ExponentialNackBackoff"));
+
+ if (enabled) {
+ return NackModule2::BackoffSettings(min_retry.Get(), max_rtt.Get(),
+ base.Get());
+ }
+ return absl::nullopt;
+}
+
+NackModule2::NackModule2(TaskQueueBase* current_queue,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ TimeDelta update_interval /*= kUpdateInterval*/)
+ : worker_thread_(current_queue),
+ update_interval_(update_interval),
+ clock_(clock),
+ nack_sender_(nack_sender),
+ keyframe_request_sender_(keyframe_request_sender),
+ reordering_histogram_(kNumReorderingBuckets, kMaxReorderedPackets),
+ initialized_(false),
+ rtt_ms_(kDefaultRttMs),
+ newest_seq_num_(0),
+ send_nack_delay_ms_(GetSendNackDelay()),
+ backoff_settings_(BackoffSettings::ParseFromFieldTrials()) {
+ RTC_DCHECK(clock_);
+ RTC_DCHECK(nack_sender_);
+ RTC_DCHECK(keyframe_request_sender_);
+ RTC_DCHECK_GT(update_interval.ms(), 0);
+ RTC_DCHECK(worker_thread_);
+ RTC_DCHECK(worker_thread_->IsCurrent());
+
+ repeating_task_ = RepeatingTaskHandle::DelayedStart(
+ TaskQueueBase::Current(), update_interval_,
+ [this]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ std::vector<uint16_t> nack_batch = GetNackBatch(kTimeOnly);
+ if (!nack_batch.empty()) {
+ // This batch of NACKs is triggered externally; there is no external
+ // initiator who can batch them with other feedback messages.
+ nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/false);
+ }
+ return update_interval_;
+ },
+ clock_);
+}
+
+NackModule2::~NackModule2() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ repeating_task_.Stop();
+}
+
+int NackModule2::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return OnReceivedPacket(seq_num, is_keyframe, false);
+}
+
+int NackModule2::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe,
+ bool is_recovered) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ // TODO(philipel): When the packet includes information whether it is
+ // retransmitted or not, use that value instead. For
+ // now set it to true, which will cause the reordering
+ // statistics to never be updated.
+ bool is_retransmitted = true;
+
+ if (!initialized_) {
+ newest_seq_num_ = seq_num;
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
+ initialized_ = true;
+ return 0;
+ }
+
+ // Since the |newest_seq_num_| is a packet we have actually received we know
+ // that packet has never been Nacked.
+ if (seq_num == newest_seq_num_)
+ return 0;
+
+ if (AheadOf(newest_seq_num_, seq_num)) {
+ // An out of order packet has been received.
+ auto nack_list_it = nack_list_.find(seq_num);
+ int nacks_sent_for_packet = 0;
+ if (nack_list_it != nack_list_.end()) {
+ nacks_sent_for_packet = nack_list_it->second.retries;
+ nack_list_.erase(nack_list_it);
+ }
+ if (!is_retransmitted)
+ UpdateReorderingStatistics(seq_num);
+ return nacks_sent_for_packet;
+ }
+
+ // Keep track of new keyframes.
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
+
+ // And remove old ones so we don't accumulate keyframes.
+ auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != keyframe_list_.begin())
+ keyframe_list_.erase(keyframe_list_.begin(), it);
+
+ if (is_recovered) {
+ recovered_list_.insert(seq_num);
+
+ // Remove old ones so we don't accumulate recovered packets.
+ auto it = recovered_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != recovered_list_.begin())
+ recovered_list_.erase(recovered_list_.begin(), it);
+
+ // Do not send nack for packets recovered by FEC or RTX.
+ return 0;
+ }
+
+ AddPacketsToNack(newest_seq_num_ + 1, seq_num);
+ newest_seq_num_ = seq_num;
+
+ // Are there any nacks that are waiting for this seq_num.
+ std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
+ if (!nack_batch.empty()) {
+ // This batch of NACKs is triggered externally; the initiator can
+ // batch them with other feedback messages.
+ nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/true);
+ }
+
+ return 0;
+}
+
+void NackModule2::ClearUpTo(uint16_t seq_num) {
+ // Called via RtpVideoStreamReceiver2::FrameContinuous on the network thread.
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [seq_num, this]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num));
+ keyframe_list_.erase(keyframe_list_.begin(),
+ keyframe_list_.lower_bound(seq_num));
+ recovered_list_.erase(recovered_list_.begin(),
+ recovered_list_.lower_bound(seq_num));
+ }));
+}
+
+void NackModule2::UpdateRtt(int64_t rtt_ms) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ rtt_ms_ = rtt_ms;
+}
+
+bool NackModule2::RemovePacketsUntilKeyFrame() {
+ // Called on worker_thread_.
+ while (!keyframe_list_.empty()) {
+ auto it = nack_list_.lower_bound(*keyframe_list_.begin());
+
+ if (it != nack_list_.begin()) {
+ // We have found a keyframe that actually is newer than at least one
+ // packet in the nack list.
+ nack_list_.erase(nack_list_.begin(), it);
+ return true;
+ }
+
+ // If this keyframe is so old it does not remove any packets from the list,
+ // remove it from the list of keyframes and try the next keyframe.
+ keyframe_list_.erase(keyframe_list_.begin());
+ }
+ return false;
+}
+
+void NackModule2::AddPacketsToNack(uint16_t seq_num_start,
+ uint16_t seq_num_end) {
+ // Called on worker_thread_.
+ // Remove old packets.
+ auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge);
+ nack_list_.erase(nack_list_.begin(), it);
+
+ // If the nack list is too large, remove packets from the nack list until
+ // the latest first packet of a keyframe. If the list is still too large,
+ // clear it and request a keyframe.
+ uint16_t num_new_nacks = ForwardDiff(seq_num_start, seq_num_end);
+ if (nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ while (RemovePacketsUntilKeyFrame() &&
+ nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ }
+
+ if (nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ nack_list_.clear();
+ RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK"
+ " list and requesting keyframe.";
+ keyframe_request_sender_->RequestKeyFrame();
+ return;
+ }
+ }
+
+ for (uint16_t seq_num = seq_num_start; seq_num != seq_num_end; ++seq_num) {
+ // Do not send nack for packets that are already recovered by FEC or RTX
+ if (recovered_list_.find(seq_num) != recovered_list_.end())
+ continue;
+ NackInfo nack_info(seq_num, seq_num + WaitNumberOfPackets(0.5),
+ clock_->TimeInMilliseconds());
+ RTC_DCHECK(nack_list_.find(seq_num) == nack_list_.end());
+ nack_list_[seq_num] = nack_info;
+ }
+}
+
+std::vector<uint16_t> NackModule2::GetNackBatch(NackFilterOptions options) {
+ // Called on worker_thread_.
+
+ bool consider_seq_num = options != kTimeOnly;
+ bool consider_timestamp = options != kSeqNumOnly;
+ Timestamp now = clock_->CurrentTime();
+ std::vector<uint16_t> nack_batch;
+ auto it = nack_list_.begin();
+ while (it != nack_list_.end()) {
+ TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_);
+ if (backoff_settings_) {
+ resend_delay =
+ std::max(resend_delay, backoff_settings_->min_retry_interval);
+ if (it->second.retries > 1) {
+ TimeDelta exponential_backoff =
+ std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) *
+ std::pow(backoff_settings_->base, it->second.retries - 1);
+ resend_delay = std::max(resend_delay, exponential_backoff);
+ }
+ }
+
+ bool delay_timed_out =
+ now.ms() - it->second.created_at_time >= send_nack_delay_ms_;
+ bool nack_on_rtt_passed =
+ now.ms() - it->second.sent_at_time >= resend_delay.ms();
+ bool nack_on_seq_num_passed =
+ it->second.sent_at_time == -1 &&
+ AheadOrAt(newest_seq_num_, it->second.send_at_seq_num);
+ if (delay_timed_out && ((consider_seq_num && nack_on_seq_num_passed) ||
+ (consider_timestamp && nack_on_rtt_passed))) {
+ nack_batch.emplace_back(it->second.seq_num);
+ ++it->second.retries;
+ it->second.sent_at_time = now.ms();
+ if (it->second.retries >= kMaxNackRetries) {
+ RTC_LOG(LS_WARNING) << "Sequence number " << it->second.seq_num
+ << " removed from NACK list due to max retries.";
+ it = nack_list_.erase(it);
+ } else {
+ ++it;
+ }
+ continue;
+ }
+ ++it;
+ }
+ return nack_batch;
+}
+
+void NackModule2::UpdateReorderingStatistics(uint16_t seq_num) {
+ // Running on worker_thread_.
+ RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
+ uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
+ reordering_histogram_.Add(diff);
+}
+
+int NackModule2::WaitNumberOfPackets(float probability) const {
+ // Called on worker_thread_;
+ if (reordering_histogram_.NumValues() == 0)
+ return 0;
+ return reordering_histogram_.InverseCdf(probability);
+}
+
+} // namespace webrtc
diff --git a/modules/video_coding/nack_module2.h b/modules/video_coding/nack_module2.h
new file mode 100644
index 0000000000..89dd082192
--- /dev/null
+++ b/modules/video_coding/nack_module2.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_NACK_MODULE2_H_
+#define MODULES_VIDEO_CODING_NACK_MODULE2_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "api/units/time_delta.h"
+#include "modules/include/module_common_types.h"
+#include "modules/video_coding/histogram.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+// TODO(bugs.webrtc.org/11594): This class no longer implements the Module
+// interface and therefore "NackModule" may not be a descriptive name anymore.
+// Consider renaming to e.g. NackTracker or NackRequester.
+class NackModule2 final {
+ public:
+ static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(20);
+
+ NackModule2(TaskQueueBase* current_queue,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ TimeDelta update_interval = kUpdateInterval);
+ ~NackModule2();
+
+ int OnReceivedPacket(uint16_t seq_num, bool is_keyframe);
+ int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered);
+
+ void ClearUpTo(uint16_t seq_num);
+ void UpdateRtt(int64_t rtt_ms);
+
+ private:
+ // Which fields to consider when deciding which packet to nack in
+ // GetNackBatch.
+ enum NackFilterOptions { kSeqNumOnly, kTimeOnly, kSeqNumAndTime };
+
+ // This class holds the sequence number of the packet that is in the nack list
+ // as well as the meta data about when it should be nacked and how many times
+ // we have tried to nack this packet.
+ struct NackInfo {
+ NackInfo();
+ NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time);
+
+ uint16_t seq_num;
+ uint16_t send_at_seq_num;
+ int64_t created_at_time;
+ int64_t sent_at_time;
+ int retries;
+ };
+
+ struct BackoffSettings {
+ BackoffSettings(TimeDelta min_retry, TimeDelta max_rtt, double base);
+ static absl::optional<BackoffSettings> ParseFromFieldTrials();
+
+ // Min time between nacks.
+ const TimeDelta min_retry_interval;
+ // Upper bound on link-delay considered for exponential backoff.
+ const TimeDelta max_rtt;
+ // Base for the exponential backoff.
+ const double base;
+ };
+
+ void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Removes packets from the nack list until the next keyframe. Returns true
+ // if packets were removed.
+ bool RemovePacketsUntilKeyFrame()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+ std::vector<uint16_t> GetNackBatch(NackFilterOptions options)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Update the reordering distribution.
+ void UpdateReorderingStatistics(uint16_t seq_num)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Returns how many packets we have to wait in order to receive the packet
+ // with probability |probabilty| or higher.
+ int WaitNumberOfPackets(float probability) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ TaskQueueBase* const worker_thread_;
+
+ // Used to regularly call SendNack if needed.
+ RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(worker_thread_);
+ const TimeDelta update_interval_;
+
+ Clock* const clock_;
+ NackSender* const nack_sender_;
+ KeyFrameRequestSender* const keyframe_request_sender_;
+
+ // TODO(philipel): Some of the variables below are consistently used on a
+ // known thread (e.g. see |initialized_|). Those probably do not need
+ // synchronized access.
+ std::map<uint16_t, NackInfo, DescendingSeqNumComp<uint16_t>> nack_list_
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> keyframe_list_
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> recovered_list_
+ RTC_GUARDED_BY(worker_thread_);
+ video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(worker_thread_);
+ bool initialized_ RTC_GUARDED_BY(worker_thread_);
+ int64_t rtt_ms_ RTC_GUARDED_BY(worker_thread_);
+ uint16_t newest_seq_num_ RTC_GUARDED_BY(worker_thread_);
+
+ // Adds a delay before send nack on packet received.
+ const int64_t send_nack_delay_ms_;
+
+ const absl::optional<BackoffSettings> backoff_settings_;
+
+ // Used to signal destruction to potentially pending tasks.
+ ScopedTaskSafety task_safety_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_NACK_MODULE2_H_
diff --git a/modules/video_coding/nack_module2_unittest.cc b/modules/video_coding/nack_module2_unittest.cc
new file mode 100644
index 0000000000..acd1eead01
--- /dev/null
+++ b/modules/video_coding/nack_module2_unittest.cc
@@ -0,0 +1,411 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/nack_module2.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <cstring>
+#include <memory>
+
+#include "system_wrappers/include/clock.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+namespace webrtc {
+// TODO(bugs.webrtc.org/11594): Use the use the GlobalSimulatedTimeController
+// instead of RunLoop. At the moment we mix use of the Clock and the underlying
+// implementation of RunLoop, which is realtime.
+class TestNackModule2 : public ::testing::TestWithParam<bool>,
+ public NackSender,
+ public KeyFrameRequestSender {
+ protected:
+ TestNackModule2()
+ : clock_(new SimulatedClock(0)),
+ field_trial_(GetParam()
+ ? "WebRTC-ExponentialNackBackoff/enabled:true/"
+ : "WebRTC-ExponentialNackBackoff/enabled:false/"),
+ keyframes_requested_(0) {}
+
+ void SetUp() override {}
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override {
+ sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(),
+ sequence_numbers.end());
+ if (waiting_for_send_nack_) {
+ waiting_for_send_nack_ = false;
+ loop_.Quit();
+ }
+ }
+
+ void RequestKeyFrame() override { ++keyframes_requested_; }
+
+ void Flush() {
+ // nack_module.Process();
+ loop_.Flush();
+ }
+
+ bool WaitForSendNack() {
+ if (timed_out_) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ RTC_DCHECK(!waiting_for_send_nack_);
+
+ waiting_for_send_nack_ = true;
+ loop_.PostDelayedTask(
+ [this]() {
+ timed_out_ = true;
+ loop_.Quit();
+ },
+ 1000);
+
+ loop_.Run();
+
+ if (timed_out_)
+ return false;
+
+ RTC_DCHECK(!waiting_for_send_nack_);
+ return true;
+ }
+
+ NackModule2& CreateNackModule(
+ TimeDelta interval = NackModule2::kUpdateInterval) {
+ RTC_DCHECK(!nack_module_.get());
+ nack_module_ = std::make_unique<NackModule2>(
+ TaskQueueBase::Current(), clock_.get(), this, this, interval);
+ nack_module_->UpdateRtt(kDefaultRttMs);
+ return *nack_module_.get();
+ }
+
+ static constexpr int64_t kDefaultRttMs = 20;
+ test::RunLoop loop_;
+ std::unique_ptr<SimulatedClock> clock_;
+ test::ScopedFieldTrials field_trial_;
+ std::unique_ptr<NackModule2> nack_module_;
+ std::vector<uint16_t> sent_nacks_;
+ int keyframes_requested_;
+ bool waiting_for_send_nack_ = false;
+ bool timed_out_ = false;
+};
+
+TEST_P(TestNackModule2, NackOnePacket) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ ASSERT_EQ(1u, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, WrappingSeqNum) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0xfffe, false, false);
+ nack_module.OnReceivedPacket(1, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+}
+
+TEST_P(TestNackModule2, WrappingSeqNumClearToKeyframe) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(10));
+ nack_module.OnReceivedPacket(0xfffe, false, false);
+ nack_module.OnReceivedPacket(1, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(2, true, false);
+ ASSERT_EQ(0u, sent_nacks_.size());
+
+ nack_module.OnReceivedPacket(501, true, false);
+ ASSERT_EQ(498u, sent_nacks_.size());
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]);
+
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(499u, sent_nacks_.size());
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]);
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(999u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 1]);
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 2]);
+
+ // Adding packet 1004 will cause the nack list to reach it's max limit.
+ // It will then clear all nacks up to the next keyframe (seq num 2),
+ // thus removing 0xffff and 0 from the nack list.
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(1004, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(1002, sent_nacks_[0]);
+ EXPECT_EQ(1003, sent_nacks_[1]);
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(999u, sent_nacks_.size());
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]);
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 4]);
+
+ // Adding packet 1007 will cause the nack module to overflow again, thus
+ // clearing everything up to 501 which is the next keyframe.
+ nack_module.OnReceivedPacket(1007, false, false);
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(503u, sent_nacks_.size());
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]);
+ EXPECT_EQ(1005, sent_nacks_[501]);
+ EXPECT_EQ(1006, sent_nacks_[502]);
+}
+
+TEST_P(TestNackModule2, ResendNack) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ size_t expected_nacks_sent = 1;
+ ASSERT_EQ(expected_nacks_sent, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+
+ if (GetParam()) {
+ // Retry has to wait at least 5ms by default.
+ nack_module.UpdateRtt(1);
+ clock_->AdvanceTimeMilliseconds(4);
+ Flush(); // Too early.
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+
+ clock_->AdvanceTimeMilliseconds(1);
+ WaitForSendNack(); // Now allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ } else {
+ nack_module.UpdateRtt(1);
+ clock_->AdvanceTimeMilliseconds(1);
+ WaitForSendNack(); // Fast retransmit allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ }
+
+ // N:th try has to wait b^(N-1) * rtt by default.
+ const double b = GetParam() ? 1.25 : 1.0;
+ for (int i = 2; i < 10; ++i) {
+ // Change RTT, above the 40ms max for exponential backoff.
+ TimeDelta rtt = TimeDelta::Millis(160); // + (i * 10 - 40)
+ nack_module.UpdateRtt(rtt.ms());
+
+ // RTT gets capped at 160ms in backoff calculations.
+ TimeDelta expected_backoff_delay =
+ std::pow(b, i - 1) * std::min(rtt, TimeDelta::Millis(160));
+
+ // Move to one millisecond before next allowed NACK.
+ clock_->AdvanceTimeMilliseconds(expected_backoff_delay.ms() - 1);
+ Flush();
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+
+ // Move to one millisecond after next allowed NACK.
+ // After rather than on to avoid rounding errors.
+ clock_->AdvanceTimeMilliseconds(2);
+ WaitForSendNack(); // Now allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ }
+
+ // Giving up after 10 tries.
+ clock_->AdvanceTimeMilliseconds(3000);
+ Flush();
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, ResendPacketMaxRetries) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ ASSERT_EQ(1u, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+
+ int backoff_factor = 1;
+ for (size_t retries = 1; retries < 10; ++retries) {
+ // Exponential backoff, so that we don't reject NACK because of time.
+ clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs);
+ backoff_factor *= 2;
+ WaitForSendNack();
+ EXPECT_EQ(retries + 1, sent_nacks_.size());
+ }
+
+ clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs);
+ Flush();
+ EXPECT_EQ(10u, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, TooLargeNackList) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1003, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+ nack_module.OnReceivedPacket(1004, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+}
+
+TEST_P(TestNackModule2, TooLargeNackListWithKeyFrame) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(1, true, false);
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(999u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1003, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1005, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+}
+
+TEST_P(TestNackModule2, ClearUpTo) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module.ClearUpTo(50);
+ WaitForSendNack();
+ ASSERT_EQ(50u, sent_nacks_.size());
+ EXPECT_EQ(50, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, ClearUpToWrap) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0xfff0, false, false);
+ nack_module.OnReceivedPacket(0xf, false, false);
+ EXPECT_EQ(30u, sent_nacks_.size());
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module.ClearUpTo(0);
+ WaitForSendNack();
+ ASSERT_EQ(15u, sent_nacks_.size());
+ EXPECT_EQ(0, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, PacketNackCount) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(0, false, false));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(2, false, false));
+ EXPECT_EQ(1, nack_module.OnReceivedPacket(1, false, false));
+
+ sent_nacks_.clear();
+ nack_module.UpdateRtt(100);
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(5, false, false));
+ clock_->AdvanceTimeMilliseconds(100);
+ WaitForSendNack();
+ EXPECT_EQ(4u, sent_nacks_.size());
+
+ clock_->AdvanceTimeMilliseconds(125);
+ WaitForSendNack();
+
+ EXPECT_EQ(6u, sent_nacks_.size());
+
+ EXPECT_EQ(3, nack_module.OnReceivedPacket(3, false, false));
+ EXPECT_EQ(3, nack_module.OnReceivedPacket(4, false, false));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(4, false, false));
+}
+
+TEST_P(TestNackModule2, NackListFullAndNoOverlapWithKeyframes) {
+ NackModule2& nack_module = CreateNackModule();
+ const int kMaxNackPackets = 1000;
+ const unsigned int kFirstGap = kMaxNackPackets - 20;
+ const unsigned int kSecondGap = 200;
+ uint16_t seq_num = 0;
+ nack_module.OnReceivedPacket(seq_num++, true, false);
+ seq_num += kFirstGap;
+ nack_module.OnReceivedPacket(seq_num++, true, false);
+ EXPECT_EQ(kFirstGap, sent_nacks_.size());
+ sent_nacks_.clear();
+ seq_num += kSecondGap;
+ nack_module.OnReceivedPacket(seq_num, true, false);
+ EXPECT_EQ(kSecondGap, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, HandleFecRecoveredPacket) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(4, false, true);
+ EXPECT_EQ(0u, sent_nacks_.size());
+ nack_module.OnReceivedPacket(5, false, false);
+ EXPECT_EQ(2u, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, SendNackWithoutDelay) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+}
+
+INSTANTIATE_TEST_SUITE_P(WithAndWithoutBackoff,
+ TestNackModule2,
+ ::testing::Values(true, false));
+
+class TestNackModule2WithFieldTrial : public ::testing::Test,
+ public NackSender,
+ public KeyFrameRequestSender {
+ protected:
+ TestNackModule2WithFieldTrial()
+ : nack_delay_field_trial_("WebRTC-SendNackDelayMs/10/"),
+ clock_(new SimulatedClock(0)),
+ nack_module_(TaskQueueBase::Current(), clock_.get(), this, this),
+ keyframes_requested_(0) {}
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override {
+ sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(),
+ sequence_numbers.end());
+ }
+
+ void RequestKeyFrame() override { ++keyframes_requested_; }
+
+ test::ScopedFieldTrials nack_delay_field_trial_;
+ std::unique_ptr<SimulatedClock> clock_;
+ NackModule2 nack_module_;
+ std::vector<uint16_t> sent_nacks_;
+ int keyframes_requested_;
+};
+
+TEST_F(TestNackModule2WithFieldTrial, SendNackWithDelay) {
+ nack_module_.OnReceivedPacket(0, false, false);
+ nack_module_.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(0u, sent_nacks_.size());
+ clock_->AdvanceTimeMilliseconds(10);
+ nack_module_.OnReceivedPacket(106, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+ clock_->AdvanceTimeMilliseconds(10);
+ nack_module_.OnReceivedPacket(109, false, false);
+ EXPECT_EQ(104u, sent_nacks_.size());
+}
+} // namespace webrtc
diff --git a/modules/video_coding/nack_module_unittest.cc b/modules/video_coding/nack_module_unittest.cc
index ab1c76f1b5..f91eb750f0 100644
--- a/modules/video_coding/nack_module_unittest.cc
+++ b/modules/video_coding/nack_module_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/video_coding/nack_module.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include <algorithm>
#include <cstdint>
@@ -45,7 +45,7 @@ class TestNackModule : public ::testing::TestWithParam<bool>,
static constexpr int64_t kDefaultRttMs = 20;
std::unique_ptr<SimulatedClock> clock_;
test::ScopedFieldTrials field_trial_;
- NackModule nack_module_;
+ DEPRECATED_NackModule nack_module_;
std::vector<uint16_t> sent_nacks_;
int keyframes_requested_;
};
@@ -352,7 +352,7 @@ class TestNackModuleWithFieldTrial : public ::testing::Test,
test::ScopedFieldTrials nack_delay_field_trial_;
std::unique_ptr<SimulatedClock> clock_;
- NackModule nack_module_;
+ DEPRECATED_NackModule nack_module_;
std::vector<uint16_t> sent_nacks_;
int keyframes_requested_;
};
diff --git a/modules/video_coding/packet_buffer.cc b/modules/video_coding/packet_buffer.cc
index 5db3c0f670..7da8a1c301 100644
--- a/modules/video_coding/packet_buffer.cc
+++ b/modules/video_coding/packet_buffer.cc
@@ -78,7 +78,7 @@ PacketBuffer::~PacketBuffer() {
PacketBuffer::InsertResult PacketBuffer::InsertPacket(
std::unique_ptr<PacketBuffer::Packet> packet) {
PacketBuffer::InsertResult result;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uint16_t seq_num = packet->seq_num;
size_t index = seq_num % buffer_.size();
@@ -112,7 +112,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket(
// Clear the buffer, delete payload, and return false to signal that a
// new keyframe is needed.
RTC_LOG(LS_WARNING) << "Clear PacketBuffer and request key frame.";
- Clear();
+ ClearInternal();
result.buffer_cleared = true;
return result;
}
@@ -136,7 +136,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket(
}
void PacketBuffer::ClearTo(uint16_t seq_num) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// We have already cleared past this sequence number, no need to do anything.
if (is_cleared_to_first_seq_num_ &&
AheadOf<uint16_t>(first_seq_num_, seq_num)) {
@@ -173,37 +173,41 @@ void PacketBuffer::ClearTo(uint16_t seq_num) {
}
void PacketBuffer::Clear() {
- rtc::CritScope lock(&crit_);
- for (auto& entry : buffer_) {
- entry = nullptr;
- }
-
- first_packet_received_ = false;
- is_cleared_to_first_seq_num_ = false;
- last_received_packet_ms_.reset();
- last_received_keyframe_packet_ms_.reset();
- newest_inserted_seq_num_.reset();
- missing_packets_.clear();
+ MutexLock lock(&mutex_);
+ ClearInternal();
}
PacketBuffer::InsertResult PacketBuffer::InsertPadding(uint16_t seq_num) {
PacketBuffer::InsertResult result;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
UpdateMissingPackets(seq_num);
result.packets = FindFrames(static_cast<uint16_t>(seq_num + 1));
return result;
}
absl::optional<int64_t> PacketBuffer::LastReceivedPacketMs() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return last_received_packet_ms_;
}
absl::optional<int64_t> PacketBuffer::LastReceivedKeyframePacketMs() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return last_received_keyframe_packet_ms_;
}
+void PacketBuffer::ClearInternal() {
+ for (auto& entry : buffer_) {
+ entry = nullptr;
+ }
+
+ first_packet_received_ = false;
+ is_cleared_to_first_seq_num_ = false;
+ last_received_packet_ms_.reset();
+ last_received_keyframe_packet_ms_.reset();
+ newest_inserted_seq_num_.reset();
+ missing_packets_.clear();
+}
+
bool PacketBuffer::ExpandBufferSize() {
if (buffer_.size() == max_size_) {
RTC_LOG(LS_WARNING) << "PacketBuffer is already at max size (" << max_size_
@@ -359,15 +363,10 @@ std::vector<std::unique_ptr<PacketBuffer::Packet>> PacketBuffer::FindFrames(
VideoFrameType::kVideoFrameDelta;
}
- // With IPPP, if this is not a keyframe, make sure there are no gaps
- // in the packet sequence numbers up until this point.
- const uint8_t h264tid =
- buffer_[start_index] != nullptr
- ? buffer_[start_index]->video_header.frame_marking.temporal_id
- : kNoTemporalIdx;
- if (h264tid == kNoTemporalIdx && !is_h264_keyframe &&
- missing_packets_.upper_bound(start_seq_num) !=
- missing_packets_.begin()) {
+ // If this is not a keyframe, make sure there are no gaps in the packet
+ // sequence numbers up until this point.
+ if (!is_h264_keyframe && missing_packets_.upper_bound(start_seq_num) !=
+ missing_packets_.begin()) {
return found_frames;
}
}
diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h
index c480e37239..508fa8395f 100644
--- a/modules/video_coding/packet_buffer.h
+++ b/modules/video_coding/packet_buffer.h
@@ -22,8 +22,8 @@
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -82,62 +82,68 @@ class PacketBuffer {
PacketBuffer(Clock* clock, size_t start_buffer_size, size_t max_buffer_size);
~PacketBuffer();
- InsertResult InsertPacket(std::unique_ptr<Packet> packet)
- ABSL_MUST_USE_RESULT;
- InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT;
- void ClearTo(uint16_t seq_num);
- void Clear();
+ InsertResult InsertPacket(std::unique_ptr<Packet> packet) ABSL_MUST_USE_RESULT
+ RTC_LOCKS_EXCLUDED(mutex_);
+ InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT
+ RTC_LOCKS_EXCLUDED(mutex_);
+ void ClearTo(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_);
+ void Clear() RTC_LOCKS_EXCLUDED(mutex_);
// Timestamp (not RTP timestamp) of the last received packet/keyframe packet.
- absl::optional<int64_t> LastReceivedPacketMs() const;
- absl::optional<int64_t> LastReceivedKeyframePacketMs() const;
+ absl::optional<int64_t> LastReceivedPacketMs() const
+ RTC_LOCKS_EXCLUDED(mutex_);
+ absl::optional<int64_t> LastReceivedKeyframePacketMs() const
+ RTC_LOCKS_EXCLUDED(mutex_);
private:
Clock* const clock_;
+ // Clears with |mutex_| taken.
+ void ClearInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
// Tries to expand the buffer.
- bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Test if all previous packets has arrived for the given sequence number.
bool PotentialNewFrame(uint16_t seq_num) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Test if all packets of a frame has arrived, and if so, returns packets to
// create frames.
std::vector<std::unique_ptr<Packet>> FindFrames(uint16_t seq_num)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void UpdateMissingPackets(uint16_t seq_num)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
// buffer_.size() and max_size_ must always be a power of two.
const size_t max_size_;
// The fist sequence number currently in the buffer.
- uint16_t first_seq_num_ RTC_GUARDED_BY(crit_);
+ uint16_t first_seq_num_ RTC_GUARDED_BY(mutex_);
// If the packet buffer has received its first packet.
- bool first_packet_received_ RTC_GUARDED_BY(crit_);
+ bool first_packet_received_ RTC_GUARDED_BY(mutex_);
// If the buffer is cleared to |first_seq_num_|.
- bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(crit_);
+ bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(mutex_);
// Buffer that holds the the inserted packets and information needed to
// determine continuity between them.
- std::vector<std::unique_ptr<Packet>> buffer_ RTC_GUARDED_BY(crit_);
+ std::vector<std::unique_ptr<Packet>> buffer_ RTC_GUARDED_BY(mutex_);
// Timestamp of the last received packet/keyframe packet.
- absl::optional<int64_t> last_received_packet_ms_ RTC_GUARDED_BY(crit_);
+ absl::optional<int64_t> last_received_packet_ms_ RTC_GUARDED_BY(mutex_);
absl::optional<int64_t> last_received_keyframe_packet_ms_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
absl::optional<uint32_t> last_received_keyframe_rtp_timestamp_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
- absl::optional<uint16_t> newest_inserted_seq_num_ RTC_GUARDED_BY(crit_);
+ absl::optional<uint16_t> newest_inserted_seq_num_ RTC_GUARDED_BY(mutex_);
std::set<uint16_t, DescendingSeqNumComp<uint16_t>> missing_packets_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
// Indicates if we should require SPS, PPS, and IDR for a particular
// RTP timestamp to treat the corresponding frame as a keyframe.
diff --git a/modules/video_coding/receiver.h b/modules/video_coding/receiver.h
index 64a157f05e..8f6b041a5a 100644
--- a/modules/video_coding/receiver.h
+++ b/modules/video_coding/receiver.h
@@ -20,7 +20,6 @@
#include "modules/video_coding/jitter_buffer.h"
#include "modules/video_coding/packet.h"
#include "modules/video_coding/timing.h"
-#include "rtc_base/critical_section.h"
namespace webrtc {
diff --git a/modules/video_coding/rtp_frame_reference_finder.cc b/modules/video_coding/rtp_frame_reference_finder.cc
index bdef991b8b..2a43c275d6 100644
--- a/modules/video_coding/rtp_frame_reference_finder.cc
+++ b/modules/video_coding/rtp_frame_reference_finder.cc
@@ -108,8 +108,6 @@ RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) {
return ManageFrameVp8(frame);
case kVideoCodecVP9:
return ManageFrameVp9(frame);
- case kVideoCodecH264:
- return ManageFrameH264(frame);
case kVideoCodecGeneric:
if (auto* generic_header = absl::get_if<RTPVideoHeaderLegacyGeneric>(
&frame->GetRtpVideoHeader().video_type_header)) {
@@ -715,130 +713,6 @@ void RtpFrameReferenceFinder::UnwrapPictureIds(RtpFrameObject* frame) {
frame->id.picture_id = unwrapper_.Unwrap(frame->id.picture_id);
}
-RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameH264(
- RtpFrameObject* frame) {
- const FrameMarking& rtp_frame_marking = frame->GetFrameMarking();
-
- uint8_t tid = rtp_frame_marking.temporal_id;
- bool blSync = rtp_frame_marking.base_layer_sync;
-
- if (tid == kNoTemporalIdx)
- return ManageFramePidOrSeqNum(std::move(frame), kNoPictureId);
-
- // Protect against corrupted packets with arbitrary large temporal idx.
- if (tid >= kMaxTemporalLayers)
- return kDrop;
-
- frame->id.picture_id = frame->last_seq_num();
-
- if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
- // For H264, use last_seq_num_gop_ to simply store last picture id
- // as a pair of unpadded and padded sequence numbers.
- if (last_seq_num_gop_.empty()) {
- last_seq_num_gop_.insert(std::make_pair(
- 0, std::make_pair(frame->id.picture_id, frame->id.picture_id)));
- }
- }
-
- // Stash if we have no keyframe yet.
- if (last_seq_num_gop_.empty())
- return kStash;
-
- // Check for gap in sequence numbers. Store in |not_yet_received_seq_num_|.
- if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) {
- uint16_t last_pic_id_padded = last_seq_num_gop_.begin()->second.second;
- if (AheadOf<uint16_t>(frame->id.picture_id, last_pic_id_padded)) {
- do {
- last_pic_id_padded = last_pic_id_padded + 1;
- not_yet_received_seq_num_.insert(last_pic_id_padded);
- } while (last_pic_id_padded != frame->id.picture_id);
- }
- }
-
- int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(rtp_frame_marking.tl0_pic_idx);
-
- // Clean up info for base layers that are too old.
- int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo;
- auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
- layer_info_.erase(layer_info_.begin(), clean_layer_info_to);
-
- // Clean up info about not yet received frames that are too old.
- uint16_t old_picture_id = frame->id.picture_id - kMaxNotYetReceivedFrames * 2;
- auto clean_frames_to = not_yet_received_seq_num_.lower_bound(old_picture_id);
- not_yet_received_seq_num_.erase(not_yet_received_seq_num_.begin(),
- clean_frames_to);
-
- if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
- frame->num_references = 0;
- layer_info_[unwrapped_tl0].fill(-1);
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- auto layer_info_it =
- layer_info_.find(tid == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0);
-
- // Stash if we have no base layer frame yet.
- if (layer_info_it == layer_info_.end())
- return kStash;
-
- // Base layer frame. Copy layer info from previous base layer frame.
- if (tid == 0) {
- layer_info_it =
- layer_info_.insert(std::make_pair(unwrapped_tl0, layer_info_it->second))
- .first;
- frame->num_references = 1;
- frame->references[0] = layer_info_it->second[0];
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- // This frame only references its base layer frame.
- if (blSync) {
- frame->num_references = 1;
- frame->references[0] = layer_info_it->second[0];
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- // Find all references for general frame.
- frame->num_references = 0;
- for (uint8_t layer = 0; layer <= tid; ++layer) {
- // Stash if we have not yet received frames on this temporal layer.
- if (layer_info_it->second[layer] == -1)
- return kStash;
-
- // Drop if the last frame on this layer is ahead of this frame. A layer sync
- // frame was received after this frame for the same base layer frame.
- uint16_t last_frame_in_layer = layer_info_it->second[layer];
- if (AheadOf<uint16_t>(last_frame_in_layer, frame->id.picture_id))
- return kDrop;
-
- // Stash and wait for missing frame between this frame and the reference
- auto not_received_seq_num_it =
- not_yet_received_seq_num_.upper_bound(last_frame_in_layer);
- if (not_received_seq_num_it != not_yet_received_seq_num_.end() &&
- AheadOf<uint16_t>(frame->id.picture_id, *not_received_seq_num_it)) {
- return kStash;
- }
-
- if (!(AheadOf<uint16_t>(frame->id.picture_id, last_frame_in_layer))) {
- RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id
- << " and packet range [" << frame->first_seq_num()
- << ", " << frame->last_seq_num()
- << "] already received, "
- " dropping frame.";
- return kDrop;
- }
-
- ++frame->num_references;
- frame->references[layer] = last_frame_in_layer;
- }
-
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
-}
-
void RtpFrameReferenceFinder::UpdateLastPictureIdWithPaddingH264() {
auto seq_num_it = last_seq_num_gop_.begin();
diff --git a/modules/video_coding/rtp_frame_reference_finder.h b/modules/video_coding/rtp_frame_reference_finder.h
index d9c7c72d1e..ed67b91fed 100644
--- a/modules/video_coding/rtp_frame_reference_finder.h
+++ b/modules/video_coding/rtp_frame_reference_finder.h
@@ -21,7 +21,6 @@
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/thread_annotations.h"
diff --git a/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/modules/video_coding/rtp_frame_reference_finder_unittest.cc
index 9ded6bcb96..0c08ddd302 100644
--- a/modules/video_coding/rtp_frame_reference_finder_unittest.cc
+++ b/modules/video_coding/rtp_frame_reference_finder_unittest.cc
@@ -32,13 +32,11 @@ std::unique_ptr<RtpFrameObject> CreateFrame(
uint16_t seq_num_end,
bool keyframe,
VideoCodecType codec,
- const RTPVideoTypeHeader& video_type_header,
- const FrameMarking& frame_markings) {
+ const RTPVideoTypeHeader& video_type_header) {
RTPVideoHeader video_header;
video_header.frame_type = keyframe ? VideoFrameType::kVideoFrameKey
: VideoFrameType::kVideoFrameDelta;
video_header.video_type_header = video_type_header;
- video_header.frame_marking = frame_markings;
// clang-format off
return std::make_unique<RtpFrameObject>(
@@ -92,7 +90,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
bool keyframe) {
std::unique_ptr<RtpFrameObject> frame =
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecGeneric,
- RTPVideoTypeHeader(), FrameMarking());
+ RTPVideoTypeHeader());
reference_finder_->ManageFrame(std::move(frame));
}
@@ -110,9 +108,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
vp8_header.tl0PicIdx = tl0;
vp8_header.layerSync = sync;
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP8,
- vp8_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP8, vp8_header);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -140,9 +137,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
vp9_header.gof = *ss;
}
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9,
- vp9_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -166,26 +162,15 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
for (size_t i = 0; i < refs.size(); ++i)
vp9_header.pid_diff[i] = refs[i];
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9,
- vp9_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header);
reference_finder_->ManageFrame(std::move(frame));
}
- void InsertH264(uint16_t seq_num_start,
- uint16_t seq_num_end,
- bool keyframe,
- uint8_t tid = kNoTemporalIdx,
- int32_t tl0 = kNoTl0PicIdx,
- bool sync = false) {
- FrameMarking frame_marking{};
- frame_marking.temporal_id = tid;
- frame_marking.tl0_pic_idx = tl0;
- frame_marking.base_layer_sync = sync;
-
+ void InsertH264(uint16_t seq_num_start, uint16_t seq_num_end, bool keyframe) {
std::unique_ptr<RtpFrameObject> frame =
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecH264,
- RTPVideoTypeHeader(), frame_marking);
+ RTPVideoTypeHeader());
reference_finder_->ManageFrame(std::move(frame));
}
@@ -1440,53 +1425,46 @@ TEST_F(TestRtpFrameReferenceFinder, H264KeyFrameReferences) {
CheckReferencesH264(sn);
}
-// Test with 1 temporal layer.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0) {
- uint16_t sn = Rand();
+TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) {
+ uint16_t sn = 0xFFFF;
- InsertH264(sn, sn, true, 0, 1);
- InsertH264(sn + 1, sn + 1, false, 0, 2);
- InsertH264(sn + 2, sn + 2, false, 0, 3);
- InsertH264(sn + 3, sn + 3, false, 0, 4);
+ InsertH264(sn - 1, sn - 1, true);
+ InsertH264(sn, sn, false);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 2, false);
ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
+ CheckReferencesH264(sn - 1);
+ CheckReferencesH264(sn, sn - 1);
CheckReferencesH264(sn + 1, sn);
CheckReferencesH264(sn + 2, sn + 1);
- CheckReferencesH264(sn + 3, sn + 2);
}
-TEST_F(TestRtpFrameReferenceFinder, H264DuplicateTl1Frames) {
+TEST_F(TestRtpFrameReferenceFinder, H264Frames) {
uint16_t sn = Rand();
- InsertH264(sn, sn, true, 0, 0);
- InsertH264(sn + 1, sn + 1, false, 1, 0, true);
- InsertH264(sn + 2, sn + 2, false, 0, 1);
- InsertH264(sn + 3, sn + 3, false, 1, 1);
- InsertH264(sn + 3, sn + 3, false, 1, 1);
- InsertH264(sn + 4, sn + 4, false, 0, 2);
- InsertH264(sn + 5, sn + 5, false, 1, 2);
+ InsertH264(sn, sn, true);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 2, false);
+ InsertH264(sn + 3, sn + 3, false);
- ASSERT_EQ(6UL, frames_from_callback_.size());
+ ASSERT_EQ(4UL, frames_from_callback_.size());
CheckReferencesH264(sn);
CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 3, sn + 4);
+ CheckReferencesH264(sn + 2, sn + 1);
+ CheckReferencesH264(sn + 3, sn + 2);
}
-// Test with 1 temporal layer.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) {
+TEST_F(TestRtpFrameReferenceFinder, H264Reordering) {
uint16_t sn = Rand();
- InsertH264(sn, sn, true, 0, 1);
- InsertH264(sn + 1, sn + 1, false, 0, 2);
- InsertH264(sn + 3, sn + 3, false, 0, 4);
- InsertH264(sn + 2, sn + 2, false, 0, 3);
- InsertH264(sn + 5, sn + 5, false, 0, 6);
- InsertH264(sn + 6, sn + 6, false, 0, 7);
- InsertH264(sn + 4, sn + 4, false, 0, 5);
+ InsertH264(sn, sn, true);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 3, sn + 3, false);
+ InsertH264(sn + 2, sn + 2, false);
+ InsertH264(sn + 5, sn + 5, false);
+ InsertH264(sn + 6, sn + 6, false);
+ InsertH264(sn + 4, sn + 4, false);
ASSERT_EQ(7UL, frames_from_callback_.size());
CheckReferencesH264(sn);
@@ -1498,258 +1476,13 @@ TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) {
CheckReferencesH264(sn + 6, sn + 5);
}
-// Test with 2 temporal layers in a 01 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 255);
- InsertH264(sn + 1, sn + 1, false, 1, 255, true);
- InsertH264(sn + 2, sn + 2, false, 0, 0);
- InsertH264(sn + 3, sn + 3, false, 1, 0);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMultiSn_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn + 3, true, 0, 255);
- InsertH264(sn + 4, sn + 5, false, 1, 255, true);
- InsertH264(sn + 6, sn + 8, false, 0, 0);
- InsertH264(sn + 9, sn + 9, false, 1, 0);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn + 3);
- CheckReferencesH264(sn + 5, sn + 3);
- CheckReferencesH264(sn + 8, sn + 3);
- CheckReferencesH264(sn + 9, sn + 5, sn + 8);
-}
-
-// Test with 2 temporal layers in a 01 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn + 1, sn + 1, false, 1, 255, true);
- InsertH264(sn, sn, true, 0, 255);
- InsertH264(sn + 3, sn + 3, false, 1, 0);
- InsertH264(sn + 5, sn + 5, false, 1, 1);
- InsertH264(sn + 2, sn + 2, false, 0, 0);
- InsertH264(sn + 4, sn + 4, false, 0, 1);
- InsertH264(sn + 6, sn + 6, false, 0, 2);
- InsertH264(sn + 7, sn + 7, false, 1, 2);
-
- ASSERT_EQ(8UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 3, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 5, sn + 6);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 55);
- InsertH264(sn + 1, sn + 1, false, 2, 55, true);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 3, sn + 3, false, 2, 55);
- InsertH264(sn + 4, sn + 4, false, 0, 56);
- InsertH264(sn + 5, sn + 5, false, 2, 56, true);
- InsertH264(sn + 6, sn + 6, false, 1, 56, true);
- InsertH264(sn + 7, sn + 7, false, 2, 56);
- InsertH264(sn + 8, sn + 8, false, 0, 57);
- InsertH264(sn + 9, sn + 9, false, 2, 57, true);
- InsertH264(sn + 10, sn + 10, false, 1, 57, true);
- InsertH264(sn + 11, sn + 11, false, 2, 57);
-
- ASSERT_EQ(12UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn);
- CheckReferencesH264(sn + 5, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6);
- CheckReferencesH264(sn + 8, sn + 4);
- CheckReferencesH264(sn + 9, sn + 8);
- CheckReferencesH264(sn + 10, sn + 8);
- CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMissingFrame_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 55, false);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 3, sn + 3, false, 2, 55, false);
-
- ASSERT_EQ(2UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 2, sn);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn + 1, sn + 1, false, 2, 55, true);
- InsertH264(sn, sn, true, 0, 55, false);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 4, sn + 4, false, 0, 56, false);
- InsertH264(sn + 5, sn + 5, false, 2, 56, false);
- InsertH264(sn + 3, sn + 3, false, 2, 55, false);
- InsertH264(sn + 7, sn + 7, false, 2, 56, false);
- InsertH264(sn + 9, sn + 9, false, 2, 57, true);
- InsertH264(sn + 6, sn + 6, false, 1, 56, false);
- InsertH264(sn + 8, sn + 8, false, 0, 57, false);
- InsertH264(sn + 11, sn + 11, false, 2, 57, false);
- InsertH264(sn + 10, sn + 10, false, 1, 57, true);
-
- ASSERT_EQ(12UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn);
- CheckReferencesH264(sn + 5, sn + 2, sn + 3, sn + 4);
- CheckReferencesH264(sn + 6, sn + 2, sn + 4);
- CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6);
- CheckReferencesH264(sn + 8, sn + 4);
- CheckReferencesH264(sn + 9, sn + 8);
- CheckReferencesH264(sn + 10, sn + 8);
- CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264InsertManyFrames_0212) {
- uint16_t sn = Rand();
-
- const int keyframes_to_insert = 50;
- const int frames_per_keyframe = 120; // Should be a multiple of 4.
- uint8_t tl0 = 128;
-
- for (int k = 0; k < keyframes_to_insert; ++k) {
- InsertH264(sn, sn, true, 0, tl0, false);
- InsertH264(sn + 1, sn + 1, false, 2, tl0, true);
- InsertH264(sn + 2, sn + 2, false, 1, tl0, true);
- InsertH264(sn + 3, sn + 3, false, 2, tl0, false);
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- frames_from_callback_.clear();
- ++tl0;
-
- for (int f = 4; f < frames_per_keyframe; f += 4) {
- uint16_t sf = sn + f;
-
- InsertH264(sf, sf, false, 0, tl0, false);
- InsertH264(sf + 1, sf + 1, false, 2, tl0, false);
- InsertH264(sf + 2, sf + 2, false, 1, tl0, false);
- InsertH264(sf + 3, sf + 3, false, 2, tl0, false);
- CheckReferencesH264(sf, sf - 4);
- CheckReferencesH264(sf + 1, sf, sf - 1, sf - 2);
- CheckReferencesH264(sf + 2, sf, sf - 2);
- CheckReferencesH264(sf + 3, sf, sf + 1, sf + 2);
- frames_from_callback_.clear();
- ++tl0;
- }
-
- sn += frames_per_keyframe;
- }
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264LayerSync) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 0, false);
- InsertH264(sn + 1, sn + 1, false, 1, 0, true);
- InsertH264(sn + 2, sn + 2, false, 0, 1, false);
- ASSERT_EQ(3UL, frames_from_callback_.size());
-
- InsertH264(sn + 4, sn + 4, false, 0, 2, false);
- InsertH264(sn + 5, sn + 5, false, 1, 2, true);
- InsertH264(sn + 6, sn + 6, false, 0, 3, false);
- InsertH264(sn + 7, sn + 7, false, 1, 3, false);
-
- ASSERT_EQ(7UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 6, sn + 5);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264Tl1SyncFrameAfterTl1Frame) {
- InsertH264(1000, 1000, true, 0, 247, true);
- InsertH264(1001, 1001, false, 0, 248, false);
- InsertH264(1002, 1002, false, 1, 248, false); // Will be dropped
- InsertH264(1003, 1003, false, 1, 248, true); // due to this frame.
-
- ASSERT_EQ(3UL, frames_from_callback_.size());
- CheckReferencesH264(1000);
- CheckReferencesH264(1001, 1000);
- CheckReferencesH264(1003, 1001);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264DetectMissingFrame_0212) {
- InsertH264(1, 1, true, 0, 1, false);
- InsertH264(2, 2, false, 2, 1, true);
- InsertH264(3, 3, false, 1, 1, true);
- InsertH264(4, 4, false, 2, 1, false);
-
- InsertH264(6, 6, false, 2, 2, false);
- InsertH264(7, 7, false, 1, 2, false);
- InsertH264(8, 8, false, 2, 2, false);
- ASSERT_EQ(4UL, frames_from_callback_.size());
-
- InsertH264(5, 5, false, 0, 2, false);
- ASSERT_EQ(8UL, frames_from_callback_.size());
-
- CheckReferencesH264(1);
- CheckReferencesH264(2, 1);
- CheckReferencesH264(3, 1);
- CheckReferencesH264(4, 3, 2, 1);
-
- CheckReferencesH264(5, 1);
- CheckReferencesH264(6, 5, 4, 3);
- CheckReferencesH264(7, 5, 3);
- CheckReferencesH264(8, 7, 6, 5);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) {
- uint16_t sn = 0xFFFF;
-
- InsertH264(sn - 1, sn - 1, true, 0, 1);
- InsertH264(sn, sn, false, 0, 2);
- InsertH264(sn + 1, sn + 1, false, 0, 3);
- InsertH264(sn + 2, sn + 2, false, 0, 4);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn - 1);
- CheckReferencesH264(sn, sn - 1);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn + 1);
-}
-
TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrapMulti) {
uint16_t sn = 0xFFFF;
- InsertH264(sn - 3, sn - 2, true, 0, 1);
- InsertH264(sn - 1, sn + 1, false, 0, 2);
- InsertH264(sn + 2, sn + 3, false, 0, 3);
- InsertH264(sn + 4, sn + 7, false, 0, 4);
+ InsertH264(sn - 3, sn - 2, true);
+ InsertH264(sn - 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 3, false);
+ InsertH264(sn + 4, sn + 7, false);
ASSERT_EQ(4UL, frames_from_callback_.size());
CheckReferencesH264(sn - 2);
@@ -1758,35 +1491,5 @@ TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrapMulti) {
CheckReferencesH264(sn + 7, sn + 3);
}
-TEST_F(TestRtpFrameReferenceFinder, H264Tl0PicIdxWrap) {
- int numTl0Wraps = 1000;
- int64_t sn = Rand();
-
- for (int i = 0; i < numTl0Wraps; i++) {
- for (int tl0 = 0; tl0 < 256; tl0 += 16, sn += 16) {
- InsertH264(sn, sn, true, 0, tl0);
- reference_finder_->ClearTo(sn); // Too many stashed frames cause errors.
-
- for (int k = 1; k < 8; k++) {
- InsertH264(sn + k, sn + k, false, 0, tl0 + k);
- }
-
- // Skip a TL0 index.
- for (int k = 9; k < 16; k++) {
- InsertH264(sn + k, sn + k, false, 0, tl0 + k);
- }
-
- ASSERT_EQ(8UL, frames_from_callback_.size());
-
- CheckReferencesH264(sn);
- for (int k = 1; k < 8; k++) {
- CheckReferencesH264(sn + k, sn + k - 1);
- }
-
- frames_from_callback_.clear();
- }
- }
-}
-
} // namespace video_coding
} // namespace webrtc
diff --git a/modules/video_coding/session_info.cc b/modules/video_coding/session_info.cc
index e51d293607..07b9a9d6b5 100644
--- a/modules/video_coding/session_info.cc
+++ b/modules/video_coding/session_info.cc
@@ -95,8 +95,6 @@ int VCMSessionInfo::TemporalId() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.temporal_idx;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.temporal_id;
} else {
return kNoTemporalIdx;
}
@@ -113,8 +111,6 @@ bool VCMSessionInfo::LayerSync() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.temporal_up_switch;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.base_layer_sync;
} else {
return false;
}
@@ -131,8 +127,6 @@ int VCMSessionInfo::Tl0PicId() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.tl0_pic_idx;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.tl0_pic_idx;
} else {
return kNoTl0PicIdx;
}
diff --git a/modules/video_coding/timing.cc b/modules/video_coding/timing.cc
index c62c848c09..f046edf497 100644
--- a/modules/video_coding/timing.cc
+++ b/modules/video_coding/timing.cc
@@ -47,7 +47,7 @@ VCMTiming::~VCMTiming() {
}
void VCMTiming::Reset() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
codec_timer_.reset(new VCMCodecTimer());
render_delay_ms_ = kDefaultRenderDelayMs;
@@ -58,32 +58,32 @@ void VCMTiming::Reset() {
}
void VCMTiming::set_render_delay(int render_delay_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
render_delay_ms_ = render_delay_ms;
}
void VCMTiming::set_min_playout_delay(int min_playout_delay_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
min_playout_delay_ms_ = min_playout_delay_ms;
}
int VCMTiming::min_playout_delay() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return min_playout_delay_ms_;
}
void VCMTiming::set_max_playout_delay(int max_playout_delay_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
max_playout_delay_ms_ = max_playout_delay_ms;
}
int VCMTiming::max_playout_delay() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return max_playout_delay_ms_;
}
void VCMTiming::SetJitterDelay(int jitter_delay_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
if (jitter_delay_ms != jitter_delay_ms_) {
jitter_delay_ms_ = jitter_delay_ms;
// When in initial state, set current delay to minimum delay.
@@ -94,7 +94,7 @@ void VCMTiming::SetJitterDelay(int jitter_delay_ms) {
}
void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
int target_delay_ms = TargetDelayInternal();
if (current_delay_ms_ == 0) {
@@ -135,7 +135,7 @@ void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
int64_t actual_decode_time_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
uint32_t target_delay_ms = TargetDelayInternal();
int64_t delayed_ms =
actual_decode_time_ms -
@@ -158,20 +158,20 @@ void VCMTiming::StopDecodeTimer(uint32_t /*time_stamp*/,
}
void VCMTiming::StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
codec_timer_->AddTiming(decode_time_ms, now_ms);
assert(decode_time_ms >= 0);
++num_decoded_frames_;
}
void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
ts_extrapolator_->Update(now_ms, time_stamp);
}
int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp,
int64_t now_ms) const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return RenderTimeMsInternal(frame_timestamp, now_ms);
}
@@ -202,7 +202,7 @@ int VCMTiming::RequiredDecodeTimeMs() const {
int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
int64_t now_ms) const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
const int64_t max_wait_time_ms =
render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_;
@@ -211,7 +211,7 @@ int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
}
int VCMTiming::TargetVideoDelay() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return TargetDelayInternal();
}
@@ -226,7 +226,7 @@ bool VCMTiming::GetTimings(int* max_decode_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
*max_decode_ms = RequiredDecodeTimeMs();
*current_delay_ms = current_delay_ms_;
*target_delay_ms = TargetDelayInternal();
@@ -237,12 +237,12 @@ bool VCMTiming::GetTimings(int* max_decode_ms,
}
void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
timing_frame_info_.emplace(info);
}
absl::optional<TimingFrameInfo> VCMTiming::GetTimingFrameInfo() {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return timing_frame_info_;
}
diff --git a/modules/video_coding/timing.h b/modules/video_coding/timing.h
index c9efcb13b0..75b8e7d99d 100644
--- a/modules/video_coding/timing.h
+++ b/modules/video_coding/timing.h
@@ -16,7 +16,7 @@
#include "absl/types/optional.h"
#include "api/video/video_timing.h"
#include "modules/video_coding/codec_timer.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -104,30 +104,30 @@ class VCMTiming {
enum { kDelayMaxChangeMsPerS = 100 };
protected:
- int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
private:
- rtc::CriticalSection crit_sect_;
+ mutable Mutex mutex_;
Clock* const clock_;
- bool master_ RTC_GUARDED_BY(crit_sect_);
- TimestampExtrapolator* ts_extrapolator_ RTC_GUARDED_BY(crit_sect_);
- std::unique_ptr<VCMCodecTimer> codec_timer_ RTC_GUARDED_BY(crit_sect_);
- int render_delay_ms_ RTC_GUARDED_BY(crit_sect_);
+ bool master_ RTC_GUARDED_BY(mutex_);
+ TimestampExtrapolator* ts_extrapolator_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<VCMCodecTimer> codec_timer_ RTC_GUARDED_BY(mutex_);
+ int render_delay_ms_ RTC_GUARDED_BY(mutex_);
// Best-effort playout delay range for frames from capture to render.
// The receiver tries to keep the delay between |min_playout_delay_ms_|
// and |max_playout_delay_ms_| taking the network jitter into account.
// A special case is where min_playout_delay_ms_ = max_playout_delay_ms_ = 0,
// in which case the receiver tries to play the frames as they arrive.
- int min_playout_delay_ms_ RTC_GUARDED_BY(crit_sect_);
- int max_playout_delay_ms_ RTC_GUARDED_BY(crit_sect_);
- int jitter_delay_ms_ RTC_GUARDED_BY(crit_sect_);
- int current_delay_ms_ RTC_GUARDED_BY(crit_sect_);
- uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(crit_sect_);
- absl::optional<TimingFrameInfo> timing_frame_info_ RTC_GUARDED_BY(crit_sect_);
- size_t num_decoded_frames_ RTC_GUARDED_BY(crit_sect_);
+ int min_playout_delay_ms_ RTC_GUARDED_BY(mutex_);
+ int max_playout_delay_ms_ RTC_GUARDED_BY(mutex_);
+ int jitter_delay_ms_ RTC_GUARDED_BY(mutex_);
+ int current_delay_ms_ RTC_GUARDED_BY(mutex_);
+ uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_);
+ absl::optional<TimingFrameInfo> timing_frame_info_ RTC_GUARDED_BY(mutex_);
+ size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/modules/video_coding/utility/ivf_file_reader.h b/modules/video_coding/utility/ivf_file_reader.h
index eb5a21d55d..5e0634f9fd 100644
--- a/modules/video_coding/utility/ivf_file_reader.h
+++ b/modules/video_coding/utility/ivf_file_reader.h
@@ -16,6 +16,7 @@
#include "absl/types/optional.h"
#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_codec.h"
#include "rtc_base/system/file_wrapper.h"
namespace webrtc {
diff --git a/modules/video_coding/utility/ivf_file_writer.h b/modules/video_coding/utility/ivf_file_writer.h
index 5de67acdb2..140b9c06ff 100644
--- a/modules/video_coding/utility/ivf_file_writer.h
+++ b/modules/video_coding/utility/ivf_file_writer.h
@@ -17,6 +17,7 @@
#include <memory>
#include "api/video/encoded_image.h"
+#include "api/video/video_codec_type.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/time_utils.h"
diff --git a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
index db104c49d1..871e5a1692 100644
--- a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
+++ b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
@@ -38,14 +38,16 @@ constexpr uint32_t kSimulcastScreenshareMaxBitrateKbps = 1250;
class MockTemporalLayers : public Vp8FrameBufferController {
public:
- MOCK_METHOD2(NextFrameConfig, Vp8FrameConfig(size_t, uint32_t));
- MOCK_METHOD3(OnRatesUpdated, void(size_t, const std::vector<uint32_t>&, int));
- MOCK_METHOD1(UpdateConfiguration, Vp8EncoderConfig(size_t));
- MOCK_METHOD6(OnEncodeDone,
- void(size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*));
- MOCK_METHOD4(FrameEncoded, void(size_t, uint32_t, size_t, int));
- MOCK_CONST_METHOD0(Tl0PicIdx, uint8_t());
- MOCK_CONST_METHOD1(GetTemporalLayerId, int(const Vp8FrameConfig&));
+ MOCK_METHOD(Vp8FrameConfig, NextFrameConfig, (size_t, uint32_t), (override));
+ MOCK_METHOD(void,
+ OnRatesUpdated,
+ (size_t, const std::vector<uint32_t>&, int),
+ (override));
+ MOCK_METHOD(Vp8EncoderConfig, UpdateConfiguration, (size_t), (override));
+ MOCK_METHOD(void,
+ OnEncodeDone,
+ (size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*),
+ (override));
};
} // namespace
diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
index 9c89235fe2..f8ddd4db41 100644
--- a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
+++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
@@ -52,40 +52,65 @@ bool Vp9ReadSyncCode(rtc::BitBuffer* br) {
return true;
}
-bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) {
- if (profile == 2 || profile == 3) {
- // Bitdepth.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
+bool Vp9ReadColorConfig(rtc::BitBuffer* br,
+ uint8_t profile,
+ FrameInfo* frame_info) {
+ if (profile == 0 || profile == 1) {
+ frame_info->bit_detph = BitDept::k8Bit;
+ } else if (profile == 2 || profile == 3) {
+ uint32_t ten_or_twelve_bits;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&ten_or_twelve_bits, 1));
+ frame_info->bit_detph =
+ ten_or_twelve_bits ? BitDept::k12Bit : BitDept::k10Bit;
}
uint32_t color_space;
RETURN_FALSE_IF_ERROR(br->ReadBits(&color_space, 3));
+ frame_info->color_space = static_cast<ColorSpace>(color_space);
// SRGB is 7.
if (color_space != 7) {
- // YUV range flag.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
+ uint32_t color_range;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&color_range, 1));
+ frame_info->color_range =
+ color_range ? ColorRange::kFull : ColorRange::kStudio;
+
if (profile == 1 || profile == 3) {
- // 1 bit: subsampling x.
- // 1 bit: subsampling y.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(2));
+ uint32_t subsampling_x;
+ uint32_t subsampling_y;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_x, 1));
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_y, 1));
+ if (subsampling_x) {
+ frame_info->sub_sampling =
+ subsampling_y ? YuvSubsampling::k420 : YuvSubsampling::k422;
+ } else {
+ frame_info->sub_sampling =
+ subsampling_y ? YuvSubsampling::k440 : YuvSubsampling::k444;
+ }
+
uint32_t reserved_bit;
RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
if (reserved_bit) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set.";
return false;
}
+ } else {
+ // Profile 0 or 2.
+ frame_info->sub_sampling = YuvSubsampling::k420;
}
} else {
+ // SRGB
+ frame_info->color_range = ColorRange::kFull;
if (profile == 1 || profile == 3) {
+ frame_info->sub_sampling = YuvSubsampling::k444;
uint32_t reserved_bit;
RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
if (reserved_bit) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set.";
return false;
}
} else {
- RTC_LOG(LS_WARNING) << "Failed to get QP. 4:4:4 color not supported in "
- "profile 0 or 2.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. 4:4:4 color not supported"
+ " in profile 0 or 2.";
return false;
}
}
@@ -93,24 +118,38 @@ bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) {
return true;
}
-bool Vp9ReadFrameSize(rtc::BitBuffer* br) {
- // 2 bytes: frame width.
- // 2 bytes: frame height.
- return br->ConsumeBytes(4);
+bool Vp9ReadFrameSize(rtc::BitBuffer* br, FrameInfo* frame_info) {
+ // 16 bits: frame width - 1.
+ uint16_t frame_width_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_width_minus_one));
+ // 16 bits: frame height - 1.
+ uint16_t frame_height_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_height_minus_one));
+ frame_info->frame_width = frame_width_minus_one + 1;
+ frame_info->frame_height = frame_height_minus_one + 1;
+ return true;
}
-bool Vp9ReadRenderSize(rtc::BitBuffer* br) {
- uint32_t bit;
- RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1));
- if (bit) {
- // 2 bytes: render width.
- // 2 bytes: render height.
- RETURN_FALSE_IF_ERROR(br->ConsumeBytes(4));
+bool Vp9ReadRenderSize(rtc::BitBuffer* br, FrameInfo* frame_info) {
+ uint32_t render_and_frame_size_different;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&render_and_frame_size_different, 1));
+ if (render_and_frame_size_different) {
+ // 16 bits: render width - 1.
+ uint16_t render_width_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_width_minus_one));
+ // 16 bits: render height - 1.
+ uint16_t render_height_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_height_minus_one));
+ frame_info->render_width = render_width_minus_one + 1;
+ frame_info->render_height = render_height_minus_one + 1;
+ } else {
+ frame_info->render_width = frame_info->frame_width;
+ frame_info->render_height = frame_info->frame_height;
}
return true;
}
-bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) {
+bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br, FrameInfo* frame_info) {
uint32_t found_ref = 0;
for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) {
// Size in refs.
@@ -120,11 +159,11 @@ bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) {
}
if (!found_ref) {
- if (!Vp9ReadFrameSize(br)) {
+ if (!Vp9ReadFrameSize(br, frame_info)) {
return false;
}
}
- return Vp9ReadRenderSize(br);
+ return Vp9ReadRenderSize(br, frame_info);
}
bool Vp9ReadInterpolationFilter(rtc::BitBuffer* br) {
@@ -166,14 +205,14 @@ bool Vp9ReadLoopfilter(rtc::BitBuffer* br) {
}
} // namespace
-bool GetQp(const uint8_t* buf, size_t length, int* qp) {
+bool Parse(const uint8_t* buf, size_t length, int* qp, FrameInfo* frame_info) {
rtc::BitBuffer br(buf, length);
// Frame marker.
uint32_t frame_marker;
RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_marker, 2));
if (frame_marker != 0x2) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Frame marker should be 2.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Frame marker should be 2.";
return false;
}
@@ -181,6 +220,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
uint8_t profile;
if (!Vp9ReadProfile(&br, &profile))
return false;
+ frame_info->profile = profile;
// Show existing frame.
uint32_t show_existing_frame;
@@ -195,18 +235,21 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_type, 1));
RETURN_FALSE_IF_ERROR(br.ReadBits(&show_frame, 1));
RETURN_FALSE_IF_ERROR(br.ReadBits(&error_resilient, 1));
+ frame_info->show_frame = show_frame;
+ frame_info->error_resilient = error_resilient;
- if (!frame_type) {
+ if (frame_type == 0) {
+ // Key-frame.
if (!Vp9ReadSyncCode(&br))
return false;
- if (!Vp9ReadColorConfig(&br, profile))
+ if (!Vp9ReadColorConfig(&br, profile, frame_info))
return false;
- if (!Vp9ReadFrameSize(&br))
+ if (!Vp9ReadFrameSize(&br, frame_info))
return false;
- if (!Vp9ReadRenderSize(&br))
+ if (!Vp9ReadRenderSize(&br, frame_info))
return false;
-
} else {
+ // Non-keyframe.
uint32_t intra_only = 0;
if (!show_frame)
RETURN_FALSE_IF_ERROR(br.ReadBits(&intra_only, 1));
@@ -218,14 +261,14 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
return false;
if (profile > 0) {
- if (!Vp9ReadColorConfig(&br, profile))
+ if (!Vp9ReadColorConfig(&br, profile, frame_info))
return false;
}
// Refresh frame flags.
RETURN_FALSE_IF_ERROR(br.ConsumeBits(8));
- if (!Vp9ReadFrameSize(&br))
+ if (!Vp9ReadFrameSize(&br, frame_info))
return false;
- if (!Vp9ReadRenderSize(&br))
+ if (!Vp9ReadRenderSize(&br, frame_info))
return false;
} else {
// Refresh frame flags.
@@ -237,7 +280,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
RETURN_FALSE_IF_ERROR(br.ConsumeBits(4));
}
- if (!Vp9ReadFrameSizeFromRefs(&br))
+ if (!Vp9ReadFrameSizeFromRefs(&br, frame_info))
return false;
// Allow high precision mv.
@@ -267,6 +310,20 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
return true;
}
-} // namespace vp9
+bool GetQp(const uint8_t* buf, size_t length, int* qp) {
+ FrameInfo frame_info;
+ return Parse(buf, length, qp, &frame_info);
+}
+absl::optional<FrameInfo> ParseIntraFrameInfo(const uint8_t* buf,
+ size_t length) {
+ int qp = 0;
+ FrameInfo frame_info;
+ if (Parse(buf, length, &qp, &frame_info) && frame_info.frame_width > 0) {
+ return frame_info;
+ }
+ return absl::nullopt;
+}
+
+} // namespace vp9
} // namespace webrtc
diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.h b/modules/video_coding/utility/vp9_uncompressed_header_parser.h
index 69e8de87df..a7f04670d2 100644
--- a/modules/video_coding/utility/vp9_uncompressed_header_parser.h
+++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.h
@@ -13,6 +13,7 @@
#include <stddef.h>
#include <stdint.h>
+#include "absl/types/optional.h"
namespace webrtc {
@@ -22,6 +23,65 @@ namespace vp9 {
// Returns true on success, false otherwise.
bool GetQp(const uint8_t* buf, size_t length, int* qp);
+// Bit depth per channel. Support varies by profile.
+enum class BitDept : uint8_t {
+ k8Bit = 8,
+ k10Bit = 10,
+ k12Bit = 12,
+};
+
+enum class ColorSpace : uint8_t {
+ CS_UNKNOWN = 0, // Unknown (in this case the color space must be signaled
+ // outside the VP9 bitstream).
+ CS_BT_601 = 1, // CS_BT_601 Rec. ITU-R BT.601-7
+ CS_BT_709 = 2, // Rec. ITU-R BT.709-6
+ CS_SMPTE_170 = 3, // SMPTE-170
+ CS_SMPTE_240 = 4, // SMPTE-240
+ CS_BT_2020 = 5, // Rec. ITU-R BT.2020-2
+ CS_RESERVED = 6, // Reserved
+ CS_RGB = 7, // sRGB (IEC 61966-2-1)
+};
+
+enum class ColorRange {
+ kStudio, // Studio swing:
+ // For BitDepth equals 8:
+ // Y is between 16 and 235 inclusive.
+ // U and V are between 16 and 240 inclusive.
+ // For BitDepth equals 10:
+ // Y is between 64 and 940 inclusive.
+ // U and V are between 64 and 960 inclusive.
+ // For BitDepth equals 12:
+ // Y is between 256 and 3760.
+ // U and V are between 256 and 3840 inclusive.
+ kFull // Full swing; no restriction on Y, U, V values.
+};
+
+enum class YuvSubsampling {
+ k444,
+ k440,
+ k422,
+ k420,
+};
+
+struct FrameInfo {
+ int profile = 0; // Profile 0-3 are valid.
+ bool show_frame = false;
+ bool error_resilient = false;
+ BitDept bit_detph = BitDept::k8Bit;
+ ColorSpace color_space = ColorSpace::CS_UNKNOWN;
+ ColorRange color_range;
+ YuvSubsampling sub_sampling;
+ int frame_width = 0;
+ int frame_height = 0;
+ int render_width = 0;
+ int render_height = 0;
+};
+
+// Parses frame information for a VP9 key-frame or all-intra frame from a
+// bitstream. Returns nullopt on failure or if not a key-frame.
+absl::optional<FrameInfo> ParseIntraFrameInfo(const uint8_t* buf,
+ size_t length);
+
} // namespace vp9
} // namespace webrtc
diff --git a/modules/video_coding/video_codec_initializer.cc b/modules/video_coding/video_codec_initializer.cc
index e8665b9557..7f36f99f89 100644
--- a/modules/video_coding/video_codec_initializer.cc
+++ b/modules/video_coding/video_codec_initializer.cc
@@ -75,7 +75,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
bool codec_active = false;
- for (const VideoStream& stream : streams) {
+ // Active configuration might not be fully copied to |streams| for SVC yet.
+ // Therefore the |config| is checked here.
+ for (const VideoStream& stream : config.simulcast_layers) {
if (stream.active) {
codec_active = true;
break;
@@ -205,7 +207,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
spatial_layers.back().maxBitrate = video_codec.maxBitrate;
}
- for (size_t spatial_idx = 0;
+ for (size_t spatial_idx = first_active_layer;
spatial_idx < config.simulcast_layers.size() &&
spatial_idx < spatial_layers.size();
++spatial_idx) {
@@ -219,6 +221,14 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
video_codec.spatialLayers[i] = spatial_layers[i];
}
+ // The top spatial layer dimensions may not be equal to the input
+ // resolution because of the rounding or explicit configuration.
+ // This difference must be propagated to the stream configuration.
+ video_codec.width = spatial_layers.back().width;
+ video_codec.height = spatial_layers.back().height;
+ video_codec.simulcastStream[0].width = spatial_layers.back().width;
+ video_codec.simulcastStream[0].height = spatial_layers.back().height;
+
// Update layering settings.
video_codec.VP9()->numberOfSpatialLayers =
static_cast<unsigned char>(spatial_layers.size());
diff --git a/modules/video_coding/video_coding_impl.cc b/modules/video_coding/video_coding_impl.cc
index 1d12ac93f0..829a3f0c11 100644
--- a/modules/video_coding/video_coding_impl.cc
+++ b/modules/video_coding/video_coding_impl.cc
@@ -16,7 +16,6 @@
#include "api/video/encoded_image.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/timing.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/thread_checker.h"
#include "system_wrappers/include/clock.h"
diff --git a/modules/video_coding/video_coding_impl.h b/modules/video_coding/video_coding_impl.h
index eaab639dbf..440d1998bc 100644
--- a/modules/video_coding/video_coding_impl.h
+++ b/modules/video_coding/video_coding_impl.h
@@ -24,6 +24,7 @@
#include "modules/video_coding/receiver.h"
#include "modules/video_coding/timing.h"
#include "rtc_base/one_time_event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -100,7 +101,7 @@ class VideoReceiver : public Module {
rtc::ThreadChecker decoder_thread_checker_;
rtc::ThreadChecker module_thread_checker_;
Clock* const clock_;
- rtc::CriticalSection process_crit_;
+ Mutex process_mutex_;
VCMTiming* _timing;
VCMReceiver _receiver;
VCMDecodedFrameCallback _decodedFrameCallback;
@@ -111,8 +112,8 @@ class VideoReceiver : public Module {
VCMPacketRequestCallback* _packetRequestCallback;
// Used on both the module and decoder thread.
- bool _scheduleKeyRequest RTC_GUARDED_BY(process_crit_);
- bool drop_frames_until_keyframe_ RTC_GUARDED_BY(process_crit_);
+ bool _scheduleKeyRequest RTC_GUARDED_BY(process_mutex_);
+ bool drop_frames_until_keyframe_ RTC_GUARDED_BY(process_mutex_);
// Modified on the construction thread while not attached to the process
// thread. Once attached to the process thread, its value is only read
diff --git a/modules/video_coding/video_receiver.cc b/modules/video_coding/video_receiver.cc
index a817293f2f..a227a8c337 100644
--- a/modules/video_coding/video_receiver.cc
+++ b/modules/video_coding/video_receiver.cc
@@ -31,7 +31,6 @@
#include "modules/video_coding/timing.h"
#include "modules/video_coding/video_coding_impl.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/one_time_event.h"
@@ -71,7 +70,7 @@ void VideoReceiver::Process() {
_keyRequestTimer.Processed();
bool request_key_frame = _frameTypeCallback != nullptr;
if (request_key_frame) {
- rtc::CritScope cs(&process_crit_);
+ MutexLock lock(&process_mutex_);
request_key_frame = _scheduleKeyRequest;
}
if (request_key_frame)
@@ -94,7 +93,7 @@ void VideoReceiver::Process() {
ret = RequestKeyFrame();
}
if (ret == VCM_OK && !nackList.empty()) {
- rtc::CritScope cs(&process_crit_);
+ MutexLock lock(&process_mutex_);
if (_packetRequestCallback != nullptr) {
_packetRequestCallback->ResendPackets(&nackList[0], nackList.size());
}
@@ -183,7 +182,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
bool drop_frame = false;
{
- rtc::CritScope cs(&process_crit_);
+ MutexLock lock(&process_mutex_);
if (drop_frames_until_keyframe_) {
// Still getting delta frames, schedule another keyframe request as if
// decode failed.
@@ -229,7 +228,7 @@ int32_t VideoReceiver::RequestKeyFrame() {
if (ret < 0) {
return ret;
}
- rtc::CritScope cs(&process_crit_);
+ MutexLock lock(&process_mutex_);
_scheduleKeyRequest = false;
} else {
return VCM_MISSING_CALLBACK;
@@ -291,7 +290,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
// request scheduling to throttle the requests.
if (ret == VCM_FLUSH_INDICATOR) {
{
- rtc::CritScope cs(&process_crit_);
+ MutexLock lock(&process_mutex_);
drop_frames_until_keyframe_ = true;
}
RequestKeyFrame();
diff --git a/modules/video_coding/video_receiver_unittest.cc b/modules/video_coding/video_receiver_unittest.cc
index 363838b846..2872c8d2a9 100644
--- a/modules/video_coding/video_receiver_unittest.cc
+++ b/modules/video_coding/video_receiver_unittest.cc
@@ -26,8 +26,10 @@ namespace {
class MockPacketRequestCallback : public VCMPacketRequestCallback {
public:
- MOCK_METHOD2(ResendPackets,
- int32_t(const uint16_t* sequenceNumbers, uint16_t length));
+ MOCK_METHOD(int32_t,
+ ResendPackets,
+ (const uint16_t* sequenceNumbers, uint16_t length),
+ (override));
};
class MockVCMReceiveCallback : public VCMReceiveCallback {
@@ -35,11 +37,12 @@ class MockVCMReceiveCallback : public VCMReceiveCallback {
MockVCMReceiveCallback() {}
virtual ~MockVCMReceiveCallback() {}
- MOCK_METHOD4(
- FrameToRender,
- int32_t(VideoFrame&, absl::optional<uint8_t>, int32_t, VideoContentType));
- MOCK_METHOD1(OnIncomingPayloadType, void(int));
- MOCK_METHOD1(OnDecoderImplementationName, void(const char*));
+ MOCK_METHOD(int32_t,
+ FrameToRender,
+ (VideoFrame&, absl::optional<uint8_t>, int32_t, VideoContentType),
+ (override));
+ MOCK_METHOD(void, OnIncomingPayloadType, (int), (override));
+ MOCK_METHOD(void, OnDecoderImplementationName, (const char*), (override));
};
class TestVideoReceiver : public ::testing::Test {
diff --git a/p2p/BUILD.gn b/p2p/BUILD.gn
index ae49deb264..98680f62d2 100644
--- a/p2p/BUILD.gn
+++ b/p2p/BUILD.gn
@@ -99,7 +99,6 @@ rtc_library("rtc_p2p") {
"../rtc_base:checks",
"../rtc_base:rtc_numerics",
"../rtc_base/experiments:field_trial_parser",
- "//third_party/abseil-cpp/absl/memory",
# Needed by pseudo_tcp, which should move to a separate target.
"../rtc_base:safe_minmax",
@@ -107,11 +106,15 @@ rtc_library("rtc_p2p") {
"../rtc_base/memory:fifo_buffer",
"../rtc_base/network:sent_packet",
"../rtc_base/system:rtc_export",
+ "../rtc_base/task_utils:to_queued_task",
"../rtc_base/third_party/base64",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -127,6 +130,8 @@ if (rtc_include_tests) {
"../api:libjingle_peerconnection_api",
"../rtc_base",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -169,6 +174,8 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/third_party/sigslot",
"../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -221,8 +228,11 @@ if (rtc_include_tests) {
"../test:field_trial",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
}
@@ -243,8 +253,8 @@ rtc_library("p2p_server_utils") {
"../rtc_base:checks",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/third_party/sigslot",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("libstunprober") {
diff --git a/p2p/base/basic_async_resolver_factory_unittest.cc b/p2p/base/basic_async_resolver_factory_unittest.cc
index 0c21c682fb..8242146bae 100644
--- a/p2p/base/basic_async_resolver_factory_unittest.cc
+++ b/p2p/base/basic_async_resolver_factory_unittest.cc
@@ -30,6 +30,7 @@ class BasicAsyncResolverFactoryTest : public ::testing::Test,
rtc::SocketAddress address("", 0);
resolver->Start(address);
ASSERT_TRUE_WAIT(address_resolved_, 10000 /*ms*/);
+ resolver->Destroy(false);
}
void SetAddressResolved(rtc::AsyncResolverInterface* resolver) {
diff --git a/p2p/base/connection.cc b/p2p/base/connection.cc
index a9d570652a..0863865a04 100644
--- a/p2p/base/connection.cc
+++ b/p2p/base/connection.cc
@@ -461,6 +461,7 @@ void Connection::OnReadPacket(const char* data,
last_data_received_ = rtc::TimeMillis();
UpdateReceiving(last_data_received_);
recv_rate_tracker_.AddSamples(size);
+ stats_.packets_received++;
SignalReadPacket(this, data, size, packet_time_us);
// If timed out sending writability checks, start up again
@@ -918,12 +919,31 @@ void Connection::ReceivedPingResponse(
bool Connection::dead(int64_t now) const {
if (last_received() > 0) {
- // If it has ever received anything, we keep it alive until it hasn't
- // received anything for DEAD_CONNECTION_RECEIVE_TIMEOUT. This covers the
- // normal case of a successfully used connection that stops working. This
- // also allows a remote peer to continue pinging over a locally inactive
- // (pruned) connection.
- return (now > (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT));
+ // If it has ever received anything, we keep it alive
+ // - if it has recevied last DEAD_CONNECTION_RECEIVE_TIMEOUT (30s)
+ // - if it has a ping outstanding shorter than
+ // DEAD_CONNECTION_RECEIVE_TIMEOUT (30s)
+ // - else if IDLE let it live field_trials_->dead_connection_timeout_ms
+ //
+ // This covers the normal case of a successfully used connection that stops
+ // working. This also allows a remote peer to continue pinging over a
+ // locally inactive (pruned) connection. This also allows the local agent to
+ // ping with longer interval than 30s as long as it shorter than
+ // |dead_connection_timeout_ms|.
+ if (now <= (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT)) {
+ // Not dead since we have received the last 30s.
+ return false;
+ }
+ if (!pings_since_last_response_.empty()) {
+ // Outstanding pings: let it live until the ping is unreplied for
+ // DEAD_CONNECTION_RECEIVE_TIMEOUT.
+ return now > (pings_since_last_response_[0].sent_time +
+ DEAD_CONNECTION_RECEIVE_TIMEOUT);
+ }
+
+ // No outstanding pings: let it live until
+ // field_trials_->dead_connection_timeout_ms has passed.
+ return now > (last_received() + field_trials_->dead_connection_timeout_ms);
}
if (active()) {
diff --git a/p2p/base/connection_info.cc b/p2p/base/connection_info.cc
index a4f8036769..ebea2ab5b0 100644
--- a/p2p/base/connection_info.cc
+++ b/p2p/base/connection_info.cc
@@ -28,6 +28,7 @@ ConnectionInfo::ConnectionInfo()
sent_ping_responses(0),
recv_total_bytes(0),
recv_bytes_second(0),
+ packets_received(0),
recv_ping_requests(0),
recv_ping_responses(0),
key(nullptr),
diff --git a/p2p/base/connection_info.h b/p2p/base/connection_info.h
index a62e8aec00..b5e1c14433 100644
--- a/p2p/base/connection_info.h
+++ b/p2p/base/connection_info.h
@@ -54,6 +54,7 @@ struct ConnectionInfo {
size_t recv_total_bytes; // Total bytes received on this connection.
size_t recv_bytes_second; // Bps over the last measurement interval.
+ size_t packets_received; // Number of packets that were received.
size_t recv_ping_requests; // Number of STUN ping request received.
size_t recv_ping_responses; // Number of STUN ping response received.
Candidate local_candidate; // The local candidate for this connection.
diff --git a/p2p/base/ice_controller_interface.h b/p2p/base/ice_controller_interface.h
index cc4cf4d0d7..d5dc29e782 100644
--- a/p2p/base/ice_controller_interface.h
+++ b/p2p/base/ice_controller_interface.h
@@ -51,12 +51,20 @@ struct IceControllerEvent {
// - which connection to ping
// - which connection to use
// - which connection to prune
+// - which connection to forget learned state on
//
-// P2PTransportChannel creates a |Connection| and adds a const pointer
-// to the IceController using |AddConnection|, i.e the IceController
-// should not call any non-const methods on a Connection.
+// The P2PTransportChannel owns (creates and destroys) Connections,
+// but P2PTransportChannel gives const pointers to the the IceController using
+// |AddConnection|, i.e the IceController should not call any non-const methods
+// on a Connection but signal back in the interface if any mutable function
+// shall be called.
//
-// The IceController shall keeps track of all connections added
+// Current these are limited to:
+// Connection::Ping - returned in PingResult
+// Connection::Prune - retuned in PruneConnections
+// Connection::ForgetLearnedState - return in SwitchResult
+//
+// The IceController shall keep track of all connections added
// (and not destroyed) and give them back using the connections()-function-
//
// When a Connection gets destroyed
@@ -71,6 +79,9 @@ class IceControllerInterface {
// An optional recheck event for when a Switch() should be attempted again.
absl::optional<IceControllerEvent> recheck_event;
+
+ // A vector with connection to run ForgetLearnedState on.
+ std::vector<const Connection*> connections_to_forget_state_on;
};
// This represents the result of a call to SelectConnectionToPing.
diff --git a/p2p/base/mock_async_resolver.h b/p2p/base/mock_async_resolver.h
index 7d3be5b0b0..8bc0eb9cff 100644
--- a/p2p/base/mock_async_resolver.h
+++ b/p2p/base/mock_async_resolver.h
@@ -29,14 +29,17 @@ class MockAsyncResolver : public AsyncResolverInterface {
}
~MockAsyncResolver() = default;
- MOCK_METHOD1(Start, void(const rtc::SocketAddress&));
- MOCK_CONST_METHOD2(GetResolvedAddress, bool(int family, SocketAddress* addr));
- MOCK_CONST_METHOD0(GetError, int());
+ MOCK_METHOD(void, Start, (const rtc::SocketAddress&), (override));
+ MOCK_METHOD(bool,
+ GetResolvedAddress,
+ (int family, SocketAddress* addr),
+ (const, override));
+ MOCK_METHOD(int, GetError, (), (const, override));
// Note that this won't delete the object like AsyncResolverInterface says in
// order to avoid sanitizer failures caused by this being a synchronous
// implementation. The test code should delete the object instead.
- MOCK_METHOD1(Destroy, void(bool));
+ MOCK_METHOD(void, Destroy, (bool), (override));
};
} // namespace rtc
@@ -45,7 +48,7 @@ namespace webrtc {
class MockAsyncResolverFactory : public AsyncResolverFactory {
public:
- MOCK_METHOD0(Create, rtc::AsyncResolverInterface*());
+ MOCK_METHOD(rtc::AsyncResolverInterface*, Create, (), (override));
};
} // namespace webrtc
diff --git a/p2p/base/mock_ice_transport.h b/p2p/base/mock_ice_transport.h
index 1436cacb50..ef9f1b18ea 100644
--- a/p2p/base/mock_ice_transport.h
+++ b/p2p/base/mock_ice_transport.h
@@ -32,15 +32,20 @@ class MockIceTransport : public IceTransportInternal {
SignalWritableState(this);
}
- MOCK_METHOD4(SendPacket,
- int(const char* data,
- size_t len,
- const rtc::PacketOptions& options,
- int flags));
- MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value));
- MOCK_METHOD0(GetError, int());
- MOCK_CONST_METHOD0(GetIceRole, cricket::IceRole());
- MOCK_METHOD1(GetStats, bool(cricket::IceTransportStats* ice_transport_stats));
+ MOCK_METHOD(int,
+ SendPacket,
+ (const char* data,
+ size_t len,
+ const rtc::PacketOptions& options,
+ int flags),
+ (override));
+ MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override));
+ MOCK_METHOD(int, GetError, (), (override));
+ MOCK_METHOD(cricket::IceRole, GetIceRole, (), (const, override));
+ MOCK_METHOD(bool,
+ GetStats,
+ (cricket::IceTransportStats * ice_transport_stats),
+ (override));
IceTransportState GetState() const override {
return IceTransportState::STATE_INIT;
diff --git a/p2p/base/p2p_transport_channel.cc b/p2p/base/p2p_transport_channel.cc
index 33325693b9..6f0df04150 100644
--- a/p2p/base/p2p_transport_channel.cc
+++ b/p2p/base/p2p_transport_channel.cc
@@ -30,6 +30,7 @@
#include "rtc_base/net_helper.h"
#include "rtc_base/net_helpers.h"
#include "rtc_base/string_encode.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
@@ -274,8 +275,7 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection(
if (result.connection.has_value()) {
RTC_LOG(LS_INFO) << "Switching selected connection due to: "
<< reason.ToString();
- SwitchSelectedConnection(const_cast<Connection*>(*result.connection),
- reason);
+ SwitchSelectedConnection(FromIceController(*result.connection), reason);
}
if (result.recheck_event.has_value()) {
@@ -290,6 +290,10 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection(
result.recheck_event->recheck_delay_ms);
}
+ for (const auto* con : result.connections_to_forget_state_on) {
+ FromIceController(con)->ForgetLearnedState();
+ }
+
return result.connection.has_value();
}
@@ -694,9 +698,18 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) {
&field_trials_.send_ping_on_switch_ice_controlling,
// Reply to nomination ASAP.
"send_ping_on_nomination_ice_controlled",
- &field_trials_.send_ping_on_nomination_ice_controlled)
+ &field_trials_.send_ping_on_nomination_ice_controlled,
+ // Allow connections to live untouched longer that 30s.
+ "dead_connection_timeout_ms", &field_trials_.dead_connection_timeout_ms)
->Parse(webrtc::field_trial::FindFullName("WebRTC-IceFieldTrials"));
+ if (field_trials_.dead_connection_timeout_ms < 30000) {
+ RTC_LOG(LS_WARNING) << "dead_connection_timeout_ms set to "
+ << field_trials_.dead_connection_timeout_ms
+ << " increasing it to 30000";
+ field_trials_.dead_connection_timeout_ms = 30000;
+ }
+
if (field_trials_.skip_relay_to_non_relay_connections) {
RTC_LOG(LS_INFO) << "Set skip_relay_to_non_relay_connections";
}
@@ -1185,7 +1198,12 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) {
}
if (new_remote_candidate.address().IsUnresolvedIP()) {
- ResolveHostnameCandidate(new_remote_candidate);
+ // Don't do DNS lookups if the IceTransportPolicy is "none" or "relay".
+ bool sharing_host = ((allocator_->candidate_filter() & CF_HOST) != 0);
+ bool sharing_stun = ((allocator_->candidate_filter() & CF_REFLEXIVE) != 0);
+ if (sharing_host || sharing_stun) {
+ ResolveHostnameCandidate(new_remote_candidate);
+ }
return;
}
@@ -1214,9 +1232,8 @@ void P2PTransportChannel::OnCandidateResolved(
Candidate candidate = p->candidate_;
resolvers_.erase(p);
AddRemoteCandidateWithResolver(candidate, resolver);
- invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, thread(),
- rtc::Bind(&rtc::AsyncResolverInterface::Destroy, resolver, false));
+ thread()->PostTask(
+ webrtc::ToQueuedTask([] {}, [resolver] { resolver->Destroy(false); }));
}
void P2PTransportChannel::AddRemoteCandidateWithResolver(
@@ -1389,7 +1406,7 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port,
return false;
}
-bool P2PTransportChannel::FindConnection(Connection* connection) const {
+bool P2PTransportChannel::FindConnection(const Connection* connection) const {
RTC_DCHECK_RUN_ON(network_thread_);
return absl::c_linear_search(connections(), connection);
}
@@ -1695,7 +1712,7 @@ void P2PTransportChannel::PruneConnections() {
std::vector<const Connection*> connections_to_prune =
ice_controller_->PruneConnections();
for (const Connection* conn : connections_to_prune) {
- const_cast<Connection*>(conn)->Prune();
+ FromIceController(conn)->Prune();
}
}
@@ -1898,11 +1915,10 @@ void P2PTransportChannel::CheckAndPing() {
UpdateConnectionStates();
auto result = ice_controller_->SelectConnectionToPing(last_ping_sent_ms_);
- Connection* conn =
- const_cast<Connection*>(result.connection.value_or(nullptr));
int delay = result.recheck_delay_ms;
- if (conn) {
+ if (result.connection.value_or(nullptr)) {
+ Connection* conn = FromIceController(*result.connection);
PingConnection(conn);
MarkConnectionPinged(conn);
}
@@ -1915,7 +1931,12 @@ void P2PTransportChannel::CheckAndPing() {
// This method is only for unit testing.
Connection* P2PTransportChannel::FindNextPingableConnection() {
RTC_DCHECK_RUN_ON(network_thread_);
- return const_cast<Connection*>(ice_controller_->FindNextPingableConnection());
+ auto* conn = ice_controller_->FindNextPingableConnection();
+ if (conn) {
+ return FromIceController(conn);
+ } else {
+ return nullptr;
+ }
}
// A connection is considered a backup connection if the channel state
diff --git a/p2p/base/p2p_transport_channel.h b/p2p/base/p2p_transport_channel.h
index 3d6c86f031..4f891beb1e 100644
--- a/p2p/base/p2p_transport_channel.h
+++ b/p2p/base/p2p_transport_channel.h
@@ -245,7 +245,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal {
bool CreateConnection(PortInterface* port,
const Candidate& remote_candidate,
PortInterface* origin_port);
- bool FindConnection(Connection* connection) const;
+ bool FindConnection(const Connection* connection) const;
uint32_t GetRemoteCandidateGeneration(const Candidate& candidate);
bool IsDuplicateRemoteCandidate(const Candidate& candidate);
@@ -348,6 +348,16 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal {
// 2. Peer-reflexive remote candidates.
Candidate SanitizeRemoteCandidate(const Candidate& c) const;
+ // Cast a Connection returned from IceController and verify that it exists.
+ // (P2P owns all Connections, and only gives const pointers to IceController,
+ // see IceControllerInterface).
+ Connection* FromIceController(const Connection* conn) {
+ // Verify that IceController does not return a connection
+ // that we have destroyed.
+ RTC_DCHECK(FindConnection(conn));
+ return const_cast<Connection*>(conn);
+ }
+
std::string transport_name_ RTC_GUARDED_BY(network_thread_);
int component_ RTC_GUARDED_BY(network_thread_);
PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_);
diff --git a/p2p/base/p2p_transport_channel_ice_field_trials.h b/p2p/base/p2p_transport_channel_ice_field_trials.h
index 8b208e339e..f30366fd1f 100644
--- a/p2p/base/p2p_transport_channel_ice_field_trials.h
+++ b/p2p/base/p2p_transport_channel_ice_field_trials.h
@@ -48,6 +48,10 @@ struct IceFieldTrials {
// Sending a PING directly after a nomination on ICE_CONTROLLED-side.
bool send_ping_on_nomination_ice_controlled = false;
+
+ // The timeout after which the connection will be considered dead if no
+ // traffic is received.
+ int dead_connection_timeout_ms = 30000;
};
} // namespace cricket
diff --git a/p2p/base/p2p_transport_channel_unittest.cc b/p2p/base/p2p_transport_channel_unittest.cc
index ce78335fd9..cfdee81403 100644
--- a/p2p/base/p2p_transport_channel_unittest.cc
+++ b/p2p/base/p2p_transport_channel_unittest.cc
@@ -177,14 +177,14 @@ cricket::BasicPortAllocator* CreateBasicPortAllocator(
class MockIceControllerFactory : public cricket::IceControllerFactoryInterface {
public:
- ~MockIceControllerFactory() = default;
+ ~MockIceControllerFactory() override = default;
std::unique_ptr<cricket::IceControllerInterface> Create(
- const cricket::IceControllerFactoryArgs& args) {
+ const cricket::IceControllerFactoryArgs& args) override {
RecordIceControllerCreated();
return std::make_unique<cricket::BasicIceController>(args);
}
- MOCK_METHOD0(RecordIceControllerCreated, void());
+ MOCK_METHOD(void, RecordIceControllerCreated, ());
};
} // namespace
@@ -1284,6 +1284,7 @@ TEST_F(P2PTransportChannelTest, GetStats) {
ep2_ch1()->receiving() &&
ep2_ch1()->writable(),
kMediumTimeout, clock);
+ // Sends and receives 10 packets.
TestSendRecv(&clock);
IceTransportStats ice_transport_stats;
ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
@@ -1306,6 +1307,7 @@ TEST_F(P2PTransportChannelTest, GetStats) {
EXPECT_EQ(0U, best_conn_info->sent_discarded_packets);
EXPECT_EQ(10 * 36U, best_conn_info->sent_total_bytes);
EXPECT_EQ(10 * 36U, best_conn_info->recv_total_bytes);
+ EXPECT_EQ(10U, best_conn_info->packets_received);
DestroyChannels();
}
@@ -1480,7 +1482,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) {
PauseCandidates(1);
// Wait until the callee becomes writable to make sure that a ping request is
- // received by the caller before his remote ICE credentials are set.
+ // received by the caller before their remote ICE credentials are set.
ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Add two sets of remote ICE credentials, so that the ones used by the
// candidate will be generation 1 instead of 0.
@@ -1588,7 +1590,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) {
PauseCandidates(1);
// Wait until the callee becomes writable to make sure that a ping request is
- // received by the caller before his remote ICE credentials are set.
+ // received by the caller before their remote ICE credentials are set.
ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Add two sets of remote ICE credentials, so that the ones used by the
// candidate will be generation 1 instead of 0.
@@ -4844,10 +4846,13 @@ TEST_F(P2PTransportChannelTest,
// address after the resolution completes.
TEST_F(P2PTransportChannelTest,
PeerReflexiveCandidateDuringResolvingHostCandidateWithMdnsName) {
- NiceMock<rtc::MockAsyncResolver> mock_async_resolver;
+ auto mock_async_resolver = new NiceMock<rtc::MockAsyncResolver>();
+ ON_CALL(*mock_async_resolver, Destroy).WillByDefault([mock_async_resolver] {
+ delete mock_async_resolver;
+ });
webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
EXPECT_CALL(mock_async_resolver_factory, Create())
- .WillOnce(Return(&mock_async_resolver));
+ .WillOnce(Return(mock_async_resolver));
// ep1 and ep2 will only gather host candidates with addresses
// kPublicAddrs[0] and kPublicAddrs[1], respectively.
@@ -4874,7 +4879,7 @@ TEST_F(P2PTransportChannelTest,
bool mock_async_resolver_started = false;
// Not signaling done yet, and only make sure we are in the process of
// resolution.
- EXPECT_CALL(mock_async_resolver, Start(_))
+ EXPECT_CALL(*mock_async_resolver, Start(_))
.WillOnce(InvokeWithoutArgs([&mock_async_resolver_started]() {
mock_async_resolver_started = true;
}));
@@ -4887,7 +4892,7 @@ TEST_F(P2PTransportChannelTest,
ResumeCandidates(1);
ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Let the mock resolver of ep2 receives the correct resolution.
- EXPECT_CALL(mock_async_resolver, GetResolvedAddress(_, _))
+ EXPECT_CALL(*mock_async_resolver, GetResolvedAddress(_, _))
.WillOnce(DoAll(SetArgPointee<1>(local_address), Return(true)));
// Upon receiving a ping from ep1, ep2 adds a prflx candidate from the
// unknown address and establishes a connection.
@@ -4899,7 +4904,7 @@ TEST_F(P2PTransportChannelTest,
ep2_ch1()->selected_connection()->remote_candidate().type());
// ep2 should also be able resolve the hostname candidate. The resolved remote
// host candidate should be merged with the prflx remote candidate.
- mock_async_resolver.SignalDone(&mock_async_resolver);
+ mock_async_resolver->SignalDone(mock_async_resolver);
EXPECT_EQ_WAIT(LOCAL_PORT_TYPE,
ep2_ch1()->selected_connection()->remote_candidate().type(),
kMediumTimeout);
@@ -5255,10 +5260,14 @@ TEST_F(P2PTransportChannelTest,
class MockMdnsResponder : public webrtc::MdnsResponderInterface {
public:
- MOCK_METHOD2(CreateNameForAddress,
- void(const rtc::IPAddress&, NameCreatedCallback));
- MOCK_METHOD2(RemoveNameForAddress,
- void(const rtc::IPAddress&, NameRemovedCallback));
+ MOCK_METHOD(void,
+ CreateNameForAddress,
+ (const rtc::IPAddress&, NameCreatedCallback),
+ (override));
+ MOCK_METHOD(void,
+ RemoveNameForAddress,
+ (const rtc::IPAddress&, NameRemovedCallback),
+ (override));
};
TEST_F(P2PTransportChannelTest,
@@ -5539,6 +5548,76 @@ TEST_F(P2PTransportChannelTest,
DestroyChannels();
}
+// Verify that things break unless
+// - both parties use the surface_ice_candidates_on_ice_transport_type_changed
+// - both parties loosen candidate filter at the same time (approx.).
+//
+// i.e surface_ice_candidates_on_ice_transport_type_changed requires
+// coordination outside of webrtc to function properly.
+TEST_F(P2PTransportChannelTest, SurfaceRequiresCoordination) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/");
+ rtc::ScopedFakeClock clock;
+
+ ConfigureEndpoints(
+ OPEN, OPEN,
+ kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+ kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+ auto* ep1 = GetEndpoint(0);
+ auto* ep2 = GetEndpoint(1);
+ ep1->allocator_->SetCandidateFilter(CF_RELAY);
+ ep2->allocator_->SetCandidateFilter(CF_ALL);
+ // Enable continual gathering and also resurfacing gathered candidates upon
+ // the candidate filter changed in the ICE configuration.
+ IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+ ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
+ // Pause candidates gathering so we can gather all types of candidates. See
+ // P2PTransportChannel::OnConnectionStateChange, where we would stop the
+ // gathering when we have a strongly connected candidate pair.
+ PauseCandidates(0);
+ PauseCandidates(1);
+ CreateChannels(ice_config, ice_config);
+
+ // On the caller we only have relay,
+ // on the callee we have host, srflx and relay.
+ EXPECT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 1u,
+ kDefaultTimeout, clock);
+ EXPECT_TRUE_SIMULATED_WAIT(ep2->saved_candidates_.size() == 3u,
+ kDefaultTimeout, clock);
+
+ ResumeCandidates(0);
+ ResumeCandidates(1);
+ ASSERT_TRUE_SIMULATED_WAIT(
+ ep1_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->local_candidate().type() &&
+ ep2_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->remote_candidate().type(),
+ kDefaultTimeout, clock);
+ ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
+ kDefaultTimeout, clock);
+
+ // Wait until the callee discards it's candidates
+ // since they don't manage to connect.
+ SIMULATED_WAIT(false, 300000, clock);
+
+ // And then loosen caller candidate filter.
+ ep1->allocator_->SetCandidateFilter(CF_ALL);
+
+ SIMULATED_WAIT(false, kDefaultTimeout, clock);
+
+ // No p2p connection will be made, it will remain on relay.
+ EXPECT_TRUE(ep1_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->local_candidate().type() &&
+ ep2_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->remote_candidate().type());
+
+ DestroyChannels();
+}
+
TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening0) {
webrtc::test::ScopedFieldTrials field_trials(
"WebRTC-IceFieldTrials/initial_select_dampening:0/");
@@ -5651,4 +5730,159 @@ TEST(P2PTransportChannel, InjectIceController) {
/* event_log = */ nullptr, &factory);
}
+class ForgetLearnedStateController : public cricket::BasicIceController {
+ public:
+ explicit ForgetLearnedStateController(
+ const cricket::IceControllerFactoryArgs& args)
+ : cricket::BasicIceController(args) {}
+
+ SwitchResult SortAndSwitchConnection(IceControllerEvent reason) override {
+ auto result = cricket::BasicIceController::SortAndSwitchConnection(reason);
+ if (forget_connnection_) {
+ result.connections_to_forget_state_on.push_back(forget_connnection_);
+ forget_connnection_ = nullptr;
+ }
+ result.recheck_event =
+ IceControllerEvent(IceControllerEvent::ICE_CONTROLLER_RECHECK);
+ result.recheck_event->recheck_delay_ms = 100;
+ return result;
+ }
+
+ void ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled(
+ Connection* con) {
+ forget_connnection_ = con;
+ }
+
+ private:
+ Connection* forget_connnection_ = nullptr;
+};
+
+class ForgetLearnedStateControllerFactory
+ : public cricket::IceControllerFactoryInterface {
+ public:
+ std::unique_ptr<cricket::IceControllerInterface> Create(
+ const cricket::IceControllerFactoryArgs& args) override {
+ auto controller = std::make_unique<ForgetLearnedStateController>(args);
+ // Keep a pointer to allow modifying calls.
+ // Must not be used after the p2ptransportchannel has been destructed.
+ controller_ = controller.get();
+ return controller;
+ }
+ virtual ~ForgetLearnedStateControllerFactory() = default;
+
+ ForgetLearnedStateController* controller_;
+};
+
+TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) {
+ ForgetLearnedStateControllerFactory factory;
+ FakePortAllocator pa(rtc::Thread::Current(), nullptr);
+ P2PTransportChannel ch("ping sufficiently", 1, &pa, nullptr, nullptr,
+ &factory);
+ PrepareChannel(&ch);
+ ch.MaybeStartGathering();
+ ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1));
+ ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2));
+
+ Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1);
+ Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2);
+ ASSERT_TRUE(conn1 != nullptr);
+ ASSERT_TRUE(conn2 != nullptr);
+
+ // Wait for conn1 to be selected.
+ conn1->ReceivedPingResponse(LOW_RTT, "id");
+ EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kMediumTimeout);
+
+ conn2->ReceivedPingResponse(LOW_RTT, "id");
+ EXPECT_TRUE(conn2->writable());
+
+ // Now let the ice controller signal to P2PTransportChannel that it
+ // should Forget conn2.
+ factory.controller_
+ ->ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled(conn2);
+
+ // We don't have a mock Connection, so verify this by checking that it
+ // is no longer writable.
+ EXPECT_EQ_WAIT(false, conn2->writable(), kMediumTimeout);
+}
+
+TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyRelay) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_RELAY);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ ON_CALL(mock_async_resolver_factory, Create())
+ .WillByDefault(Return(&mock_async_resolver));
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ ON_CALL(mock_async_resolver, Start(_))
+ .WillByDefault(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_FALSE(lookup_started);
+
+ DestroyChannels();
+}
+
+TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyNone) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_NONE);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ ON_CALL(mock_async_resolver_factory, Create())
+ .WillByDefault(Return(&mock_async_resolver));
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ ON_CALL(mock_async_resolver, Start(_))
+ .WillByDefault(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_FALSE(lookup_started);
+
+ DestroyChannels();
+}
+
+TEST_F(P2PTransportChannelTest, EnableDnsLookupsWithTransportPolicyNoHost) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_ALL & ~CF_HOST);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ EXPECT_CALL(mock_async_resolver_factory, Create())
+ .WillOnce(Return(&mock_async_resolver));
+ EXPECT_CALL(mock_async_resolver, Destroy(_));
+
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ EXPECT_CALL(mock_async_resolver, Start(_))
+ .WillOnce(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_TRUE(lookup_started);
+
+ DestroyChannels();
+}
+
} // namespace cricket
diff --git a/p2p/base/port_unittest.cc b/p2p/base/port_unittest.cc
index a7ac1fafdb..7703a9c281 100644
--- a/p2p/base/port_unittest.cc
+++ b/p2p/base/port_unittest.cc
@@ -64,6 +64,7 @@
#include "rtc_base/thread.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/virtual_socket_server.h"
+#include "test/field_trial.h"
#include "test/gtest.h"
using rtc::AsyncPacketSocket;
@@ -1298,6 +1299,77 @@ TEST_F(PortTest, TestConnectionDead) {
EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout);
}
+TEST_F(PortTest, TestConnectionDeadWithDeadConnectionTimeout) {
+ TestChannel ch1(CreateUdpPort(kLocalAddr1));
+ TestChannel ch2(CreateUdpPort(kLocalAddr2));
+ // Acquire address.
+ ch1.Start();
+ ch2.Start();
+ ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout);
+ ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout);
+
+ // Note: set field trials manually since they are parsed by
+ // P2PTransportChannel but P2PTransportChannel is not used in this test.
+ IceFieldTrials field_trials;
+ field_trials.dead_connection_timeout_ms = 90000;
+
+ // Create a connection again and receive a ping.
+ ch1.CreateConnection(GetCandidate(ch2.port()));
+ auto conn = ch1.conn();
+ conn->SetIceFieldTrials(&field_trials);
+
+ ASSERT_NE(conn, nullptr);
+ int64_t before_last_receiving = rtc::TimeMillis();
+ conn->ReceivedPing();
+ int64_t after_last_receiving = rtc::TimeMillis();
+ // The connection will be dead after 90s
+ conn->UpdateState(before_last_receiving + 90000 - 1);
+ rtc::Thread::Current()->ProcessMessages(100);
+ EXPECT_TRUE(ch1.conn() != nullptr);
+ conn->UpdateState(after_last_receiving + 90000 + 1);
+ EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout);
+}
+
+TEST_F(PortTest, TestConnectionDeadOutstandingPing) {
+ auto port1 = CreateUdpPort(kLocalAddr1);
+ port1->SetIceRole(cricket::ICEROLE_CONTROLLING);
+ port1->SetIceTiebreaker(kTiebreaker1);
+ auto port2 = CreateUdpPort(kLocalAddr2);
+ port2->SetIceRole(cricket::ICEROLE_CONTROLLED);
+ port2->SetIceTiebreaker(kTiebreaker2);
+
+ TestChannel ch1(std::move(port1));
+ TestChannel ch2(std::move(port2));
+ // Acquire address.
+ ch1.Start();
+ ch2.Start();
+ ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout);
+ ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout);
+
+ // Note: set field trials manually since they are parsed by
+ // P2PTransportChannel but P2PTransportChannel is not used in this test.
+ IceFieldTrials field_trials;
+ field_trials.dead_connection_timeout_ms = 360000;
+
+ // Create a connection again and receive a ping and then send
+ // a ping and keep it outstanding.
+ ch1.CreateConnection(GetCandidate(ch2.port()));
+ auto conn = ch1.conn();
+ conn->SetIceFieldTrials(&field_trials);
+
+ ASSERT_NE(conn, nullptr);
+ conn->ReceivedPing();
+ int64_t send_ping_timestamp = rtc::TimeMillis();
+ conn->Ping(send_ping_timestamp);
+
+ // The connection will be dead 30s after the ping was sent.
+ conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1);
+ rtc::Thread::Current()->ProcessMessages(100);
+ EXPECT_TRUE(ch1.conn() != nullptr);
+ conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT + 1);
+ EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout);
+}
+
// This test case verifies standard ICE features in STUN messages. Currently it
// verifies Message Integrity attribute in STUN messages and username in STUN
// binding request will have colon (":") between remote and local username.
diff --git a/p2p/base/stun_port_unittest.cc b/p2p/base/stun_port_unittest.cc
index dfc72362ce..2804ac03af 100644
--- a/p2p/base/stun_port_unittest.cc
+++ b/p2p/base/stun_port_unittest.cc
@@ -412,24 +412,29 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket {
public:
~MockAsyncPacketSocket() = default;
- MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress());
- MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress());
- MOCK_METHOD3(Send,
- int(const void* pv,
- size_t cb,
- const rtc::PacketOptions& options));
-
- MOCK_METHOD4(SendTo,
- int(const void* pv,
- size_t cb,
- const SocketAddress& addr,
- const rtc::PacketOptions& options));
- MOCK_METHOD0(Close, int());
- MOCK_CONST_METHOD0(GetState, State());
- MOCK_METHOD2(GetOption, int(rtc::Socket::Option opt, int* value));
- MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value));
- MOCK_CONST_METHOD0(GetError, int());
- MOCK_METHOD1(SetError, void(int error));
+ MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override));
+ MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override));
+ MOCK_METHOD(int,
+ Send,
+ (const void* pv, size_t cb, const rtc::PacketOptions& options),
+ (override));
+
+ MOCK_METHOD(int,
+ SendTo,
+ (const void* pv,
+ size_t cb,
+ const SocketAddress& addr,
+ const rtc::PacketOptions& options),
+ (override));
+ MOCK_METHOD(int, Close, (), (override));
+ MOCK_METHOD(State, GetState, (), (const, override));
+ MOCK_METHOD(int,
+ GetOption,
+ (rtc::Socket::Option opt, int* value),
+ (override));
+ MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override));
+ MOCK_METHOD(int, GetError, (), (const, override));
+ MOCK_METHOD(void, SetError, (int error), (override));
};
// Test that outbound packets inherit the dscp value assigned to the socket.
diff --git a/p2p/base/transport_description.cc b/p2p/base/transport_description.cc
index 729b4ae8c3..96fb9597e0 100644
--- a/p2p/base/transport_description.cc
+++ b/p2p/base/transport_description.cc
@@ -172,8 +172,7 @@ TransportDescription::TransportDescription(const TransportDescription& from)
ice_pwd(from.ice_pwd),
ice_mode(from.ice_mode),
connection_role(from.connection_role),
- identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())),
- opaque_parameters(from.opaque_parameters) {}
+ identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())) {}
TransportDescription::~TransportDescription() = default;
@@ -190,7 +189,6 @@ TransportDescription& TransportDescription::operator=(
connection_role = from.connection_role;
identity_fingerprint.reset(CopyFingerprint(from.identity_fingerprint.get()));
- opaque_parameters = from.opaque_parameters;
return *this;
}
diff --git a/p2p/base/transport_description.h b/p2p/base/transport_description.h
index 1a458c9571..32fdb5c9b3 100644
--- a/p2p/base/transport_description.h
+++ b/p2p/base/transport_description.h
@@ -100,28 +100,6 @@ constexpr auto* ICE_OPTION_RENOMINATION = "renomination";
bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role);
bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str);
-// Parameters for an opaque transport protocol which may be plugged into WebRTC.
-struct OpaqueTransportParameters {
- // Protocol used by this opaque transport. Two endpoints that support the
- // same protocol are expected to be able to understand the contents of each
- // others' |parameters| fields. If those parameters are compatible, the
- // endpoints are expected to use this transport protocol.
- std::string protocol;
-
- // Opaque parameters for this transport. These parameters are serialized in a
- // manner determined by the |protocol|. They can be parsed and understood by
- // the plugin that supports |protocol|.
- std::string parameters;
-
- bool operator==(const OpaqueTransportParameters& other) const {
- return protocol == other.protocol && parameters == other.parameters;
- }
-
- bool operator!=(const OpaqueTransportParameters& other) const {
- return !(*this == other);
- }
-};
-
struct TransportDescription {
TransportDescription();
TransportDescription(const std::vector<std::string>& transport_options,
@@ -168,7 +146,6 @@ struct TransportDescription {
ConnectionRole connection_role;
std::unique_ptr<rtc::SSLFingerprint> identity_fingerprint;
- absl::optional<OpaqueTransportParameters> opaque_parameters;
};
} // namespace cricket
diff --git a/p2p/base/transport_description_factory.cc b/p2p/base/transport_description_factory.cc
index 17152d1a04..5cce2ac09d 100644
--- a/p2p/base/transport_description_factory.cc
+++ b/p2p/base/transport_description_factory.cc
@@ -55,8 +55,6 @@ std::unique_ptr<TransportDescription> TransportDescriptionFactory::CreateOffer(
}
}
- desc->opaque_parameters = options.opaque_parameters;
-
return desc;
}
@@ -110,13 +108,6 @@ std::unique_ptr<TransportDescription> TransportDescriptionFactory::CreateAnswer(
return NULL;
}
- // Answers may only attach opaque parameters if the offer contained them as
- // well. The answer's parameters may differ, and it's up to the opaque
- // transport implementation to decide if the difference is acceptable.
- if (offer->opaque_parameters && options.opaque_parameters) {
- desc->opaque_parameters = options.opaque_parameters;
- }
-
return desc;
}
diff --git a/p2p/base/transport_description_factory.h b/p2p/base/transport_description_factory.h
index d0813dc541..c1656a0fac 100644
--- a/p2p/base/transport_description_factory.h
+++ b/p2p/base/transport_description_factory.h
@@ -29,9 +29,6 @@ struct TransportOptions {
// If true, ICE renomination is supported and will be used if it is also
// supported by the remote side.
bool enable_ice_renomination = false;
-
- // Opaque parameters for plug-in transports.
- absl::optional<OpaqueTransportParameters> opaque_parameters;
};
// Creates transport descriptions according to the supplied configuration.
diff --git a/p2p/base/transport_description_factory_unittest.cc b/p2p/base/transport_description_factory_unittest.cc
index 8359ffc1c9..f7675ae643 100644
--- a/p2p/base/transport_description_factory_unittest.cc
+++ b/p2p/base/transport_description_factory_unittest.cc
@@ -26,7 +26,6 @@
#include "test/gmock.h"
#include "test/gtest.h"
-using cricket::OpaqueTransportParameters;
using cricket::TransportDescription;
using cricket::TransportDescriptionFactory;
using cricket::TransportOptions;
@@ -210,73 +209,6 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) {
CheckDesc(desc.get(), "", old_desc->ice_ufrag, old_desc->ice_pwd, digest_alg);
}
-TEST_F(TransportDescriptionFactoryTest, TestOfferOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> desc =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
-
- CheckDesc(desc.get(), "", "", "", "");
- EXPECT_EQ(desc->opaque_parameters, params);
-}
-
-TEST_F(TransportDescriptionFactoryTest, TestAnswerOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
- std::unique_ptr<TransportDescription> answer =
- f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, params);
-}
-
-TEST_F(TransportDescriptionFactoryTest, TestAnswerNoOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
- std::unique_ptr<TransportDescription> answer = f2_.CreateAnswer(
- offer.get(), TransportOptions(), true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, absl::nullopt);
-}
-
-TEST_F(TransportDescriptionFactoryTest,
- TestAnswerNoOpaqueTransportParametersInOffer) {
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_);
-
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
- std::unique_ptr<TransportDescription> answer =
- f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, absl::nullopt);
-}
-
TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) {
std::unique_ptr<TransportDescription> offer =
f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_);
diff --git a/p2p/base/turn_port.cc b/p2p/base/turn_port.cc
index 2e8024dcb6..4d39f207b4 100644
--- a/p2p/base/turn_port.cc
+++ b/p2p/base/turn_port.cc
@@ -367,7 +367,7 @@ void TurnPort::PrepareAddress() {
<< server_address_.address.ToSensitiveString();
if (!CreateTurnClientSocket()) {
RTC_LOG(LS_ERROR) << "Failed to create TURN client socket";
- OnAllocateError(STUN_ERROR_GLOBAL_FAILURE,
+ OnAllocateError(SERVER_NOT_REACHABLE_ERROR,
"Failed to create TURN client socket.");
return;
}
@@ -883,12 +883,17 @@ void TurnPort::OnAllocateError(int error_code, const std::string& reason) {
// port initialization. This way it will not be blocking other port
// creation.
thread()->Post(RTC_FROM_HERE, this, MSG_ALLOCATE_ERROR);
+ std::string address = GetLocalAddress().HostAsSensitiveURIString();
+ int port = GetLocalAddress().port();
+ if (server_address_.proto == PROTO_TCP &&
+ server_address_.address.IsPrivateIP()) {
+ address.clear();
+ port = 0;
+ }
SignalCandidateError(
- this,
- IceCandidateErrorEvent(GetLocalAddress().HostAsSensitiveURIString(),
- GetLocalAddress().port(),
- ReconstructedServerUrl(true /* use_hostname */),
- error_code, reason));
+ this, IceCandidateErrorEvent(
+ address, port, ReconstructedServerUrl(true /* use_hostname */),
+ error_code, reason));
}
void TurnPort::OnRefreshError() {
diff --git a/p2p/client/basic_port_allocator.cc b/p2p/client/basic_port_allocator.cc
index 8aeef9361d..bb640d9498 100644
--- a/p2p/client/basic_port_allocator.cc
+++ b/p2p/client/basic_port_allocator.cc
@@ -979,8 +979,11 @@ void BasicPortAllocatorSession::OnCandidateError(
const IceCandidateErrorEvent& event) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(FindPort(port));
-
- SignalCandidateError(this, event);
+ if (event.address.empty()) {
+ candidate_error_events_.push_back(event);
+ } else {
+ SignalCandidateError(this, event);
+ }
}
Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork(
@@ -1140,6 +1143,10 @@ void BasicPortAllocatorSession::MaybeSignalCandidatesAllocationDone() {
RTC_LOG(LS_INFO) << "All candidates gathered for " << content_name()
<< ":" << component() << ":" << generation();
}
+ for (const auto& event : candidate_error_events_) {
+ SignalCandidateError(this, event);
+ }
+ candidate_error_events_.clear();
SignalCandidatesAllocationDone(this);
}
}
diff --git a/p2p/client/basic_port_allocator.h b/p2p/client/basic_port_allocator.h
index b9f2b2ebd2..b27016a1dc 100644
--- a/p2p/client/basic_port_allocator.h
+++ b/p2p/client/basic_port_allocator.h
@@ -269,6 +269,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
std::vector<PortConfiguration*> configs_;
std::vector<AllocationSequence*> sequences_;
std::vector<PortData> ports_;
+ std::vector<IceCandidateErrorEvent> candidate_error_events_;
uint32_t candidate_filter_ = CF_ALL;
// Policy on how to prune turn ports, taken from the port allocator.
webrtc::PortPrunePolicy turn_port_prune_policy_;
diff --git a/pc/BUILD.gn b/pc/BUILD.gn
index a48a0469d9..6b07bbe74e 100644
--- a/pc/BUILD.gn
+++ b/pc/BUILD.gn
@@ -32,12 +32,8 @@ rtc_library("rtc_pc_base") {
"channel_interface.h",
"channel_manager.cc",
"channel_manager.h",
- "composite_data_channel_transport.cc",
- "composite_data_channel_transport.h",
"composite_rtp_transport.cc",
"composite_rtp_transport.h",
- "datagram_rtp_transport.cc",
- "datagram_rtp_transport.h",
"dtls_srtp_transport.cc",
"dtls_srtp_transport.h",
"dtls_transport.cc",
@@ -88,6 +84,7 @@ rtc_library("rtc_pc_base") {
"../api:function_view",
"../api:ice_transport_factory",
"../api:libjingle_peerconnection_api",
+ "../api:priority",
"../api:rtc_error",
"../api:rtp_headers",
"../api:rtp_parameters",
@@ -96,7 +93,6 @@ rtc_library("rtc_pc_base") {
"../api/crypto:options",
"../api/rtc_event_log",
"../api/transport:datagram_transport_interface",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
@@ -118,12 +114,15 @@ rtc_library("rtc_pc_base") {
"../rtc_base:deprecation",
"../rtc_base:rtc_task_queue",
"../rtc_base:stringutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
"../rtc_base/system:rtc_export",
"../rtc_base/third_party/base64",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
@@ -162,10 +161,10 @@ rtc_library("peerconnection") {
"audio_rtp_receiver.h",
"audio_track.cc",
"audio_track.h",
- "data_channel.cc",
- "data_channel.h",
"data_channel_controller.cc",
"data_channel_controller.h",
+ "data_channel_utils.cc",
+ "data_channel_utils.h",
"dtmf_sender.cc",
"dtmf_sender.h",
"ice_server_parsing.cc",
@@ -194,6 +193,8 @@ rtc_library("peerconnection") {
"rtc_stats_collector.h",
"rtc_stats_traversal.cc",
"rtc_stats_traversal.h",
+ "rtp_data_channel.cc",
+ "rtp_data_channel.h",
"rtp_parameters_conversion.cc",
"rtp_parameters_conversion.h",
"rtp_receiver.cc",
@@ -202,6 +203,8 @@ rtc_library("peerconnection") {
"rtp_sender.h",
"rtp_transceiver.cc",
"rtp_transceiver.h",
+ "sctp_data_channel.cc",
+ "sctp_data_channel.h",
"sdp_serializer.cc",
"sdp_serializer.h",
"sdp_utils.cc",
@@ -236,6 +239,7 @@ rtc_library("peerconnection") {
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
"../api:network_state_predictor_api",
+ "../api:priority",
"../api:rtc_error",
"../api:rtc_event_log_output_file",
"../api:rtc_stats_api",
@@ -246,7 +250,6 @@ rtc_library("peerconnection") {
"../api/task_queue",
"../api/transport:datagram_transport_interface",
"../api/transport:field_trial_based_config",
- "../api/transport/media:media_transport_interface",
"../api/units:data_rate",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
@@ -266,6 +269,7 @@ rtc_library("peerconnection") {
"../rtc_base:safe_minmax",
"../rtc_base:weak_ptr",
"../rtc_base/experiments:field_trial_parser",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
"../rtc_base/system:rtc_export",
"../rtc_base/third_party/base64",
@@ -274,6 +278,8 @@ rtc_library("peerconnection") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -329,14 +335,11 @@ if (rtc_include_tests) {
":rtc_pc_base",
"../api:array_view",
"../api:audio_options_api",
- "../api:fake_media_transport",
"../api:ice_transport_factory",
"../api:libjingle_peerconnection_api",
- "../api:loopback_media_transport",
"../api:rtc_error",
"../api:rtp_headers",
"../api:rtp_parameters",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video/test:mock_recordable_encoded_frame",
"../call:rtp_interfaces",
@@ -404,8 +407,8 @@ if (rtc_include_tests) {
"../system_wrappers",
"../test:perf_test",
"../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("peerconnection_wrapper") {
@@ -497,12 +500,14 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../rtc_base:task_queue_for_test",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:repeating_task",
"../rtc_base/third_party/sigslot",
"../test:test_support",
"../test:video_test_common",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_test("peerconnection_unittests") {
@@ -515,6 +520,7 @@ if (rtc_include_tests) {
"jsep_session_description_unittest.cc",
"local_audio_source_unittest.cc",
"media_stream_unittest.cc",
+ "peer_connection_adaptation_integrationtest.cc",
"peer_connection_bundle_unittest.cc",
"peer_connection_crypto_unittest.cc",
"peer_connection_data_channel_unittest.cc",
@@ -566,7 +572,6 @@ if (rtc_include_tests) {
"../api:function_view",
"../api:libjingle_logging_api",
"../api:libjingle_peerconnection_api",
- "../api:loopback_media_transport",
"../api:media_stream_interface",
"../api:mock_rtp",
"../api:rtc_error",
@@ -578,10 +583,10 @@ if (rtc_include_tests) {
"../api/rtc_event_log",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/units:time_delta",
"../api/video:builtin_video_bitrate_allocator_factory",
+ "../call/adaptation:resource_adaptation_test_utilities",
"../logging:fake_rtc_event_log",
"../media:rtc_media_config",
"../media:rtc_media_engine_defaults",
@@ -595,6 +600,7 @@ if (rtc_include_tests) {
"../rtc_base:gunit_helpers",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base:rtc_json",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/third_party/base64",
"../rtc_base/third_party/sigslot",
"../system_wrappers:metrics",
@@ -605,6 +611,7 @@ if (rtc_include_tests) {
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
if (is_android) {
deps += [ ":android_black_magic" ]
@@ -646,7 +653,6 @@ if (rtc_include_tests) {
"../test:audio_codec_mocks",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
if (is_android) {
diff --git a/pc/channel.cc b/pc/channel.cc
index f83f5cdd9a..e7f62c6aa6 100644
--- a/pc/channel.cc
+++ b/pc/channel.cc
@@ -16,7 +16,6 @@
#include "absl/algorithm/container.h"
#include "absl/memory/memory.h"
#include "api/call/audio_sink.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
@@ -142,7 +141,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread,
RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(ssrc_generator_);
demuxer_criteria_.mid = content_name;
- RTC_LOG(LS_INFO) << "Created channel for " << content_name;
+ RTC_LOG(LS_INFO) << "Created channel: " << ToString();
}
BaseChannel::~BaseChannel() {
@@ -156,12 +155,23 @@ BaseChannel::~BaseChannel() {
// the media channel may try to send on the dead transport channel. NULLing
// is not an effective strategy since the sends will come on another thread.
media_channel_.reset();
- RTC_LOG(LS_INFO) << "Destroyed channel: " << content_name_;
+ RTC_LOG(LS_INFO) << "Destroyed channel: " << ToString();
+}
+
+std::string BaseChannel::ToString() const {
+ rtc::StringBuilder sb;
+ sb << "{mid: " << content_name_;
+ if (media_channel_) {
+ sb << ", media_type: " << MediaTypeToString(media_channel_->media_type());
+ }
+ sb << "}";
+ return sb.Release();
}
bool BaseChannel::ConnectToRtpTransport() {
RTC_DCHECK(rtp_transport_);
if (!RegisterRtpDemuxerSink()) {
+ RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString();
return false;
}
rtp_transport_->SignalReadyToSend.connect(
@@ -184,24 +194,20 @@ void BaseChannel::DisconnectFromRtpTransport() {
rtp_transport_->SignalSentPacket.disconnect(this);
}
-void BaseChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
+void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
RTC_DCHECK_RUN_ON(worker_thread_);
- media_transport_config_ = media_transport_config;
network_thread_->Invoke<void>(
RTC_FROM_HERE, [this, rtp_transport] { SetRtpTransport(rtp_transport); });
// Both RTP and RTCP channels should be set, we can call SetInterface on
// the media channel and it can set network options.
- media_channel_->SetInterface(this, media_transport_config);
+ media_channel_->SetInterface(this);
}
void BaseChannel::Deinit() {
RTC_DCHECK(worker_thread_->IsCurrent());
- media_channel_->SetInterface(/*iface=*/nullptr,
- webrtc::MediaTransportConfig());
+ media_channel_->SetInterface(/*iface=*/nullptr);
// Packets arrive on the network thread, processing packets calls virtual
// functions, so need to stop this process in Deinit that is called in
// derived classes destructor.
@@ -237,7 +243,8 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) {
transport_name_ = rtp_transport_->transport_name();
if (!ConnectToRtpTransport()) {
- RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport.";
+ RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport for "
+ << ToString() << ".";
return false;
}
OnTransportReadyToSend(rtp_transport_->IsReadyToSend());
@@ -349,7 +356,7 @@ void BaseChannel::OnWritableState(bool writable) {
void BaseChannel::OnNetworkRouteChanged(
absl::optional<rtc::NetworkRoute> network_route) {
- RTC_LOG(LS_INFO) << "Network route was changed.";
+ RTC_LOG(LS_INFO) << "Network route for " << ToString() << " was changed.";
RTC_DCHECK(network_thread_->IsCurrent());
rtc::NetworkRoute new_route;
@@ -404,7 +411,7 @@ bool BaseChannel::SendPacket(bool rtcp,
// Protect ourselves against crazy data.
if (!IsValidRtpPacketSize(packet_type, packet->size())) {
- RTC_LOG(LS_ERROR) << "Dropping outgoing " << content_name_ << " "
+ RTC_LOG(LS_ERROR) << "Dropping outgoing " << ToString() << " "
<< RtpPacketTypeToString(packet_type)
<< " packet: wrong size=" << packet->size();
return false;
@@ -420,16 +427,16 @@ bool BaseChannel::SendPacket(bool rtcp,
}
// However, there shouldn't be any RTP packets sent before SRTP is set up
// (and SetSend(true) is called).
- RTC_LOG(LS_ERROR)
- << "Can't send outgoing RTP packet when SRTP is inactive"
- " and crypto is required";
+ RTC_LOG(LS_ERROR) << "Can't send outgoing RTP packet for " << ToString()
+ << " when SRTP is inactive and crypto is required";
RTC_NOTREACHED();
return false;
}
std::string packet_type = rtcp ? "RTCP" : "RTP";
RTC_LOG(LS_WARNING) << "Sending an " << packet_type
- << " packet without encryption.";
+ << " packet without encryption for " << ToString()
+ << ".";
}
// Bon voyage.
@@ -463,7 +470,8 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) {
// for us to just eat packets here. This is all sidestepped if RTCP mux
// is used anyway.
RTC_LOG(LS_WARNING) << "Can't process incoming RTP packet when "
- "SRTP is inactive and crypto is required";
+ "SRTP is inactive and crypto is required "
+ << ToString();
return;
}
@@ -504,7 +512,7 @@ void BaseChannel::EnableMedia_w() {
if (enabled_)
return;
- RTC_LOG(LS_INFO) << "Channel enabled";
+ RTC_LOG(LS_INFO) << "Channel enabled: " << ToString();
enabled_ = true;
UpdateMediaSendRecvState_w();
}
@@ -514,7 +522,7 @@ void BaseChannel::DisableMedia_w() {
if (!enabled_)
return;
- RTC_LOG(LS_INFO) << "Channel disabled";
+ RTC_LOG(LS_INFO) << "Channel disabled: " << ToString();
enabled_ = false;
UpdateMediaSendRecvState_w();
}
@@ -534,7 +542,7 @@ void BaseChannel::ChannelWritable_n() {
return;
}
- RTC_LOG(LS_INFO) << "Channel writable (" << content_name_ << ")"
+ RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")"
<< (was_ever_writable_ ? "" : " for the first time");
was_ever_writable_ = true;
@@ -547,7 +555,7 @@ void BaseChannel::ChannelNotWritable_n() {
if (!writable_)
return;
- RTC_LOG(LS_INFO) << "Channel not writable (" << content_name_ << ")";
+ RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")";
writable_ = false;
UpdateMediaSendRecvState();
}
@@ -591,7 +599,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) {
rtc::StringBuilder desc;
desc << "Failed to remove send stream with ssrc "
- << old_stream.first_ssrc() << ".";
+ << old_stream.first_ssrc() << " from m-section with mid='"
+ << content_name() << "'.";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -617,7 +626,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
if (new_stream.has_ssrcs() && new_stream.has_rids()) {
rtc::StringBuilder desc;
desc << "Failed to add send stream: " << new_stream.first_ssrc()
- << ". Stream has both SSRCs and RIDs.";
+ << " into m-section with mid='" << content_name()
+ << "'. Stream has both SSRCs and RIDs.";
SafeSetError(desc.str(), error_desc);
ret = false;
continue;
@@ -632,10 +642,12 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
}
if (media_channel()->AddSendStream(new_stream)) {
- RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0];
+ RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0]
+ << " into " << ToString();
} else {
rtc::StringBuilder desc;
- desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc();
+ desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc()
+ << " into m-section with mid='" << content_name() << "'";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -655,15 +667,18 @@ bool BaseChannel::UpdateRemoteStreams_w(
// the unsignaled stream params that are cached.
if (!old_stream.has_ssrcs() && !HasStreamWithNoSsrcs(streams)) {
ResetUnsignaledRecvStream_w();
- RTC_LOG(LS_INFO) << "Reset unsignaled remote stream.";
+ RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString()
+ << ".";
} else if (old_stream.has_ssrcs() &&
!GetStreamBySsrc(streams, old_stream.first_ssrc())) {
if (RemoveRecvStream_w(old_stream.first_ssrc())) {
- RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc();
+ RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc()
+ << " from " << ToString() << ".";
} else {
rtc::StringBuilder desc;
desc << "Failed to remove remote stream with ssrc "
- << old_stream.first_ssrc() << ".";
+ << old_stream.first_ssrc() << " from m-section with mid='"
+ << content_name() << "'.";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -681,13 +696,15 @@ bool BaseChannel::UpdateRemoteStreams_w(
RTC_LOG(LS_INFO) << "Add remote ssrc: "
<< (new_stream.has_ssrcs()
? std::to_string(new_stream.first_ssrc())
- : "unsignaled");
+ : "unsignaled")
+ << " to " << ToString();
} else {
rtc::StringBuilder desc;
desc << "Failed to add remote stream ssrc: "
<< (new_stream.has_ssrcs()
? std::to_string(new_stream.first_ssrc())
- : "unsignaled");
+ : "unsignaled")
+ << " to " << ToString();
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -697,7 +714,9 @@ bool BaseChannel::UpdateRemoteStreams_w(
new_stream.ssrcs.end());
}
// Re-register the sink to update the receiving ssrcs.
- RegisterRtpDemuxerSink();
+ if (!RegisterRtpDemuxerSink()) {
+ RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString();
+ }
remote_streams_ = streams;
return ret;
}
@@ -796,10 +815,8 @@ void BaseChannel::UpdateMediaSendRecvState() {
[this] { UpdateMediaSendRecvState_w(); });
}
-void VoiceChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
- BaseChannel::Init_w(rtp_transport, media_transport_config);
+void VoiceChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
+ BaseChannel::Init_w(rtp_transport);
}
void VoiceChannel::UpdateMediaSendRecvState_w() {
@@ -813,7 +830,8 @@ void VoiceChannel::UpdateMediaSendRecvState_w() {
bool send = IsReadyToSendMedia_w();
media_channel()->SetSend(send);
- RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send;
+ RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send
+ << " for " << ToString();
}
bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
@@ -821,7 +839,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local voice description";
+ RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -841,8 +859,11 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
audio, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &recv_params);
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set local audio description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set local audio description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -852,7 +873,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString();
return false;
}
}
@@ -864,7 +885,11 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(audio->streams(), type, error_desc)) {
- SafeSetError("Failed to set local audio description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local audio description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -878,7 +903,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote voice description";
+ RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -899,18 +924,22 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
bool parameters_applied = media_channel()->SetSendParameters(send_params);
if (!parameters_applied) {
- SafeSetError("Failed to set remote audio description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote audio description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) {
RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - "
- "disable payload type demuxing";
+ "disable payload type demuxing for "
+ << ToString();
ClearHandledPayloadTypes();
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to update audio demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString();
return false;
}
}
@@ -920,7 +949,11 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(audio->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote audio description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote audio description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -958,11 +991,12 @@ void VideoChannel::UpdateMediaSendRecvState_w() {
// and we have had some form of connectivity.
bool send = IsReadyToSendMedia_w();
if (!media_channel()->SetSend(send)) {
- RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString();
// TODO(gangji): Report error back to server.
}
- RTC_LOG(LS_INFO) << "Changing video state, send=" << send;
+ RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for "
+ << ToString();
}
void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) {
@@ -975,7 +1009,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local video description";
+ RTC_LOG(LS_INFO) << "Setting local video description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1007,7 +1041,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
needs_send_params_update = true;
} else if (recv_codec->packetization != send_codec.packetization) {
SafeSetError(
- "Failed to set local answer due to invalid codec packetization.",
+ "Failed to set local answer due to invalid codec packetization "
+ "specified in m-section with mid='" +
+ content_name() + "'.",
error_desc);
return false;
}
@@ -1016,8 +1052,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
}
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set local video description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set local video description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1027,7 +1066,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up video demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString();
return false;
}
}
@@ -1036,7 +1075,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
if (needs_send_params_update) {
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set send parameters.", error_desc);
+ SafeSetError("Failed to set send parameters for m-section with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
@@ -1047,7 +1088,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(video->streams(), type, error_desc)) {
- SafeSetError("Failed to set local video description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local video description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1061,7 +1106,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote video description";
+ RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1095,7 +1140,9 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
needs_recv_params_update = true;
} else if (send_codec->packetization != recv_codec.packetization) {
SafeSetError(
- "Failed to set remote answer due to invalid codec packetization.",
+ "Failed to set remote answer due to invalid codec packetization "
+ "specifid in m-section with mid='" +
+ content_name() + "'.",
error_desc);
return false;
}
@@ -1104,15 +1151,20 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
}
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set remote video description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote video description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
if (needs_recv_params_update) {
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set recv parameters.", error_desc);
+ SafeSetError("Failed to set recv parameters for m-section with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_recv_params_ = recv_params;
@@ -1120,10 +1172,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) {
RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - "
- "disable payload type demuxing";
+ "disable payload type demuxing for "
+ << ToString();
ClearHandledPayloadTypes();
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to update video demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString();
return false;
}
}
@@ -1133,7 +1186,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(video->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote video description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote video description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
set_remote_content_direction(content->direction());
@@ -1165,10 +1222,8 @@ RtpDataChannel::~RtpDataChannel() {
Deinit();
}
-void RtpDataChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
- BaseChannel::Init_w(rtp_transport, media_transport_config);
+void RtpDataChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
+ BaseChannel::Init_w(rtp_transport);
media_channel()->SignalDataReceived.connect(this,
&RtpDataChannel::OnDataReceived);
media_channel()->SignalReadyToSend.connect(
@@ -1203,7 +1258,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local data description";
+ RTC_LOG(LS_INFO) << "Setting local data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1224,8 +1279,11 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &recv_params);
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set remote data description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote data description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
for (const DataCodec& codec : data->codecs()) {
@@ -1233,7 +1291,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up data demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up data demuxing for " << ToString();
return false;
}
@@ -1244,7 +1302,11 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(data->streams(), type, error_desc)) {
- SafeSetError("Failed to set local data description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local data description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1258,7 +1320,7 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote data description";
+ RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1280,14 +1342,17 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
RtpHeaderExtensions rtp_header_extensions =
GetFilteredRtpHeaderExtensions(data->rtp_header_extensions());
- RTC_LOG(LS_INFO) << "Setting remote data description";
+ RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
DataSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription<DataCodec>(
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &send_params);
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set remote data description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote data description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
@@ -1297,7 +1362,11 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(data->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote data description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote data description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1311,20 +1380,21 @@ void RtpDataChannel::UpdateMediaSendRecvState_w() {
// content. We receive data on the default channel and multiplexed streams.
bool recv = IsReadyToReceiveMedia_w();
if (!media_channel()->SetReceive(recv)) {
- RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel: " << ToString();
}
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
bool send = IsReadyToSendMedia_w();
if (!media_channel()->SetSend(send)) {
- RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel: " << ToString();
}
// Trigger SignalReadyToSendData asynchronously.
OnDataChannelReadyToSend(send);
- RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send;
+ RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send
+ << " for " << ToString();
}
void RtpDataChannel::OnMessage(rtc::Message* pmsg) {
diff --git a/pc/channel.h b/pc/channel.h
index 238a8e20fe..44374b176b 100644
--- a/pc/channel.h
+++ b/pc/channel.h
@@ -22,7 +22,6 @@
#include "api/function_view.h"
#include "api/jsep.h"
#include "api/rtp_receiver_interface.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "call/rtp_packet_sink_interface.h"
@@ -39,14 +38,12 @@
#include "pc/srtp_transport.h"
#include "rtc_base/async_invoker.h"
#include "rtc_base/async_udp_socket.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/network.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/unique_id_generator.h"
namespace webrtc {
class AudioSinkInterface;
-class MediaTransportInterface;
} // namespace webrtc
namespace cricket {
@@ -92,9 +89,7 @@ class BaseChannel : public ChannelInterface,
webrtc::CryptoOptions crypto_options,
rtc::UniqueRandomIdGenerator* ssrc_generator);
virtual ~BaseChannel();
- virtual void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config);
+ virtual void Init_w(webrtc::RtpTransportInternal* rtp_transport);
// Deinit may be called multiple times and is simply ignored if it's already
// done.
@@ -275,6 +270,9 @@ class BaseChannel : public ChannelInterface,
bool RegisterRtpDemuxerSink();
+ // Return description of media channel to facilitate logging
+ std::string ToString() const;
+
bool has_received_packet_ = false;
private:
@@ -296,9 +294,6 @@ class BaseChannel : public ChannelInterface,
webrtc::RtpTransportInternal* rtp_transport_ = nullptr;
- // Optional media transport configuration (experimental).
- webrtc::MediaTransportConfig media_transport_config_;
-
std::vector<std::pair<rtc::Socket::Option, int> > socket_options_;
std::vector<std::pair<rtc::Socket::Option, int> > rtcp_socket_options_;
bool writable_ = false;
@@ -350,9 +345,7 @@ class VoiceChannel : public BaseChannel {
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
- void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void Init_w(webrtc::RtpTransportInternal* rtp_transport) override;
private:
// overrides from BaseChannel
@@ -432,9 +425,7 @@ class RtpDataChannel : public BaseChannel {
DtlsTransportInternal* rtcp_dtls_transport,
rtc::PacketTransportInternal* rtp_packet_transport,
rtc::PacketTransportInternal* rtcp_packet_transport);
- void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void Init_w(webrtc::RtpTransportInternal* rtp_transport) override;
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
diff --git a/pc/channel_manager.cc b/pc/channel_manager.cc
index f5f3dd4a7b..84d74678b5 100644
--- a/pc/channel_manager.cc
+++ b/pc/channel_manager.cc
@@ -187,7 +187,6 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -197,9 +196,8 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<VoiceChannel*>(RTC_FROM_HERE, [&] {
return CreateVoiceChannel(call, media_config, rtp_transport,
- media_transport_config, signaling_thread,
- content_name, srtp_required, crypto_options,
- ssrc_generator, options);
+ signaling_thread, content_name, srtp_required,
+ crypto_options, ssrc_generator, options);
});
}
@@ -221,7 +219,7 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options, ssrc_generator);
- voice_channel->Init_w(rtp_transport, media_transport_config);
+ voice_channel->Init_w(rtp_transport);
VoiceChannel* voice_channel_ptr = voice_channel.get();
voice_channels_.push_back(std::move(voice_channel));
@@ -257,7 +255,6 @@ VideoChannel* ChannelManager::CreateVideoChannel(
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -267,10 +264,10 @@ VideoChannel* ChannelManager::CreateVideoChannel(
webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) {
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<VideoChannel*>(RTC_FROM_HERE, [&] {
- return CreateVideoChannel(
- call, media_config, rtp_transport, media_transport_config,
- signaling_thread, content_name, srtp_required, crypto_options,
- ssrc_generator, options, video_bitrate_allocator_factory);
+ return CreateVideoChannel(call, media_config, rtp_transport,
+ signaling_thread, content_name, srtp_required,
+ crypto_options, ssrc_generator, options,
+ video_bitrate_allocator_factory);
});
}
@@ -293,7 +290,7 @@ VideoChannel* ChannelManager::CreateVideoChannel(
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options, ssrc_generator);
- video_channel->Init_w(rtp_transport, media_transport_config);
+ video_channel->Init_w(rtp_transport);
VideoChannel* video_channel_ptr = video_channel.get();
video_channels_.push_back(std::move(video_channel));
@@ -355,7 +352,7 @@ RtpDataChannel* ChannelManager::CreateRtpDataChannel(
crypto_options, ssrc_generator);
// Media Transports are not supported with Rtp Data Channel.
- data_channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ data_channel->Init_w(rtp_transport);
RtpDataChannel* data_channel_ptr = data_channel.get();
data_channels_.push_back(std::move(data_channel));
diff --git a/pc/channel_manager.h b/pc/channel_manager.h
index 415e476a90..8d5fc0aa5b 100644
--- a/pc/channel_manager.h
+++ b/pc/channel_manager.h
@@ -19,7 +19,6 @@
#include "api/audio_options.h"
#include "api/crypto/crypto_options.h"
-#include "api/transport/media/media_transport_config.h"
#include "call/call.h"
#include "media/base/codec.h"
#include "media/base/media_channel.h"
@@ -101,7 +100,6 @@ class ChannelManager final {
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -118,7 +116,6 @@ class ChannelManager final {
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
diff --git a/pc/channel_manager_unittest.cc b/pc/channel_manager_unittest.cc
index 6f3128ebde..610d7979ab 100644
--- a/pc/channel_manager_unittest.cc
+++ b/pc/channel_manager_unittest.cc
@@ -13,7 +13,6 @@
#include <memory>
#include "api/rtc_error.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "media/base/fake_media_engine.h"
#include "media/base/test_utils.h"
@@ -73,20 +72,17 @@ class ChannelManagerTest : public ::testing::Test {
return dtls_srtp_transport;
}
- void TestCreateDestroyChannels(
- webrtc::RtpTransportInternal* rtp_transport,
- webrtc::MediaTransportConfig media_transport_config) {
+ void TestCreateDestroyChannels(webrtc::RtpTransportInternal* rtp_transport) {
cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport,
- media_transport_config, rtc::Thread::Current(), cricket::CN_AUDIO,
- kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_,
- AudioOptions());
+ rtc::Thread::Current(), cricket::CN_AUDIO, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), &ssrc_generator_, AudioOptions());
EXPECT_TRUE(voice_channel != nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport,
- media_transport_config, rtc::Thread::Current(), cricket::CN_VIDEO,
- kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_,
- VideoOptions(), video_bitrate_allocator_factory_.get());
+ rtc::Thread::Current(), cricket::CN_VIDEO, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), &ssrc_generator_, VideoOptions(),
+ video_bitrate_allocator_factory_.get());
EXPECT_TRUE(video_channel != nullptr);
cricket::RtpDataChannel* rtp_data_channel = cm_->CreateRtpDataChannel(
cricket::MediaConfig(), rtp_transport, rtc::Thread::Current(),
@@ -183,8 +179,7 @@ TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
TEST_F(ChannelManagerTest, CreateDestroyChannels) {
EXPECT_TRUE(cm_->Init());
auto rtp_transport = CreateDtlsSrtpTransport();
- TestCreateDestroyChannels(rtp_transport.get(),
- webrtc::MediaTransportConfig());
+ TestCreateDestroyChannels(rtp_transport.get());
}
TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
@@ -194,8 +189,7 @@ TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
EXPECT_TRUE(cm_->set_network_thread(network_.get()));
EXPECT_TRUE(cm_->Init());
auto rtp_transport = CreateDtlsSrtpTransport();
- TestCreateDestroyChannels(rtp_transport.get(),
- webrtc::MediaTransportConfig());
+ TestCreateDestroyChannels(rtp_transport.get());
}
} // namespace cricket
diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc
index a3fe3f68de..479340c520 100644
--- a/pc/channel_unittest.cc
+++ b/pc/channel_unittest.cc
@@ -17,7 +17,6 @@
#include "api/array_view.h"
#include "api/audio_options.h"
#include "api/rtp_parameters.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/base/codec.h"
#include "media/base/fake_media_engine.h"
#include "media/base/fake_rtp.h"
@@ -1431,7 +1430,7 @@ std::unique_ptr<cricket::VoiceChannel> ChannelTest<VoiceTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_AUDIO, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
@@ -1514,7 +1513,7 @@ std::unique_ptr<cricket::VideoChannel> ChannelTest<VideoTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_VIDEO, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
@@ -2301,7 +2300,7 @@ std::unique_ptr<cricket::RtpDataChannel> ChannelTest<DataTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_DATA, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
diff --git a/pc/composite_data_channel_transport.cc b/pc/composite_data_channel_transport.cc
deleted file mode 100644
index e66febc12b..0000000000
--- a/pc/composite_data_channel_transport.cc
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "pc/composite_data_channel_transport.h"
-
-#include <utility>
-
-#include "absl/algorithm/container.h"
-
-namespace webrtc {
-
-CompositeDataChannelTransport::CompositeDataChannelTransport(
- std::vector<DataChannelTransportInterface*> transports)
- : transports_(std::move(transports)) {
- for (auto transport : transports_) {
- transport->SetDataSink(this);
- }
-}
-
-CompositeDataChannelTransport::~CompositeDataChannelTransport() {
- for (auto transport : transports_) {
- transport->SetDataSink(nullptr);
- }
-}
-
-void CompositeDataChannelTransport::SetSendTransport(
- DataChannelTransportInterface* send_transport) {
- if (!absl::c_linear_search(transports_, send_transport)) {
- return;
- }
- send_transport_ = send_transport;
- // NB: OnReadyToSend() checks if we're actually ready to send, and signals
- // |sink_| if appropriate. This signal is required upon setting the sink.
- OnReadyToSend();
-}
-
-void CompositeDataChannelTransport::RemoveTransport(
- DataChannelTransportInterface* transport) {
- RTC_DCHECK(transport != send_transport_) << "Cannot remove send transport";
-
- auto it = absl::c_find(transports_, transport);
- if (it == transports_.end()) {
- return;
- }
-
- transport->SetDataSink(nullptr);
- transports_.erase(it);
-}
-
-RTCError CompositeDataChannelTransport::OpenChannel(int channel_id) {
- RTCError error = RTCError::OK();
- for (auto transport : transports_) {
- RTCError e = transport->OpenChannel(channel_id);
- if (!e.ok()) {
- error = std::move(e);
- }
- }
- return error;
-}
-
-RTCError CompositeDataChannelTransport::SendData(
- int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) {
- if (send_transport_) {
- return send_transport_->SendData(channel_id, params, buffer);
- }
- return RTCError(RTCErrorType::NETWORK_ERROR, "Send transport is not ready");
-}
-
-RTCError CompositeDataChannelTransport::CloseChannel(int channel_id) {
- if (send_transport_) {
- return send_transport_->CloseChannel(channel_id);
- }
- return RTCError(RTCErrorType::NETWORK_ERROR, "Send transport is not ready");
-}
-
-void CompositeDataChannelTransport::SetDataSink(DataChannelSink* sink) {
- sink_ = sink;
- // NB: OnReadyToSend() checks if we're actually ready to send, and signals
- // |sink_| if appropriate. This signal is required upon setting the sink.
- OnReadyToSend();
-}
-
-bool CompositeDataChannelTransport::IsReadyToSend() const {
- return send_transport_ && send_transport_->IsReadyToSend();
-}
-
-void CompositeDataChannelTransport::OnDataReceived(
- int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer) {
- if (sink_) {
- sink_->OnDataReceived(channel_id, type, buffer);
- }
-}
-
-void CompositeDataChannelTransport::OnChannelClosing(int channel_id) {
- if (sink_) {
- sink_->OnChannelClosing(channel_id);
- }
-}
-
-void CompositeDataChannelTransport::OnChannelClosed(int channel_id) {
- if (sink_) {
- sink_->OnChannelClosed(channel_id);
- }
-}
-
-void CompositeDataChannelTransport::OnReadyToSend() {
- if (sink_ && send_transport_ && send_transport_->IsReadyToSend()) {
- sink_->OnReadyToSend();
- }
-}
-
-} // namespace webrtc
diff --git a/pc/composite_data_channel_transport.h b/pc/composite_data_channel_transport.h
deleted file mode 100644
index 97633cb6ed..0000000000
--- a/pc/composite_data_channel_transport.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_
-#define PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_
-
-#include <vector>
-
-#include "api/transport/data_channel_transport_interface.h"
-#include "rtc_base/critical_section.h"
-
-namespace webrtc {
-
-// Composite implementation of DataChannelTransportInterface. Allows users to
-// receive data channel messages over multiple transports and send over one of
-// those transports.
-class CompositeDataChannelTransport : public DataChannelTransportInterface,
- public DataChannelSink {
- public:
- explicit CompositeDataChannelTransport(
- std::vector<DataChannelTransportInterface*> transports);
- ~CompositeDataChannelTransport() override;
-
- // Specifies which transport to be used for sending. Must be called before
- // sending data.
- void SetSendTransport(DataChannelTransportInterface* send_transport);
-
- // Removes a given transport from the composite, if present.
- void RemoveTransport(DataChannelTransportInterface* transport);
-
- // DataChannelTransportInterface overrides.
- RTCError OpenChannel(int channel_id) override;
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override;
- RTCError CloseChannel(int channel_id) override;
- void SetDataSink(DataChannelSink* sink) override;
- bool IsReadyToSend() const override;
-
- // DataChannelSink overrides.
- void OnDataReceived(int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer) override;
- void OnChannelClosing(int channel_id) override;
- void OnChannelClosed(int channel_id) override;
- void OnReadyToSend() override;
-
- private:
- std::vector<DataChannelTransportInterface*> transports_;
- DataChannelTransportInterface* send_transport_ = nullptr;
- DataChannelSink* sink_ = nullptr;
-};
-
-} // namespace webrtc
-
-#endif // PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_
diff --git a/pc/data_channel.h b/pc/data_channel.h
deleted file mode 100644
index c1de7c7a7a..0000000000
--- a/pc/data_channel.h
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Copyright 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_DATA_CHANNEL_H_
-#define PC_DATA_CHANNEL_H_
-
-#include <deque>
-#include <memory>
-#include <set>
-#include <string>
-
-#include "api/data_channel_interface.h"
-#include "api/proxy.h"
-#include "api/scoped_refptr.h"
-#include "media/base/media_channel.h"
-#include "pc/channel.h"
-#include "rtc_base/async_invoker.h"
-#include "rtc_base/third_party/sigslot/sigslot.h"
-
-namespace webrtc {
-
-class DataChannel;
-
-// TODO(deadbeef): Once RTP data channels go away, get rid of this and have
-// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport
-// interface) instead.
-class DataChannelProviderInterface {
- public:
- // Sends the data to the transport.
- virtual bool SendData(const cricket::SendDataParams& params,
- const rtc::CopyOnWriteBuffer& payload,
- cricket::SendDataResult* result) = 0;
- // Connects to the transport signals.
- virtual bool ConnectDataChannel(DataChannel* data_channel) = 0;
- // Disconnects from the transport signals.
- virtual void DisconnectDataChannel(DataChannel* data_channel) = 0;
- // Adds the data channel SID to the transport for SCTP.
- virtual void AddSctpDataStream(int sid) = 0;
- // Begins the closing procedure by sending an outgoing stream reset. Still
- // need to wait for callbacks to tell when this completes.
- virtual void RemoveSctpDataStream(int sid) = 0;
- // Returns true if the transport channel is ready to send data.
- virtual bool ReadyToSendData() const = 0;
-
- protected:
- virtual ~DataChannelProviderInterface() {}
-};
-
-struct InternalDataChannelInit : public DataChannelInit {
- enum OpenHandshakeRole { kOpener, kAcker, kNone };
- // The default role is kOpener because the default |negotiated| is false.
- InternalDataChannelInit() : open_handshake_role(kOpener) {}
- explicit InternalDataChannelInit(const DataChannelInit& base);
- OpenHandshakeRole open_handshake_role;
-};
-
-// Helper class to allocate unique IDs for SCTP DataChannels
-class SctpSidAllocator {
- public:
- // Gets the first unused odd/even id based on the DTLS role. If |role| is
- // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
- // otherwise, the id starts from 1 and takes odd numbers.
- // Returns false if no ID can be allocated.
- bool AllocateSid(rtc::SSLRole role, int* sid);
-
- // Attempts to reserve a specific sid. Returns false if it's unavailable.
- bool ReserveSid(int sid);
-
- // Indicates that |sid| isn't in use any more, and is thus available again.
- void ReleaseSid(int sid);
-
- private:
- // Checks if |sid| is available to be assigned to a new SCTP data channel.
- bool IsSidAvailable(int sid) const;
-
- std::set<int> used_sids_;
-};
-
-// DataChannel is a an implementation of the DataChannelInterface based on
-// libjingle's data engine. It provides an implementation of unreliable or
-// reliabledata channels. Currently this class is specifically designed to use
-// both RtpDataChannel and SctpTransport.
-
-// DataChannel states:
-// kConnecting: The channel has been created the transport might not yet be
-// ready.
-// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc
-// and a remote SSRC set by call to UpdateReceiveSsrc and the transport
-// has been writable once.
-// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc
-// has been called with SSRC==0
-// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with
-// SSRC==0.
-//
-// How the closing procedure works for SCTP:
-// 1. Alice calls Close(), state changes to kClosing.
-// 2. Alice finishes sending any queued data.
-// 3. Alice calls RemoveSctpDataStream, sends outgoing stream reset.
-// 4. Bob receives incoming stream reset; OnClosingProcedureStartedRemotely
-// called.
-// 5. Bob sends outgoing stream reset. 6. Alice receives incoming reset,
-// Bob receives acknowledgement. Both receive OnClosingProcedureComplete
-// callback and transition to kClosed.
-class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
- public:
- static rtc::scoped_refptr<DataChannel> Create(
- DataChannelProviderInterface* provider,
- cricket::DataChannelType dct,
- const std::string& label,
- const InternalDataChannelInit& config);
-
- static bool IsSctpLike(cricket::DataChannelType type);
-
- virtual void RegisterObserver(DataChannelObserver* observer);
- virtual void UnregisterObserver();
-
- virtual std::string label() const { return label_; }
- virtual bool reliable() const;
- virtual bool ordered() const { return config_.ordered; }
- // Backwards compatible accessors
- virtual uint16_t maxRetransmitTime() const {
- return config_.maxRetransmitTime ? *config_.maxRetransmitTime
- : static_cast<uint16_t>(-1);
- }
- virtual uint16_t maxRetransmits() const {
- return config_.maxRetransmits ? *config_.maxRetransmits
- : static_cast<uint16_t>(-1);
- }
- virtual absl::optional<int> maxPacketLifeTime() const {
- return config_.maxRetransmitTime;
- }
- virtual absl::optional<int> maxRetransmitsOpt() const {
- return config_.maxRetransmits;
- }
- virtual std::string protocol() const { return config_.protocol; }
- virtual bool negotiated() const { return config_.negotiated; }
- virtual int id() const { return config_.id; }
- virtual int internal_id() const { return internal_id_; }
- virtual uint64_t buffered_amount() const;
- virtual void Close();
- virtual DataState state() const { return state_; }
- virtual RTCError error() const;
- virtual uint32_t messages_sent() const { return messages_sent_; }
- virtual uint64_t bytes_sent() const { return bytes_sent_; }
- virtual uint32_t messages_received() const { return messages_received_; }
- virtual uint64_t bytes_received() const { return bytes_received_; }
- virtual bool Send(const DataBuffer& buffer);
-
- // Close immediately, ignoring any queued data or closing procedure.
- // This is called for RTP data channels when SDP indicates a channel should
- // be removed, or SCTP data channels when the underlying SctpTransport is
- // being destroyed.
- // It is also called by the PeerConnection if SCTP ID assignment fails.
- void CloseAbruptlyWithError(RTCError error);
- // Specializations of CloseAbruptlyWithError
- void CloseAbruptlyWithDataChannelFailure(const std::string& message);
- void CloseAbruptlyWithSctpCauseCode(const std::string& message,
- uint16_t cause_code);
-
- // Called when the channel's ready to use. That can happen when the
- // underlying DataMediaChannel becomes ready, or when this channel is a new
- // stream on an existing DataMediaChannel, and we've finished negotiation.
- void OnChannelReady(bool writable);
-
- // Slots for provider to connect signals to.
- void OnDataReceived(const cricket::ReceiveDataParams& params,
- const rtc::CopyOnWriteBuffer& payload);
-
- /********************************************
- * The following methods are for SCTP only. *
- ********************************************/
-
- // Sets the SCTP sid and adds to transport layer if not set yet. Should only
- // be called once.
- void SetSctpSid(int sid);
- // The remote side started the closing procedure by resetting its outgoing
- // stream (our incoming stream). Sets state to kClosing.
- void OnClosingProcedureStartedRemotely(int sid);
- // The closing procedure is complete; both incoming and outgoing stream
- // resets are done and the channel can transition to kClosed. Called
- // asynchronously after RemoveSctpDataStream.
- void OnClosingProcedureComplete(int sid);
- // Called when the transport channel is created.
- // Only needs to be called for SCTP data channels.
- void OnTransportChannelCreated();
- // Called when the transport channel is unusable.
- // This method makes sure the DataChannel is disconnected and changes state
- // to kClosed.
- void OnTransportChannelClosed();
-
- /*******************************************
- * The following methods are for RTP only. *
- *******************************************/
-
- // The remote peer requested that this channel should be closed.
- void RemotePeerRequestClose();
- // Set the SSRC this channel should use to send data on the
- // underlying data engine. |send_ssrc| == 0 means that the channel is no
- // longer part of the session negotiation.
- void SetSendSsrc(uint32_t send_ssrc);
- // Set the SSRC this channel should use to receive data from the
- // underlying data engine.
- void SetReceiveSsrc(uint32_t receive_ssrc);
-
- cricket::DataChannelType data_channel_type() const {
- return data_channel_type_;
- }
-
- // Emitted when state transitions to kOpen.
- sigslot::signal1<DataChannel*> SignalOpened;
- // Emitted when state transitions to kClosed.
- // In the case of SCTP channels, this signal can be used to tell when the
- // channel's sid is free.
- sigslot::signal1<DataChannel*> SignalClosed;
-
- // Reset the allocator for internal ID values for testing, so that
- // the internal IDs generated are predictable. Test only.
- static void ResetInternalIdAllocatorForTesting(int new_value);
-
- protected:
- DataChannel(DataChannelProviderInterface* client,
- cricket::DataChannelType dct,
- const std::string& label);
- virtual ~DataChannel();
-
- private:
- // A packet queue which tracks the total queued bytes. Queued packets are
- // owned by this class.
- class PacketQueue final {
- public:
- size_t byte_count() const { return byte_count_; }
-
- bool Empty() const;
-
- std::unique_ptr<DataBuffer> PopFront();
-
- void PushFront(std::unique_ptr<DataBuffer> packet);
- void PushBack(std::unique_ptr<DataBuffer> packet);
-
- void Clear();
-
- void Swap(PacketQueue* other);
-
- private:
- std::deque<std::unique_ptr<DataBuffer>> packets_;
- size_t byte_count_ = 0;
- };
-
- // The OPEN(_ACK) signaling state.
- enum HandshakeState {
- kHandshakeInit,
- kHandshakeShouldSendOpen,
- kHandshakeShouldSendAck,
- kHandshakeWaitingForAck,
- kHandshakeReady
- };
-
- bool Init(const InternalDataChannelInit& config);
- void UpdateState();
- void SetState(DataState state);
- void DisconnectFromProvider();
-
- void DeliverQueuedReceivedData();
-
- void SendQueuedDataMessages();
- bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked);
- bool QueueSendDataMessage(const DataBuffer& buffer);
-
- void SendQueuedControlMessages();
- void QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer);
- bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer);
-
- const int internal_id_;
- std::string label_;
- InternalDataChannelInit config_;
- DataChannelObserver* observer_;
- DataState state_;
- RTCError error_;
- uint32_t messages_sent_;
- uint64_t bytes_sent_;
- uint32_t messages_received_;
- uint64_t bytes_received_;
- // Number of bytes of data that have been queued using Send(). Increased
- // before each transport send and decreased after each successful send.
- uint64_t buffered_amount_;
- cricket::DataChannelType data_channel_type_;
- DataChannelProviderInterface* provider_;
- HandshakeState handshake_state_;
- bool connected_to_provider_;
- bool send_ssrc_set_;
- bool receive_ssrc_set_;
- bool writable_;
- // Did we already start the graceful SCTP closing procedure?
- bool started_closing_procedure_ = false;
- uint32_t send_ssrc_;
- uint32_t receive_ssrc_;
- // Control messages that always have to get sent out before any queued
- // data.
- PacketQueue queued_control_data_;
- PacketQueue queued_received_data_;
- PacketQueue queued_send_data_;
- rtc::AsyncInvoker invoker_;
-};
-
-// Define proxy for DataChannelInterface.
-BEGIN_SIGNALING_PROXY_MAP(DataChannel)
-PROXY_SIGNALING_THREAD_DESTRUCTOR()
-PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
-PROXY_METHOD0(void, UnregisterObserver)
-PROXY_CONSTMETHOD0(std::string, label)
-PROXY_CONSTMETHOD0(bool, reliable)
-PROXY_CONSTMETHOD0(bool, ordered)
-PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
-PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
-PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
-PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
-PROXY_CONSTMETHOD0(std::string, protocol)
-PROXY_CONSTMETHOD0(bool, negotiated)
-PROXY_CONSTMETHOD0(int, id)
-PROXY_CONSTMETHOD0(DataState, state)
-PROXY_CONSTMETHOD0(RTCError, error)
-PROXY_CONSTMETHOD0(uint32_t, messages_sent)
-PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
-PROXY_CONSTMETHOD0(uint32_t, messages_received)
-PROXY_CONSTMETHOD0(uint64_t, bytes_received)
-PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
-PROXY_METHOD0(void, Close)
-PROXY_METHOD1(bool, Send, const DataBuffer&)
-END_PROXY_MAP()
-
-} // namespace webrtc
-
-#endif // PC_DATA_CHANNEL_H_
diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc
index e9ea742c44..04a4bb6245 100644
--- a/pc/data_channel_controller.cc
+++ b/pc/data_channel_controller.cc
@@ -25,88 +25,74 @@ bool DataChannelController::HasDataChannels() const {
bool DataChannelController::SendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) {
- // RTC_DCHECK_RUN_ON(signaling_thread());
- if (data_channel_transport()) {
- SendDataParams send_params;
- send_params.type = ToWebrtcDataMessageType(params.type);
- send_params.ordered = params.ordered;
- if (params.max_rtx_count >= 0) {
- send_params.max_rtx_count = params.max_rtx_count;
- } else if (params.max_rtx_ms >= 0) {
- send_params.max_rtx_ms = params.max_rtx_ms;
- }
-
- RTCError error = network_thread()->Invoke<RTCError>(
- RTC_FROM_HERE, [this, params, send_params, payload] {
- return data_channel_transport()->SendData(params.sid, send_params,
- payload);
- });
-
- if (error.ok()) {
- *result = cricket::SendDataResult::SDR_SUCCESS;
- return true;
- } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
- // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked.
- // TODO(mellem): Stop using RTCError here and get rid of the mapping.
- *result = cricket::SendDataResult::SDR_BLOCK;
- return false;
- }
- *result = cricket::SendDataResult::SDR_ERROR;
- return false;
- } else if (rtp_data_channel()) {
+ if (data_channel_transport())
+ return DataChannelSendData(params, payload, result);
+ if (rtp_data_channel())
return rtp_data_channel()->SendData(params, payload, result);
- }
RTC_LOG(LS_ERROR) << "SendData called before transport is ready";
return false;
}
bool DataChannelController::ConnectDataChannel(
- DataChannel* webrtc_data_channel) {
+ RtpDataChannel* webrtc_data_channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
- if (!rtp_data_channel() && !data_channel_transport()) {
+ if (!rtp_data_channel()) {
// Don't log an error here, because DataChannels are expected to call
// ConnectDataChannel in this state. It's the only way to initially tell
// whether or not the underlying transport is ready.
return false;
}
- if (data_channel_transport()) {
- SignalDataChannelTransportWritable_s.connect(webrtc_data_channel,
- &DataChannel::OnChannelReady);
- SignalDataChannelTransportReceivedData_s.connect(
- webrtc_data_channel, &DataChannel::OnDataReceived);
- SignalDataChannelTransportChannelClosing_s.connect(
- webrtc_data_channel, &DataChannel::OnClosingProcedureStartedRemotely);
- SignalDataChannelTransportChannelClosed_s.connect(
- webrtc_data_channel, &DataChannel::OnClosingProcedureComplete);
- }
- if (rtp_data_channel()) {
- rtp_data_channel()->SignalReadyToSendData.connect(
- webrtc_data_channel, &DataChannel::OnChannelReady);
- rtp_data_channel()->SignalDataReceived.connect(
- webrtc_data_channel, &DataChannel::OnDataReceived);
- }
+ rtp_data_channel()->SignalReadyToSendData.connect(
+ webrtc_data_channel, &RtpDataChannel::OnChannelReady);
+ rtp_data_channel()->SignalDataReceived.connect(
+ webrtc_data_channel, &RtpDataChannel::OnDataReceived);
return true;
}
void DataChannelController::DisconnectDataChannel(
- DataChannel* webrtc_data_channel) {
+ RtpDataChannel* webrtc_data_channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
- if (!rtp_data_channel() && !data_channel_transport()) {
+ if (!rtp_data_channel()) {
RTC_LOG(LS_ERROR)
- << "DisconnectDataChannel called when rtp_data_channel_ and "
- "sctp_transport_ are NULL.";
+ << "DisconnectDataChannel called when rtp_data_channel_ is NULL.";
return;
}
- if (data_channel_transport()) {
- SignalDataChannelTransportWritable_s.disconnect(webrtc_data_channel);
- SignalDataChannelTransportReceivedData_s.disconnect(webrtc_data_channel);
- SignalDataChannelTransportChannelClosing_s.disconnect(webrtc_data_channel);
- SignalDataChannelTransportChannelClosed_s.disconnect(webrtc_data_channel);
+ rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel);
+ rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel);
+}
+
+bool DataChannelController::ConnectDataChannel(
+ SctpDataChannel* webrtc_data_channel) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (!data_channel_transport()) {
+ // Don't log an error here, because DataChannels are expected to call
+ // ConnectDataChannel in this state. It's the only way to initially tell
+ // whether or not the underlying transport is ready.
+ return false;
}
- if (rtp_data_channel()) {
- rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel);
- rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel);
+ SignalDataChannelTransportWritable_s.connect(
+ webrtc_data_channel, &SctpDataChannel::OnTransportReady);
+ SignalDataChannelTransportReceivedData_s.connect(
+ webrtc_data_channel, &SctpDataChannel::OnDataReceived);
+ SignalDataChannelTransportChannelClosing_s.connect(
+ webrtc_data_channel, &SctpDataChannel::OnClosingProcedureStartedRemotely);
+ SignalDataChannelTransportChannelClosed_s.connect(
+ webrtc_data_channel, &SctpDataChannel::OnClosingProcedureComplete);
+ return true;
+}
+
+void DataChannelController::DisconnectDataChannel(
+ SctpDataChannel* webrtc_data_channel) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (!data_channel_transport()) {
+ RTC_LOG(LS_ERROR)
+ << "DisconnectDataChannel called when sctp_transport_ is NULL.";
+ return;
}
+ SignalDataChannelTransportWritable_s.disconnect(webrtc_data_channel);
+ SignalDataChannelTransportReceivedData_s.disconnect(webrtc_data_channel);
+ SignalDataChannelTransportChannelClosing_s.disconnect(webrtc_data_channel);
+ SignalDataChannelTransportChannelClosed_s.disconnect(webrtc_data_channel);
}
void DataChannelController::AddSctpDataStream(int sid) {
@@ -146,6 +132,14 @@ void DataChannelController::OnDataReceived(
data_channel_transport_invoker_->AsyncInvoke<void>(
RTC_FROM_HERE, signaling_thread(), [this, params, buffer] {
RTC_DCHECK_RUN_ON(signaling_thread());
+ // TODO(bugs.webrtc.org/11547): The data being received should be
+ // delivered on the network thread. The way HandleOpenMessage_s works
+ // right now is that it's called for all types of buffers and operates
+ // as a selector function. Change this so that it's only called for
+ // buffers that it should be able to handle. Once we do that, we can
+ // deliver all other buffers on the network thread (change
+ // SignalDataChannelTransportReceivedData_s to
+ // SignalDataChannelTransportReceivedData_n).
if (!HandleOpenMessage_s(params, buffer)) {
SignalDataChannelTransportReceivedData_s(params, buffer);
}
@@ -193,6 +187,11 @@ void DataChannelController::OnTransportClosed() {
void DataChannelController::SetupDataChannelTransport_n() {
RTC_DCHECK_RUN_ON(network_thread());
data_channel_transport_invoker_ = std::make_unique<rtc::AsyncInvoker>();
+
+ // There's a new data channel transport. This needs to be signaled to the
+ // |sctp_data_channels_| so that they can reopen and reconnect. This is
+ // necessary when bundling is applied.
+ NotifyDataChannelsOfTransportCreated();
}
void DataChannelController::TeardownDataChannelTransport_n() {
@@ -219,17 +218,21 @@ void DataChannelController::OnTransportChanged(
// There's a new data channel transport. This needs to be signaled to the
// |sctp_data_channels_| so that they can reopen and reconnect. This is
// necessary when bundling is applied.
- data_channel_transport_invoker_->AsyncInvoke<void>(
- RTC_FROM_HERE, signaling_thread(), [this] {
- RTC_DCHECK_RUN_ON(signaling_thread());
- for (const auto& channel : sctp_data_channels_) {
- channel->OnTransportChannelCreated();
- }
- });
+ NotifyDataChannelsOfTransportCreated();
}
}
}
+std::vector<DataChannelStats> DataChannelController::GetDataChannelStats()
+ const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<DataChannelStats> stats;
+ stats.reserve(sctp_data_channels_.size());
+ for (const auto& channel : sctp_data_channels_)
+ stats.push_back(channel->GetStats());
+ return stats;
+}
+
bool DataChannelController::HandleOpenMessage_s(
const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
@@ -254,21 +257,19 @@ bool DataChannelController::HandleOpenMessage_s(
void DataChannelController::OnDataChannelOpenMessage(
const std::string& label,
const InternalDataChannelInit& config) {
- rtc::scoped_refptr<DataChannel> channel(
- InternalCreateDataChannel(label, &config));
+ rtc::scoped_refptr<DataChannelInterface> channel(
+ InternalCreateDataChannelWithProxy(label, &config));
if (!channel.get()) {
RTC_LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
return;
}
- rtc::scoped_refptr<DataChannelInterface> proxy_channel =
- DataChannelProxy::Create(signaling_thread(), channel);
- pc_->Observer()->OnDataChannel(std::move(proxy_channel));
+ pc_->Observer()->OnDataChannel(std::move(channel));
pc_->NoteDataAddedEvent();
}
-rtc::scoped_refptr<DataChannel>
-DataChannelController::InternalCreateDataChannel(
+rtc::scoped_refptr<DataChannelInterface>
+DataChannelController::InternalCreateDataChannelWithProxy(
const std::string& label,
const InternalDataChannelInit* config) {
RTC_DCHECK_RUN_ON(signaling_thread());
@@ -280,51 +281,78 @@ DataChannelController::InternalCreateDataChannel(
<< "InternalCreateDataChannel: Data is not supported in this call.";
return nullptr;
}
- InternalDataChannelInit new_config =
- config ? (*config) : InternalDataChannelInit();
- if (DataChannel::IsSctpLike(data_channel_type_)) {
- if (new_config.id < 0) {
- rtc::SSLRole role;
- if ((pc_->GetSctpSslRole(&role)) &&
- !sid_allocator_.AllocateSid(role, &new_config.id)) {
- RTC_LOG(LS_ERROR)
- << "No id can be allocated for the SCTP data channel.";
- return nullptr;
- }
- } else if (!sid_allocator_.ReserveSid(new_config.id)) {
- RTC_LOG(LS_ERROR) << "Failed to create a SCTP data channel "
- "because the id is already in use or out of range.";
- return nullptr;
+ if (IsSctpLike(data_channel_type())) {
+ rtc::scoped_refptr<SctpDataChannel> channel =
+ InternalCreateSctpDataChannel(label, config);
+ if (channel) {
+ return SctpDataChannel::CreateProxy(channel);
+ }
+ } else if (data_channel_type() == cricket::DCT_RTP) {
+ rtc::scoped_refptr<RtpDataChannel> channel =
+ InternalCreateRtpDataChannel(label, config);
+ if (channel) {
+ return RtpDataChannel::CreateProxy(channel);
}
}
- rtc::scoped_refptr<DataChannel> channel(
- DataChannel::Create(this, data_channel_type(), label, new_config));
+ return nullptr;
+}
+
+rtc::scoped_refptr<RtpDataChannel>
+DataChannelController::InternalCreateRtpDataChannel(
+ const std::string& label,
+ const DataChannelInit* config) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ DataChannelInit new_config = config ? (*config) : DataChannelInit();
+ rtc::scoped_refptr<RtpDataChannel> channel(
+ RtpDataChannel::Create(this, label, new_config, signaling_thread()));
if (!channel) {
- sid_allocator_.ReleaseSid(new_config.id);
return nullptr;
}
+ if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
+ RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label()
+ << " already exists.";
+ return nullptr;
+ }
+ rtp_data_channels_[channel->label()] = channel;
+ SignalRtpDataChannelCreated_(channel.get());
+ return channel;
+}
- if (channel->data_channel_type() == cricket::DCT_RTP) {
- if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
- RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label()
- << " already exists.";
+rtc::scoped_refptr<SctpDataChannel>
+DataChannelController::InternalCreateSctpDataChannel(
+ const std::string& label,
+ const InternalDataChannelInit* config) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ InternalDataChannelInit new_config =
+ config ? (*config) : InternalDataChannelInit();
+ if (new_config.id < 0) {
+ rtc::SSLRole role;
+ if ((pc_->GetSctpSslRole(&role)) &&
+ !sid_allocator_.AllocateSid(role, &new_config.id)) {
+ RTC_LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
return nullptr;
}
- rtp_data_channels_[channel->label()] = channel;
- } else {
- RTC_DCHECK(DataChannel::IsSctpLike(data_channel_type_));
- sctp_data_channels_.push_back(channel);
- channel->SignalClosed.connect(pc_,
- &PeerConnection::OnSctpDataChannelClosed);
+ } else if (!sid_allocator_.ReserveSid(new_config.id)) {
+ RTC_LOG(LS_ERROR) << "Failed to create a SCTP data channel "
+ "because the id is already in use or out of range.";
+ return nullptr;
}
- SignalDataChannelCreated_(channel.get());
+ rtc::scoped_refptr<SctpDataChannel> channel(SctpDataChannel::Create(
+ this, label, new_config, signaling_thread(), network_thread()));
+ if (!channel) {
+ sid_allocator_.ReleaseSid(new_config.id);
+ return nullptr;
+ }
+ sctp_data_channels_.push_back(channel);
+ channel->SignalClosed.connect(pc_, &PeerConnection::OnSctpDataChannelClosed);
+ SignalSctpDataChannelCreated_(channel.get());
return channel;
}
void DataChannelController::AllocateSctpSids(rtc::SSLRole role) {
RTC_DCHECK_RUN_ON(signaling_thread());
- std::vector<rtc::scoped_refptr<DataChannel>> channels_to_close;
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> channels_to_close;
for (const auto& channel : sctp_data_channels_) {
if (channel->id() < 0) {
int sid;
@@ -343,7 +371,7 @@ void DataChannelController::AllocateSctpSids(rtc::SSLRole role) {
}
}
-void DataChannelController::OnSctpDataChannelClosed(DataChannel* channel) {
+void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end();
++it) {
@@ -373,20 +401,20 @@ void DataChannelController::OnTransportChannelClosed() {
RTC_DCHECK_RUN_ON(signaling_thread());
// Use a temporary copy of the RTP/SCTP DataChannel list because the
// DataChannel may callback to us and try to modify the list.
- std::map<std::string, rtc::scoped_refptr<DataChannel>> temp_rtp_dcs;
+ std::map<std::string, rtc::scoped_refptr<RtpDataChannel>> temp_rtp_dcs;
temp_rtp_dcs.swap(rtp_data_channels_);
for (const auto& kv : temp_rtp_dcs) {
kv.second->OnTransportChannelClosed();
}
- std::vector<rtc::scoped_refptr<DataChannel>> temp_sctp_dcs;
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> temp_sctp_dcs;
temp_sctp_dcs.swap(sctp_data_channels_);
for (const auto& channel : temp_sctp_dcs) {
channel->OnTransportChannelClosed();
}
}
-DataChannel* DataChannelController::FindDataChannelBySid(int sid) const {
+SctpDataChannel* DataChannelController::FindDataChannelBySid(int sid) const {
RTC_DCHECK_RUN_ON(signaling_thread());
for (const auto& channel : sctp_data_channels_) {
if (channel->id() == sid) {
@@ -424,9 +452,10 @@ void DataChannelController::UpdateLocalRtpDataChannels(
void DataChannelController::UpdateRemoteRtpDataChannels(
const cricket::StreamParamsVec& streams) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
std::vector<std::string> existing_channels;
- RTC_DCHECK_RUN_ON(signaling_thread());
// Find new and active data channels.
for (const cricket::StreamParams& params : streams) {
// The data channel label is either the mslabel or the SSRC if the mslabel
@@ -447,12 +476,44 @@ void DataChannelController::UpdateRemoteRtpDataChannels(
UpdateClosingRtpDataChannels(existing_channels, false);
}
+cricket::DataChannelType DataChannelController::data_channel_type() const {
+ // TODO(bugs.webrtc.org/9987): Should be restricted to the signaling thread.
+ // RTC_DCHECK_RUN_ON(signaling_thread());
+ return data_channel_type_;
+}
+
+void DataChannelController::set_data_channel_type(
+ cricket::DataChannelType type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ data_channel_type_ = type;
+}
+
+DataChannelTransportInterface* DataChannelController::data_channel_transport()
+ const {
+ // TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the
+ // network thread.
+ // RTC_DCHECK_RUN_ON(network_thread());
+ return data_channel_transport_;
+}
+
+void DataChannelController::set_data_channel_transport(
+ DataChannelTransportInterface* transport) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ data_channel_transport_ = transport;
+}
+
+const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>*
+DataChannelController::rtp_data_channels() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return &rtp_data_channels_;
+}
+
void DataChannelController::UpdateClosingRtpDataChannels(
const std::vector<std::string>& active_channels,
bool is_local_update) {
auto it = rtp_data_channels_.begin();
while (it != rtp_data_channels_.end()) {
- DataChannel* data_channel = it->second;
+ RtpDataChannel* data_channel = it->second;
if (absl::c_linear_search(active_channels, data_channel->label())) {
++it;
continue;
@@ -464,7 +525,7 @@ void DataChannelController::UpdateClosingRtpDataChannels(
data_channel->RemotePeerRequestClose();
}
- if (data_channel->state() == DataChannel::kClosed) {
+ if (data_channel->state() == RtpDataChannel::kClosed) {
rtp_data_channels_.erase(it);
it = rtp_data_channels_.begin();
} else {
@@ -475,8 +536,11 @@ void DataChannelController::UpdateClosingRtpDataChannels(
void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label,
uint32_t remote_ssrc) {
- rtc::scoped_refptr<DataChannel> channel(
- InternalCreateDataChannel(label, nullptr));
+ if (data_channel_type() != cricket::DCT_RTP) {
+ return;
+ }
+ rtc::scoped_refptr<RtpDataChannel> channel(
+ InternalCreateRtpDataChannel(label, nullptr));
if (!channel.get()) {
RTC_LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
"CreateDataChannel failed.";
@@ -484,10 +548,59 @@ void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label,
}
channel->SetReceiveSsrc(remote_ssrc);
rtc::scoped_refptr<DataChannelInterface> proxy_channel =
- DataChannelProxy::Create(signaling_thread(), channel);
+ RtpDataChannel::CreateProxy(std::move(channel));
pc_->Observer()->OnDataChannel(std::move(proxy_channel));
}
+bool DataChannelController::DataChannelSendData(
+ const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) {
+ // TODO(bugs.webrtc.org/11547): Expect method to be called on the network
+ // thread instead. Remove the Invoke() below and move assocated state to
+ // the network thread.
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(data_channel_transport());
+
+ SendDataParams send_params;
+ send_params.type = ToWebrtcDataMessageType(params.type);
+ send_params.ordered = params.ordered;
+ if (params.max_rtx_count >= 0) {
+ send_params.max_rtx_count = params.max_rtx_count;
+ } else if (params.max_rtx_ms >= 0) {
+ send_params.max_rtx_ms = params.max_rtx_ms;
+ }
+
+ RTCError error = network_thread()->Invoke<RTCError>(
+ RTC_FROM_HERE, [this, params, send_params, payload] {
+ return data_channel_transport()->SendData(params.sid, send_params,
+ payload);
+ });
+
+ if (error.ok()) {
+ *result = cricket::SendDataResult::SDR_SUCCESS;
+ return true;
+ } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
+ // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked.
+ // TODO(mellem): Stop using RTCError here and get rid of the mapping.
+ *result = cricket::SendDataResult::SDR_BLOCK;
+ return false;
+ }
+ *result = cricket::SendDataResult::SDR_ERROR;
+ return false;
+}
+
+void DataChannelController::NotifyDataChannelsOfTransportCreated() {
+ RTC_DCHECK_RUN_ON(network_thread());
+ data_channel_transport_invoker_->AsyncInvoke<void>(
+ RTC_FROM_HERE, signaling_thread(), [this] {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const auto& channel : sctp_data_channels_) {
+ channel->OnTransportChannelCreated();
+ }
+ });
+}
+
rtc::Thread* DataChannelController::network_thread() const {
return pc_->network_thread();
}
diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h
index 60bcbb32a8..3daee11381 100644
--- a/pc/data_channel_controller.h
+++ b/pc/data_channel_controller.h
@@ -17,14 +17,16 @@
#include <vector>
#include "pc/channel.h"
-#include "pc/data_channel.h"
+#include "pc/rtp_data_channel.h"
+#include "pc/sctp_data_channel.h"
#include "rtc_base/weak_ptr.h"
namespace webrtc {
class PeerConnection;
-class DataChannelController : public DataChannelProviderInterface,
+class DataChannelController : public RtpDataChannelProviderInterface,
+ public SctpDataChannelProviderInterface,
public DataChannelSink {
public:
explicit DataChannelController(PeerConnection* pc) : pc_(pc) {}
@@ -35,12 +37,15 @@ class DataChannelController : public DataChannelProviderInterface,
DataChannelController(DataChannelController&&) = delete;
DataChannelController& operator=(DataChannelController&& other) = delete;
- // Implements DataChannelProviderInterface.
+ // Implements RtpDataChannelProviderInterface/
+ // SctpDataChannelProviderInterface.
bool SendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) override;
- bool ConnectDataChannel(DataChannel* webrtc_data_channel) override;
- void DisconnectDataChannel(DataChannel* webrtc_data_channel) override;
+ bool ConnectDataChannel(RtpDataChannel* webrtc_data_channel) override;
+ void DisconnectDataChannel(RtpDataChannel* webrtc_data_channel) override;
+ bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
+ void DisconnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
void AddSctpDataStream(int sid) override;
void RemoveSctpDataStream(int sid) override;
bool ReadyToSendData() const override;
@@ -64,15 +69,18 @@ class DataChannelController : public DataChannelProviderInterface,
void OnTransportChanged(
DataChannelTransportInterface* data_channel_transport);
+ // Called from PeerConnection::GetDataChannelStats on the signaling thread.
+ std::vector<DataChannelStats> GetDataChannelStats() const;
+
// Creates channel and adds it to the collection of DataChannels that will
- // be offered in a SessionDescription.
- rtc::scoped_refptr<DataChannel> InternalCreateDataChannel(
+ // be offered in a SessionDescription, and wraps it in a proxy object.
+ rtc::scoped_refptr<DataChannelInterface> InternalCreateDataChannelWithProxy(
const std::string& label,
const InternalDataChannelInit*
config) /* RTC_RUN_ON(signaling_thread()) */;
void AllocateSctpSids(rtc::SSLRole role);
- DataChannel* FindDataChannelBySid(int sid) const;
+ SctpDataChannel* FindDataChannelBySid(int sid) const;
// Checks if any data channel has been added.
bool HasDataChannels() const;
@@ -89,45 +97,42 @@ class DataChannelController : public DataChannelProviderInterface,
void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
// Accessors
- cricket::DataChannelType data_channel_type() const {
- return data_channel_type_;
- }
- void set_data_channel_type(cricket::DataChannelType type) {
- data_channel_type_ = type;
- }
+ cricket::DataChannelType data_channel_type() const;
+ void set_data_channel_type(cricket::DataChannelType type);
cricket::RtpDataChannel* rtp_data_channel() const {
return rtp_data_channel_;
}
void set_rtp_data_channel(cricket::RtpDataChannel* channel) {
rtp_data_channel_ = channel;
}
- DataChannelTransportInterface* data_channel_transport() const {
- return data_channel_transport_;
- }
- void set_data_channel_transport(DataChannelTransportInterface* transport) {
- data_channel_transport_ = transport;
- }
- const std::map<std::string, rtc::scoped_refptr<DataChannel>>*
- rtp_data_channels() const {
+ DataChannelTransportInterface* data_channel_transport() const;
+ void set_data_channel_transport(DataChannelTransportInterface* transport);
+ const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>*
+ rtp_data_channels() const;
+
+ sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() {
RTC_DCHECK_RUN_ON(signaling_thread());
- return &rtp_data_channels_;
+ return SignalRtpDataChannelCreated_;
}
- const std::vector<rtc::scoped_refptr<DataChannel>>* sctp_data_channels()
- const {
+ sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() {
RTC_DCHECK_RUN_ON(signaling_thread());
- return &sctp_data_channels_;
- }
-
- sigslot::signal1<DataChannel*>& SignalDataChannelCreated() {
- RTC_DCHECK_RUN_ON(signaling_thread());
- return SignalDataChannelCreated_;
+ return SignalSctpDataChannelCreated_;
}
// Called when the transport for the data channels is closed or destroyed.
void OnTransportChannelClosed();
- void OnSctpDataChannelClosed(DataChannel* channel);
+ void OnSctpDataChannelClosed(SctpDataChannel* channel);
private:
+ rtc::scoped_refptr<RtpDataChannel> InternalCreateRtpDataChannel(
+ const std::string& label,
+ const DataChannelInit* config) /* RTC_RUN_ON(signaling_thread()) */;
+
+ rtc::scoped_refptr<SctpDataChannel> InternalCreateSctpDataChannel(
+ const std::string& label,
+ const InternalDataChannelInit*
+ config) /* RTC_RUN_ON(signaling_thread()) */;
+
// Parses and handles open messages. Returns true if the message is an open
// message, false otherwise.
bool HandleOpenMessage_s(const cricket::ReceiveDataParams& params,
@@ -146,6 +151,15 @@ class DataChannelController : public DataChannelProviderInterface,
const std::vector<std::string>& active_channels,
bool is_local_update) RTC_RUN_ON(signaling_thread());
+ // Called from SendData when data_channel_transport() is true.
+ bool DataChannelSendData(const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result);
+
+ // Called when all data channels need to be notified of a transport channel
+ // (calls OnTransportChannelCreated on the signaling thread).
+ void NotifyDataChannelsOfTransportCreated();
+
rtc::Thread* network_thread() const;
rtc::Thread* signaling_thread() const;
@@ -178,17 +192,19 @@ class DataChannelController : public DataChannelProviderInterface,
// signaling and some other thread.
SctpSidAllocator sid_allocator_ /* RTC_GUARDED_BY(signaling_thread()) */;
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_
RTC_GUARDED_BY(signaling_thread());
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_to_free_
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_to_free_
RTC_GUARDED_BY(signaling_thread());
// Map of label -> DataChannel
- std::map<std::string, rtc::scoped_refptr<DataChannel>> rtp_data_channels_
+ std::map<std::string, rtc::scoped_refptr<RtpDataChannel>> rtp_data_channels_
RTC_GUARDED_BY(signaling_thread());
// Signals from |data_channel_transport_|. These are invoked on the
// signaling thread.
+ // TODO(bugs.webrtc.org/11547): These '_s' signals likely all belong on the
+ // network thread.
sigslot::signal1<bool> SignalDataChannelTransportWritable_s
RTC_GUARDED_BY(signaling_thread());
sigslot::signal2<const cricket::ReceiveDataParams&,
@@ -200,7 +216,9 @@ class DataChannelController : public DataChannelProviderInterface,
sigslot::signal1<int> SignalDataChannelTransportChannelClosed_s
RTC_GUARDED_BY(signaling_thread());
- sigslot::signal1<DataChannel*> SignalDataChannelCreated_
+ sigslot::signal1<RtpDataChannel*> SignalRtpDataChannelCreated_
+ RTC_GUARDED_BY(signaling_thread());
+ sigslot::signal1<SctpDataChannel*> SignalSctpDataChannelCreated_
RTC_GUARDED_BY(signaling_thread());
// Used to invoke data channel transport signals on the signaling thread.
diff --git a/pc/data_channel_unittest.cc b/pc/data_channel_unittest.cc
index 6bb8f7e5c7..7048dc82b7 100644
--- a/pc/data_channel_unittest.cc
+++ b/pc/data_channel_unittest.cc
@@ -8,20 +8,20 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "pc/data_channel.h"
-
#include <string.h>
#include <memory>
#include <vector>
+#include "pc/sctp_data_channel.h"
#include "pc/sctp_utils.h"
#include "pc/test/fake_data_channel_provider.h"
#include "rtc_base/gunit.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "test/gtest.h"
-using webrtc::DataChannel;
+using webrtc::DataChannelInterface;
+using webrtc::SctpDataChannel;
using webrtc::SctpSidAllocator;
static constexpr int kDefaultTimeout = 10000;
@@ -64,14 +64,16 @@ class FakeDataChannelObserver : public webrtc::DataChannelObserver {
// TODO(deadbeef): The fact that these tests use a fake provider makes them not
// too valuable. Should rewrite using the
// peerconnection_datachannel_unittest.cc infrastructure.
+// TODO(bugs.webrtc.org/11547): Incorporate a dedicated network thread.
class SctpDataChannelTest : public ::testing::Test {
protected:
SctpDataChannelTest()
: provider_(new FakeDataChannelProvider()),
- webrtc_data_channel_(DataChannel::Create(provider_.get(),
- cricket::DCT_SCTP,
- "test",
- init_)) {}
+ webrtc_data_channel_(SctpDataChannel::Create(provider_.get(),
+ "test",
+ init_,
+ rtc::Thread::Current(),
+ rtc::Thread::Current())) {}
void SetChannelReady() {
provider_->set_transport_available(true);
@@ -90,7 +92,7 @@ class SctpDataChannelTest : public ::testing::Test {
webrtc::InternalDataChannelInit init_;
std::unique_ptr<FakeDataChannelProvider> provider_;
std::unique_ptr<FakeDataChannelObserver> observer_;
- rtc::scoped_refptr<DataChannel> webrtc_data_channel_;
+ rtc::scoped_refptr<SctpDataChannel> webrtc_data_channel_;
};
class StateSignalsListener : public sigslot::has_slots<> {
@@ -98,9 +100,9 @@ class StateSignalsListener : public sigslot::has_slots<> {
int opened_count() const { return opened_count_; }
int closed_count() const { return closed_count_; }
- void OnSignalOpened(DataChannel* data_channel) { ++opened_count_; }
+ void OnSignalOpened(DataChannelInterface* data_channel) { ++opened_count_; }
- void OnSignalClosed(DataChannel* data_channel) { ++closed_count_; }
+ void OnSignalClosed(DataChannelInterface* data_channel) { ++closed_count_; }
private:
int opened_count_ = 0;
@@ -110,8 +112,9 @@ class StateSignalsListener : public sigslot::has_slots<> {
// Verifies that the data channel is connected to the transport after creation.
TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
provider_->set_transport_available(true);
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init_);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", init_,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_TRUE(provider_->IsConnected(dc.get()));
// The sid is not set yet, so it should not have added the streams.
@@ -304,8 +307,9 @@ TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
SetChannelReady();
webrtc::InternalDataChannelInit init;
init.id = 1;
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state());
EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(), 1000);
}
@@ -317,8 +321,9 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
webrtc::InternalDataChannelInit init;
init.id = 1;
init.ordered = false;
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -347,8 +352,9 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
webrtc::InternalDataChannelInit init;
init.id = 1;
init.ordered = false;
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -448,8 +454,9 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
config.open_handshake_role = webrtc::InternalDataChannelInit::kNone;
SetChannelReady();
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", config,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(0U, provider_->last_send_data_params().ssrc);
@@ -511,8 +518,9 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
config.open_handshake_role = webrtc::InternalDataChannelInit::kAcker;
SetChannelReady();
- rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config);
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ SctpDataChannel::Create(provider_.get(), "test1", config,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -630,9 +638,9 @@ TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) {
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
webrtc_data_channel_->state(), kDefaultTimeout);
EXPECT_FALSE(webrtc_data_channel_->error().ok());
- EXPECT_EQ(webrtc::RTCErrorType::NETWORK_ERROR,
+ EXPECT_EQ(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA,
webrtc_data_channel_->error().type());
- EXPECT_EQ(webrtc::RTCErrorDetailType::NONE,
+ EXPECT_EQ(webrtc::RTCErrorDetailType::SCTP_FAILURE,
webrtc_data_channel_->error().error_detail());
}
diff --git a/pc/data_channel_utils.cc b/pc/data_channel_utils.cc
new file mode 100644
index 0000000000..51d6af941f
--- /dev/null
+++ b/pc/data_channel_utils.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/data_channel_utils.h"
+
+namespace webrtc {
+
+bool PacketQueue::Empty() const {
+ return packets_.empty();
+}
+
+std::unique_ptr<DataBuffer> PacketQueue::PopFront() {
+ RTC_DCHECK(!packets_.empty());
+ byte_count_ -= packets_.front()->size();
+ std::unique_ptr<DataBuffer> packet = std::move(packets_.front());
+ packets_.pop_front();
+ return packet;
+}
+
+void PacketQueue::PushFront(std::unique_ptr<DataBuffer> packet) {
+ byte_count_ += packet->size();
+ packets_.push_front(std::move(packet));
+}
+
+void PacketQueue::PushBack(std::unique_ptr<DataBuffer> packet) {
+ byte_count_ += packet->size();
+ packets_.push_back(std::move(packet));
+}
+
+void PacketQueue::Clear() {
+ packets_.clear();
+ byte_count_ = 0;
+}
+
+void PacketQueue::Swap(PacketQueue* other) {
+ size_t other_byte_count = other->byte_count_;
+ other->byte_count_ = byte_count_;
+ byte_count_ = other_byte_count;
+
+ other->packets_.swap(packets_);
+}
+
+bool IsSctpLike(cricket::DataChannelType type) {
+ return type == cricket::DCT_SCTP;
+}
+
+} // namespace webrtc
diff --git a/pc/data_channel_utils.h b/pc/data_channel_utils.h
new file mode 100644
index 0000000000..13c6620cd8
--- /dev/null
+++ b/pc/data_channel_utils.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DATA_CHANNEL_UTILS_H_
+#define PC_DATA_CHANNEL_UTILS_H_
+
+#include <deque>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/data_channel_interface.h"
+#include "media/base/media_engine.h"
+
+namespace webrtc {
+
+// A packet queue which tracks the total queued bytes. Queued packets are
+// owned by this class.
+class PacketQueue final {
+ public:
+ size_t byte_count() const { return byte_count_; }
+
+ bool Empty() const;
+
+ std::unique_ptr<DataBuffer> PopFront();
+
+ void PushFront(std::unique_ptr<DataBuffer> packet);
+ void PushBack(std::unique_ptr<DataBuffer> packet);
+
+ void Clear();
+
+ void Swap(PacketQueue* other);
+
+ private:
+ std::deque<std::unique_ptr<DataBuffer>> packets_;
+ size_t byte_count_ = 0;
+};
+
+struct DataChannelStats {
+ int internal_id;
+ int id;
+ std::string label;
+ std::string protocol;
+ DataChannelInterface::DataState state;
+ uint32_t messages_sent;
+ uint32_t messages_received;
+ uint64_t bytes_sent;
+ uint64_t bytes_received;
+};
+
+bool IsSctpLike(cricket::DataChannelType type);
+
+} // namespace webrtc
+
+#endif // PC_DATA_CHANNEL_UTILS_H_
diff --git a/pc/datagram_rtp_transport.cc b/pc/datagram_rtp_transport.cc
deleted file mode 100644
index ad1e6dc995..0000000000
--- a/pc/datagram_rtp_transport.cc
+++ /dev/null
@@ -1,380 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "pc/datagram_rtp_transport.h"
-
-#include <algorithm>
-#include <memory>
-#include <utility>
-
-#include "absl/memory/memory.h"
-#include "absl/strings/string_view.h"
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "media/base/rtp_utils.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "modules/rtp_rtcp/source/rtp_packet.h"
-#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "p2p/base/dtls_transport_internal.h"
-#include "p2p/base/packet_transport_internal.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/dscp.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/rtc_certificate.h"
-#include "rtc_base/ssl_stream_adapter.h"
-#include "rtc_base/stream.h"
-#include "rtc_base/thread.h"
-#include "system_wrappers/include/field_trial.h"
-
-namespace webrtc {
-
-namespace {
-
-// Field trials.
-// Disable datagram to RTCP feedback translation and enable RTCP feedback loop
-// on top of datagram feedback loop. Note that two
-// feedback loops add unneccesary overhead, so it's preferable to use feedback
-// loop provided by datagram transport and convert datagram ACKs to RTCP ACKs,
-// but enabling RTCP feedback loop may be useful in tests and experiments.
-const char kDisableDatagramToRtcpFeebackTranslationFieldTrial[] =
- "WebRTC-kDisableDatagramToRtcpFeebackTranslation";
-
-} // namespace
-
-// Maximum packet size of RTCP feedback packet for allocation. We re-create RTCP
-// feedback packets when we get ACK notifications from datagram transport. Our
-// rtcp feedback packets contain only 1 ACK, so they are much smaller than 1250.
-constexpr size_t kMaxRtcpFeedbackPacketSize = 1250;
-
-DatagramRtpTransport::DatagramRtpTransport(
- const std::vector<RtpExtension>& rtp_header_extensions,
- cricket::IceTransportInternal* ice_transport,
- DatagramTransportInterface* datagram_transport)
- : ice_transport_(ice_transport),
- datagram_transport_(datagram_transport),
- disable_datagram_to_rtcp_feeback_translation_(field_trial::IsEnabled(
- kDisableDatagramToRtcpFeebackTranslationFieldTrial)) {
- // Save extension map for parsing RTP packets (we only need transport
- // sequence numbers).
- const RtpExtension* transport_sequence_number_extension =
- RtpExtension::FindHeaderExtensionByUri(rtp_header_extensions,
- TransportSequenceNumber::kUri);
-
- if (transport_sequence_number_extension != nullptr) {
- rtp_header_extension_map_.Register<TransportSequenceNumber>(
- transport_sequence_number_extension->id);
- } else {
- RTC_LOG(LS_ERROR) << "Transport sequence numbers are not supported in "
- "datagram transport connection";
- }
-
- RTC_DCHECK(ice_transport_);
- RTC_DCHECK(datagram_transport_);
-
- ice_transport_->SignalNetworkRouteChanged.connect(
- this, &DatagramRtpTransport::OnNetworkRouteChanged);
- // Subscribe to DatagramTransport to read incoming packets.
- datagram_transport_->SetDatagramSink(this);
- datagram_transport_->SetTransportStateCallback(this);
-}
-
-DatagramRtpTransport::~DatagramRtpTransport() {
- // Unsubscribe from DatagramTransport sinks.
- datagram_transport_->SetDatagramSink(nullptr);
- datagram_transport_->SetTransportStateCallback(nullptr);
-}
-
-bool DatagramRtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Assign and increment datagram_id.
- const DatagramId datagram_id = current_datagram_id_++;
-
- // Send as is (without extracting transport sequence number) for
- // RTP packets if we are not doing datagram => RTCP feedback translation.
- if (disable_datagram_to_rtcp_feeback_translation_) {
- // Even if we are not extracting transport sequence number we need to
- // propagate "Sent" notification for both RTP and RTCP packets. For this
- // reason we need save options.packet_id in packet map.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
-
- return SendDatagram(*packet, datagram_id);
- }
-
- // Parse RTP packet.
- RtpPacket rtp_packet(&rtp_header_extension_map_);
- // TODO(mellem): Verify that this doesn't mangle something (it shouldn't).
- if (!rtp_packet.Parse(*packet)) {
- RTC_NOTREACHED() << "Failed to parse outgoing RtpPacket, len="
- << packet->size()
- << ", options.packet_id=" << options.packet_id;
- return -1;
- }
-
- // Try to get transport sequence number.
- uint16_t transport_senquence_number;
- if (!rtp_packet.GetExtension<TransportSequenceNumber>(
- &transport_senquence_number)) {
- // Save packet info without transport sequence number.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
-
- RTC_LOG(LS_VERBOSE)
- << "Sending rtp packet without transport sequence number, packet="
- << rtp_packet.ToString();
-
- return SendDatagram(*packet, datagram_id);
- }
-
- // Save packet info with sequence number and ssrc so we could reconstruct
- // RTCP feedback packet when we receive datagram ACK.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(
- options.packet_id, rtp_packet.Ssrc(), transport_senquence_number);
-
- // Since datagram transport provides feedback and timestamps, we do not need
- // to send transport sequence number, so we remove it from RTP packet. Later
- // when we get Ack for sent datagram, we will re-create RTCP feedback packet.
- if (!rtp_packet.RemoveExtension(TransportSequenceNumber::kId)) {
- RTC_NOTREACHED() << "Failed to remove transport sequence number, packet="
- << rtp_packet.ToString();
- return -1;
- }
-
- RTC_LOG(LS_VERBOSE) << "Removed transport_senquence_number="
- << transport_senquence_number
- << " from packet=" << rtp_packet.ToString()
- << ", saved bytes=" << packet->size() - rtp_packet.size();
-
- return SendDatagram(
- rtc::ArrayView<const uint8_t>(rtp_packet.data(), rtp_packet.size()),
- datagram_id);
-}
-
-bool DatagramRtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Assign and increment datagram_id.
- const DatagramId datagram_id = current_datagram_id_++;
-
- // Even if we are not extracting transport sequence number we need to
- // propagate "Sent" notification for both RTP and RTCP packets. For this
- // reason we need save options.packet_id in packet map.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
- return SendDatagram(*packet, datagram_id);
-}
-
-bool DatagramRtpTransport::SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) {
- return datagram_transport_->SendDatagram(data, datagram_id).ok();
-}
-
-void DatagramRtpTransport::OnDatagramReceived(
- rtc::ArrayView<const uint8_t> data) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- rtc::ArrayView<const char> cdata(reinterpret_cast<const char*>(data.data()),
- data.size());
- if (cricket::InferRtpPacketType(cdata) == cricket::RtpPacketType::kRtcp) {
- rtc::CopyOnWriteBuffer buffer(data.data(), data.size());
- SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1);
- return;
- }
-
- // TODO(sukhanov): I am not filling out time, but on my video quality
- // test in WebRTC the time was not set either and higher layers of the stack
- // overwrite -1 with current current rtc time. Leaveing comment for now to
- // make sure it works as expected.
- RtpPacketReceived parsed_packet(&rtp_header_extension_map_);
- if (!parsed_packet.Parse(data)) {
- RTC_LOG(LS_ERROR) << "Failed to parse incoming RTP packet";
- return;
- }
- if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
- RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: "
- << RtpDemuxer::DescribePacket(parsed_packet);
- }
-}
-
-void DatagramRtpTransport::OnDatagramSent(DatagramId datagram_id) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Find packet_id and propagate OnPacketSent notification.
- const auto& it = sent_rtp_packet_map_.find(datagram_id);
- if (it == sent_rtp_packet_map_.end()) {
- RTC_NOTREACHED() << "Did not find sent packet info for sent datagram_id="
- << datagram_id;
- return;
- }
-
- // Also see how DatagramRtpTransport::OnSentPacket handles OnSentPacket
- // notification from ICE in bypass mode.
- rtc::SentPacket sent_packet(/*packet_id=*/it->second.packet_id,
- rtc::TimeMillis());
-
- SignalSentPacket(sent_packet);
-}
-
-bool DatagramRtpTransport::GetAndRemoveSentPacketInfo(
- DatagramId datagram_id,
- SentPacketInfo* sent_packet_info) {
- RTC_CHECK(sent_packet_info != nullptr);
-
- const auto& it = sent_rtp_packet_map_.find(datagram_id);
- if (it == sent_rtp_packet_map_.end()) {
- return false;
- }
-
- *sent_packet_info = it->second;
- sent_rtp_packet_map_.erase(it);
- return true;
-}
-
-void DatagramRtpTransport::OnDatagramAcked(const DatagramAck& ack) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- SentPacketInfo sent_packet_info;
- if (!GetAndRemoveSentPacketInfo(ack.datagram_id, &sent_packet_info)) {
- // TODO(sukhanov): If OnDatagramAck() can come after OnDatagramLost(),
- // datagram_id is already deleted and we may need to relax the CHECK below.
- // It's probably OK to ignore such datagrams, because it's been a few RTTs
- // anyway since they were sent.
- RTC_NOTREACHED() << "Did not find sent packet info for datagram_id="
- << ack.datagram_id;
- return;
- }
-
- RTC_LOG(LS_VERBOSE) << "Datagram acked, ack.datagram_id=" << ack.datagram_id
- << ", sent_packet_info.packet_id="
- << sent_packet_info.packet_id
- << ", sent_packet_info.transport_sequence_number="
- << sent_packet_info.transport_sequence_number.value_or(-1)
- << ", sent_packet_info.ssrc="
- << sent_packet_info.ssrc.value_or(-1)
- << ", receive_timestamp_ms="
- << ack.receive_timestamp.ms();
-
- // If transport sequence number was not present in RTP packet, we do not need
- // to propagate RTCP feedback.
- if (!sent_packet_info.transport_sequence_number) {
- return;
- }
-
- // TODO(sukhanov): We noticed that datagram transport implementations can
- // return zero timestamps in the middle of the call. This is workaround to
- // avoid propagating zero timestamps, but we need to understand why we have
- // them in the first place.
- int64_t receive_timestamp_us = ack.receive_timestamp.us();
-
- if (receive_timestamp_us == 0) {
- receive_timestamp_us = previous_nonzero_timestamp_us_;
- } else {
- previous_nonzero_timestamp_us_ = receive_timestamp_us;
- }
-
- // Ssrc must be provided in packet info if transport sequence number is set,
- // which is guaranteed by SentPacketInfo constructor.
- RTC_CHECK(sent_packet_info.ssrc);
-
- // Recreate RTCP feedback packet.
- rtcp::TransportFeedback feedback_packet;
- feedback_packet.SetMediaSsrc(*sent_packet_info.ssrc);
-
- const uint16_t transport_sequence_number =
- sent_packet_info.transport_sequence_number.value();
-
- feedback_packet.SetBase(transport_sequence_number, receive_timestamp_us);
- feedback_packet.AddReceivedPacket(transport_sequence_number,
- receive_timestamp_us);
-
- rtc::CopyOnWriteBuffer buffer(kMaxRtcpFeedbackPacketSize);
- size_t index = 0;
- if (!feedback_packet.Create(buffer.data(), &index, buffer.capacity(),
- nullptr)) {
- RTC_NOTREACHED() << "Failed to create RTCP feedback packet";
- return;
- }
-
- RTC_CHECK_GT(index, 0);
- RTC_CHECK_LE(index, kMaxRtcpFeedbackPacketSize);
-
- // Propagage created RTCP packet as normal incoming packet.
- buffer.SetSize(index);
- SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1);
-}
-
-void DatagramRtpTransport::OnDatagramLost(DatagramId datagram_id) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- RTC_LOG(LS_INFO) << "Datagram lost, datagram_id=" << datagram_id;
-
- SentPacketInfo sent_packet_info;
- if (!GetAndRemoveSentPacketInfo(datagram_id, &sent_packet_info)) {
- RTC_NOTREACHED() << "Did not find sent packet info for lost datagram_id="
- << datagram_id;
- }
-}
-
-void DatagramRtpTransport::OnStateChanged(MediaTransportState state) {
- state_ = state;
- SignalWritableState(state_ == MediaTransportState::kWritable);
- if (state_ == MediaTransportState::kWritable) {
- SignalReadyToSend(true);
- }
-}
-
-const std::string& DatagramRtpTransport::transport_name() const {
- return ice_transport_->transport_name();
-}
-
-int DatagramRtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) {
- return ice_transport_->SetOption(opt, value);
-}
-
-int DatagramRtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) {
- return -1;
-}
-
-bool DatagramRtpTransport::IsReadyToSend() const {
- return state_ == MediaTransportState::kWritable;
-}
-
-bool DatagramRtpTransport::IsWritable(bool /*rtcp*/) const {
- return state_ == MediaTransportState::kWritable;
-}
-
-void DatagramRtpTransport::UpdateRtpHeaderExtensionMap(
- const cricket::RtpHeaderExtensions& header_extensions) {
- rtp_header_extension_map_ = RtpHeaderExtensionMap(header_extensions);
-}
-
-bool DatagramRtpTransport::RegisterRtpDemuxerSink(
- const RtpDemuxerCriteria& criteria,
- RtpPacketSinkInterface* sink) {
- rtp_demuxer_.RemoveSink(sink);
- return rtp_demuxer_.AddSink(criteria, sink);
-}
-
-bool DatagramRtpTransport::UnregisterRtpDemuxerSink(
- RtpPacketSinkInterface* sink) {
- return rtp_demuxer_.RemoveSink(sink);
-}
-
-void DatagramRtpTransport::OnNetworkRouteChanged(
- absl::optional<rtc::NetworkRoute> network_route) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
- SignalNetworkRouteChanged(network_route);
-}
-
-} // namespace webrtc
diff --git a/pc/datagram_rtp_transport.h b/pc/datagram_rtp_transport.h
deleted file mode 100644
index f9684c69c0..0000000000
--- a/pc/datagram_rtp_transport.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_DATAGRAM_RTP_TRANSPORT_H_
-#define PC_DATAGRAM_RTP_TRANSPORT_H_
-
-#include <map>
-#include <memory>
-#include <string>
-#include <vector>
-
-#include "api/crypto/crypto_options.h"
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
-#include "p2p/base/ice_transport_internal.h"
-#include "p2p/base/packet_transport_internal.h"
-#include "pc/rtp_transport_internal.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/buffer_queue.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/ssl_stream_adapter.h"
-#include "rtc_base/stream.h"
-#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/thread_checker.h"
-
-namespace webrtc {
-
-constexpr int kDatagramDtlsAdaptorComponent = -1;
-
-// RTP transport which uses the DatagramTransportInterface to send and receive
-// packets.
-class DatagramRtpTransport : public RtpTransportInternal,
- public webrtc::DatagramSinkInterface,
- public webrtc::MediaTransportStateCallback {
- public:
- DatagramRtpTransport(
- const std::vector<webrtc::RtpExtension>& rtp_header_extensions,
- cricket::IceTransportInternal* ice_transport,
- DatagramTransportInterface* datagram_transport);
-
- ~DatagramRtpTransport() override;
-
- // =====================================================
- // Overrides for webrtc::DatagramTransportSinkInterface
- // and MediaTransportStateCallback
- // =====================================================
- void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) override;
-
- void OnDatagramSent(webrtc::DatagramId datagram_id) override;
-
- void OnDatagramAcked(const webrtc::DatagramAck& ack) override;
-
- void OnDatagramLost(webrtc::DatagramId datagram_id) override;
-
- void OnStateChanged(webrtc::MediaTransportState state) override;
-
- // =====================================================
- // RtpTransportInternal overrides
- // =====================================================
- bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) override;
-
- bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) override;
-
- const std::string& transport_name() const override;
-
- // Datagram transport always muxes RTCP.
- bool rtcp_mux_enabled() const override { return true; }
- void SetRtcpMuxEnabled(bool enable) override {}
-
- int SetRtpOption(rtc::Socket::Option opt, int value) override;
- int SetRtcpOption(rtc::Socket::Option opt, int value) override;
-
- bool IsReadyToSend() const override;
-
- bool IsWritable(bool rtcp) const override;
-
- bool IsSrtpActive() const override { return false; }
-
- void UpdateRtpHeaderExtensionMap(
- const cricket::RtpHeaderExtensions& header_extensions) override;
-
- bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
- RtpPacketSinkInterface* sink) override;
-
- bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override;
-
- private:
- // RTP/RTCP packet info stored for each sent packet.
- struct SentPacketInfo {
- // RTP packet info with ssrc and transport sequence number.
- SentPacketInfo(int64_t packet_id,
- uint32_t ssrc,
- uint16_t transport_sequence_number)
- : ssrc(ssrc),
- transport_sequence_number(transport_sequence_number),
- packet_id(packet_id) {}
-
- // Packet info without SSRC and transport sequence number used for RTCP
- // packets, RTP packets when transport sequence number is not provided or
- // when feedback translation is disabled.
- explicit SentPacketInfo(int64_t packet_id) : packet_id(packet_id) {}
-
- SentPacketInfo() = default;
-
- absl::optional<uint32_t> ssrc;
-
- // Transport sequence number (if it was provided in outgoing RTP packet).
- // It is used to re-create RTCP feedback packets from datagram ACKs.
- absl::optional<uint16_t> transport_sequence_number;
-
- // Packet id from rtc::PacketOptions. It is required to propagage sent
- // notification up the stack (SignalSentPacket).
- int64_t packet_id = 0;
- };
-
- // Finds SentPacketInfo for given |datagram_id| and removes map entry.
- // Returns false if entry was not found.
- bool GetAndRemoveSentPacketInfo(webrtc::DatagramId datagram_id,
- SentPacketInfo* sent_packet_info);
-
- // Sends datagram to datagram_transport.
- bool SendDatagram(rtc::ArrayView<const uint8_t> data,
- webrtc::DatagramId datagram_id);
-
- // Propagates network route changes from ICE.
- void OnNetworkRouteChanged(absl::optional<rtc::NetworkRoute> network_route);
-
- rtc::ThreadChecker thread_checker_;
- cricket::IceTransportInternal* ice_transport_;
- webrtc::DatagramTransportInterface* datagram_transport_;
-
- RtpDemuxer rtp_demuxer_;
-
- MediaTransportState state_ = MediaTransportState::kPending;
-
- // Extension map for parsing transport sequence numbers.
- webrtc::RtpHeaderExtensionMap rtp_header_extension_map_;
-
- // Keeps information about sent RTP packet until they are Acked or Lost.
- std::map<webrtc::DatagramId, SentPacketInfo> sent_rtp_packet_map_;
-
- // Current datagram_id, incremented after each sent RTP packets.
- // Datagram id is passed to datagram transport when we send datagram and we
- // get it back in notifications about Sent, Acked and Lost datagrams.
- int64_t current_datagram_id_ = 0;
-
- // TODO(sukhanov): Previous nonzero timestamp is required for workaround for
- // zero timestamps received, which sometimes are received from datagram
- // transport. Investigate if we can eliminate zero timestamps.
- int64_t previous_nonzero_timestamp_us_ = 0;
-
- // Disable datagram to RTCP feedback translation and enable RTCP feedback
- // loop (note that having both RTCP and datagram feedback loops is
- // inefficient, but can be useful in tests and experiments).
- const bool disable_datagram_to_rtcp_feeback_translation_;
-};
-
-} // namespace webrtc
-
-#endif // PC_DATAGRAM_RTP_TRANSPORT_H_
diff --git a/pc/dtls_transport.cc b/pc/dtls_transport.cc
index 1362f94ac1..f0882de3be 100644
--- a/pc/dtls_transport.cc
+++ b/pc/dtls_transport.cc
@@ -56,7 +56,7 @@ DtlsTransport::~DtlsTransport() {
}
DtlsTransportInformation DtlsTransport::Information() {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return info_;
}
@@ -85,7 +85,7 @@ void DtlsTransport::Clear() {
// into DtlsTransport, so we can't hold the lock while releasing.
std::unique_ptr<cricket::DtlsTransportInternal> transport_to_release;
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
transport_to_release = std::move(internal_dtls_transport_);
ice_transport_->Clear();
}
@@ -109,7 +109,7 @@ void DtlsTransport::OnInternalDtlsState(
void DtlsTransport::UpdateInformation() {
RTC_DCHECK_RUN_ON(owner_thread_);
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
if (internal_dtls_transport_) {
if (internal_dtls_transport_->dtls_state() ==
cricket::DTLS_TRANSPORT_CONNECTED) {
diff --git a/pc/dtls_transport.h b/pc/dtls_transport.h
index b5caae5212..ff8108ca90 100644
--- a/pc/dtls_transport.h
+++ b/pc/dtls_transport.h
@@ -17,6 +17,7 @@
#include "api/ice_transport_interface.h"
#include "api/scoped_refptr.h"
#include "p2p/base/dtls_transport.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -42,12 +43,12 @@ class DtlsTransport : public DtlsTransportInterface,
void Clear();
cricket::DtlsTransportInternal* internal() {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return internal_dtls_transport_.get();
}
const cricket::DtlsTransportInternal* internal() const {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return internal_dtls_transport_.get();
}
@@ -61,7 +62,7 @@ class DtlsTransport : public DtlsTransportInterface,
DtlsTransportObserverInterface* observer_ = nullptr;
rtc::Thread* owner_thread_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
DtlsTransportInformation info_ RTC_GUARDED_BY(lock_);
std::unique_ptr<cricket::DtlsTransportInternal> internal_dtls_transport_
RTC_GUARDED_BY(lock_);
diff --git a/pc/jsep_transport.cc b/pc/jsep_transport.cc
index 5bf74f1e87..2f7615ab3b 100644
--- a/pc/jsep_transport.cc
+++ b/pc/jsep_transport.cc
@@ -38,16 +38,12 @@ JsepTransportDescription::JsepTransportDescription(
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
- const TransportDescription& transport_desc,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol)
+ const TransportDescription& transport_desc)
: rtcp_mux_enabled(rtcp_mux_enabled),
cryptos(cryptos),
encrypted_header_extension_ids(encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id),
- transport_desc(transport_desc),
- media_alt_protocol(media_alt_protocol),
- data_alt_protocol(data_alt_protocol) {}
+ transport_desc(transport_desc) {}
JsepTransportDescription::JsepTransportDescription(
const JsepTransportDescription& from)
@@ -55,9 +51,7 @@ JsepTransportDescription::JsepTransportDescription(
cryptos(from.cryptos),
encrypted_header_extension_ids(from.encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id),
- transport_desc(from.transport_desc),
- media_alt_protocol(from.media_alt_protocol),
- data_alt_protocol(from.data_alt_protocol) {}
+ transport_desc(from.transport_desc) {}
JsepTransportDescription::~JsepTransportDescription() = default;
@@ -71,8 +65,6 @@ JsepTransportDescription& JsepTransportDescription::operator=(
encrypted_header_extension_ids = from.encrypted_header_extension_ids;
rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id;
transport_desc = from.transport_desc;
- media_alt_protocol = from.media_alt_protocol;
- data_alt_protocol = from.data_alt_protocol;
return *this;
}
@@ -88,9 +80,7 @@ JsepTransport::JsepTransport(
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
- std::unique_ptr<SctpTransportInternal> sctp_transport,
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
- webrtc::DataChannelTransportInterface* data_channel_transport)
+ std::unique_ptr<SctpTransportInternal> sctp_transport)
: network_thread_(rtc::Thread::Current()),
mid_(mid),
local_certificate_(local_certificate),
@@ -115,10 +105,7 @@ JsepTransport::JsepTransport(
sctp_transport_(sctp_transport
? new rtc::RefCountedObject<webrtc::SctpTransport>(
std::move(sctp_transport))
- : nullptr),
- datagram_transport_(std::move(datagram_transport)),
- datagram_rtp_transport_(std::move(datagram_rtp_transport)),
- data_channel_transport_(data_channel_transport) {
+ : nullptr) {
RTC_DCHECK(ice_transport_);
RTC_DCHECK(rtp_dtls_transport_);
// |rtcp_ice_transport_| must be present iff |rtcp_dtls_transport_| is
@@ -147,13 +134,6 @@ JsepTransport::JsepTransport(
std::vector<webrtc::RtpTransportInternal*>{
datagram_rtp_transport_.get(), default_rtp_transport()});
}
-
- if (data_channel_transport_ && sctp_data_channel_transport_) {
- composite_data_channel_transport_ =
- std::make_unique<webrtc::CompositeDataChannelTransport>(
- std::vector<webrtc::DataChannelTransportInterface*>{
- data_channel_transport_, sctp_data_channel_transport_.get()});
- }
}
JsepTransport::~JsepTransport() {
@@ -247,7 +227,6 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription(
// If PRANSWER/ANSWER is set, we should decide transport protocol type.
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
error = NegotiateAndSetDtlsParameters(type);
- NegotiateDatagramTransport(type);
}
if (!error.ok()) {
local_description_.reset();
@@ -325,7 +304,6 @@ webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription(
// If PRANSWER/ANSWER is set, we should decide transport protocol type.
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
error = NegotiateAndSetDtlsParameters(SdpType::kOffer);
- NegotiateDatagramTransport(type);
}
if (!error.ok()) {
remote_description_.reset();
@@ -383,18 +361,6 @@ absl::optional<rtc::SSLRole> JsepTransport::GetDtlsRole() const {
return absl::optional<rtc::SSLRole>(dtls_role);
}
-absl::optional<OpaqueTransportParameters>
-JsepTransport::GetTransportParameters() const {
- rtc::CritScope scope(&accessor_lock_);
- if (!datagram_transport()) {
- return absl::nullopt;
- }
-
- OpaqueTransportParameters params;
- params.parameters = datagram_transport()->GetTransportParameters();
- return params;
-}
-
bool JsepTransport::GetStats(TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
@@ -462,7 +428,6 @@ webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint) {
- RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(dtls_transport);
// Set SSL role. Role must be set before fingerprint is applied, which
// initiates DTLS setup.
@@ -535,7 +500,7 @@ void JsepTransport::ActivateRtcpMux() {
RTC_DCHECK(dtls_srtp_transport_);
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!sdes_transport_);
- dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(),
+ dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport_locked(),
/*rtcp_dtls_transport=*/nullptr);
}
rtcp_dtls_transport_ = nullptr; // Destroy this reference.
@@ -549,7 +514,6 @@ bool JsepTransport::SetSdes(const std::vector<CryptoParams>& cryptos,
webrtc::SdpType type,
ContentSource source) {
RTC_DCHECK_RUN_ON(network_thread_);
- rtc::CritScope scope(&accessor_lock_);
bool ret = false;
ret = sdes_negotiator_.Process(cryptos, type, source);
if (!ret) {
@@ -734,7 +698,6 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole(
bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
- rtc::CritScope scope(&accessor_lock_);
RTC_DCHECK(dtls_transport);
TransportChannelStats substats;
if (rtcp_dtls_transport_) {
@@ -756,106 +719,4 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
return true;
}
-void JsepTransport::NegotiateDatagramTransport(SdpType type) {
- RTC_DCHECK(type == SdpType::kAnswer || type == SdpType::kPrAnswer);
- rtc::CritScope lock(&accessor_lock_);
- if (!datagram_transport_) {
- return; // No need to negotiate the use of datagram transport.
- }
-
- bool compatible_datagram_transport = false;
- if (datagram_transport_ &&
- local_description_->transport_desc.opaque_parameters &&
- remote_description_->transport_desc.opaque_parameters) {
- // If both descriptions have datagram transport parameters, and the remote
- // parameters are accepted by the datagram transport, then use the datagram
- // transport. Otherwise, fall back to RTP.
- compatible_datagram_transport =
- datagram_transport_
- ->SetRemoteTransportParameters(remote_description_->transport_desc
- .opaque_parameters->parameters)
- .ok();
- }
-
- bool use_datagram_transport_for_media =
- compatible_datagram_transport &&
- remote_description_->media_alt_protocol ==
- remote_description_->transport_desc.opaque_parameters->protocol &&
- remote_description_->media_alt_protocol ==
- local_description_->media_alt_protocol;
-
- bool use_datagram_transport_for_data =
- compatible_datagram_transport &&
- remote_description_->data_alt_protocol ==
- remote_description_->transport_desc.opaque_parameters->protocol &&
- remote_description_->data_alt_protocol ==
- local_description_->data_alt_protocol;
-
- RTC_LOG(LS_INFO)
- << "Negotiating datagram transport, use_datagram_transport_for_media="
- << use_datagram_transport_for_media
- << ", use_datagram_transport_for_data=" << use_datagram_transport_for_data
- << " answer type=" << (type == SdpType::kAnswer ? "answer" : "pr_answer");
-
- // A provisional or full or answer lets the peer start sending on one of the
- // transports.
- if (composite_rtp_transport_) {
- composite_rtp_transport_->SetSendTransport(
- use_datagram_transport_for_media ? datagram_rtp_transport_.get()
- : default_rtp_transport());
- }
- if (composite_data_channel_transport_) {
- composite_data_channel_transport_->SetSendTransport(
- use_datagram_transport_for_data ? data_channel_transport_
- : sctp_data_channel_transport_.get());
- }
-
- if (type != SdpType::kAnswer) {
- return;
- }
-
- if (composite_rtp_transport_) {
- if (use_datagram_transport_for_media) {
- // Negotiated use of datagram transport for RTP, so remove the
- // non-datagram RTP transport.
- composite_rtp_transport_->RemoveTransport(default_rtp_transport());
- if (unencrypted_rtp_transport_) {
- unencrypted_rtp_transport_ = nullptr;
- } else if (sdes_transport_) {
- sdes_transport_ = nullptr;
- } else {
- dtls_srtp_transport_ = nullptr;
- }
- } else {
- composite_rtp_transport_->RemoveTransport(datagram_rtp_transport_.get());
- datagram_rtp_transport_ = nullptr;
- }
- }
-
- if (composite_data_channel_transport_) {
- if (use_datagram_transport_for_data) {
- // Negotiated use of datagram transport for data channels, so remove the
- // non-datagram data channel transport.
- composite_data_channel_transport_->RemoveTransport(
- sctp_data_channel_transport_.get());
- sctp_data_channel_transport_ = nullptr;
- sctp_transport_ = nullptr;
- } else {
- composite_data_channel_transport_->RemoveTransport(
- data_channel_transport_);
- data_channel_transport_ = nullptr;
- }
- } else if (data_channel_transport_ && !use_datagram_transport_for_data) {
- // The datagram transport has been rejected without a fallback. We still
- // need to inform the application and delete it.
- SignalDataChannelTransportNegotiated(this, nullptr);
- data_channel_transport_ = nullptr;
- }
-
- if (!use_datagram_transport_for_media && !use_datagram_transport_for_data) {
- // Datagram transport is not being used for anything, so clean it up.
- datagram_transport_ = nullptr;
- }
-}
-
} // namespace cricket
diff --git a/pc/jsep_transport.h b/pc/jsep_transport.h
index 6d88deff07..11c8168d9e 100644
--- a/pc/jsep_transport.h
+++ b/pc/jsep_transport.h
@@ -20,12 +20,11 @@
#include "api/candidate.h"
#include "api/ice_transport_interface.h"
#include "api/jsep.h"
-#include "api/transport/datagram_transport_interface.h"
+#include "api/transport/data_channel_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/p2p_constants.h"
#include "p2p/base/transport_info.h"
-#include "pc/composite_data_channel_transport.h"
#include "pc/composite_rtp_transport.h"
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
@@ -54,9 +53,7 @@ struct JsepTransportDescription {
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
- const TransportDescription& transport_description,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol);
+ const TransportDescription& transport_description);
JsepTransportDescription(const JsepTransportDescription& from);
~JsepTransportDescription();
@@ -69,14 +66,6 @@ struct JsepTransportDescription {
// TODO(zhihuang): Add the ICE and DTLS related variables and methods from
// TransportDescription and remove this extra layer of abstraction.
TransportDescription transport_desc;
-
- // Alt-protocols that apply to this JsepTransport. Presence indicates a
- // request to use an alternative protocol for media and/or data. The
- // alt-protocol is handled by a datagram transport. If one or both of these
- // values are present, JsepTransport will attempt to negotiate use of the
- // datagram transport for media and/or data.
- absl::optional<std::string> media_alt_protocol;
- absl::optional<std::string> data_alt_protocol;
};
// Helper class used by JsepTransportController that processes
@@ -103,9 +92,7 @@ class JsepTransport : public sigslot::has_slots<> {
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
- std::unique_ptr<SctpTransportInternal> sctp_transport,
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
- webrtc::DataChannelTransportInterface* data_channel_transport);
+ std::unique_ptr<SctpTransportInternal> sctp_transport);
~JsepTransport() override;
@@ -128,14 +115,15 @@ class JsepTransport : public sigslot::has_slots<> {
webrtc::RTCError SetLocalJsepTransportDescription(
const JsepTransportDescription& jsep_description,
- webrtc::SdpType type);
+ webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_);
// Set the remote TransportDescription to be used by DTLS and ICE channels
// that are part of this Transport.
webrtc::RTCError SetRemoteJsepTransportDescription(
const JsepTransportDescription& jsep_description,
- webrtc::SdpType type);
- webrtc::RTCError AddRemoteCandidates(const Candidates& candidates);
+ webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_);
+ webrtc::RTCError AddRemoteCandidates(const Candidates& candidates)
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// Set the "needs-ice-restart" flag as described in JSEP. After the flag is
// set, offers should generate new ufrags/passwords until an ICE restart
@@ -143,23 +131,22 @@ class JsepTransport : public sigslot::has_slots<> {
//
// This and the below method can be called safely from any thread as long as
// SetXTransportDescription is not in progress.
- void SetNeedsIceRestartFlag();
+ void SetNeedsIceRestartFlag() RTC_LOCKS_EXCLUDED(accessor_lock_);
// Returns true if the ICE restart flag above was set, and no ICE restart has
// occurred yet for this transport (by applying a local description with
// changed ufrag/password).
- bool needs_ice_restart() const {
+ bool needs_ice_restart() const RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return needs_ice_restart_;
}
// Returns role if negotiated, or empty absl::optional if it hasn't been
// negotiated yet.
- absl::optional<rtc::SSLRole> GetDtlsRole() const;
-
- absl::optional<OpaqueTransportParameters> GetTransportParameters() const;
+ absl::optional<rtc::SSLRole> GetDtlsRole() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// TODO(deadbeef): Make this const. See comment in transportcontroller.h.
- bool GetStats(TransportStats* stats);
+ bool GetStats(TransportStats* stats) RTC_LOCKS_EXCLUDED(accessor_lock_);
const JsepTransportDescription* local_description() const {
RTC_DCHECK_RUN_ON(network_thread_);
@@ -171,7 +158,8 @@ class JsepTransport : public sigslot::has_slots<> {
return remote_description_.get();
}
- webrtc::RtpTransportInternal* rtp_transport() const {
+ webrtc::RtpTransportInternal* rtp_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (composite_rtp_transport_) {
return composite_rtp_transport_.get();
@@ -182,7 +170,8 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- const DtlsTransportInternal* rtp_dtls_transport() const {
+ const DtlsTransportInternal* rtp_dtls_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtp_dtls_transport_) {
return rtp_dtls_transport_->internal();
@@ -191,16 +180,14 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- DtlsTransportInternal* rtp_dtls_transport() {
+ DtlsTransportInternal* rtp_dtls_transport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
- if (rtp_dtls_transport_) {
- return rtp_dtls_transport_->internal();
- } else {
- return nullptr;
- }
+ return rtp_dtls_transport_locked();
}
- const DtlsTransportInternal* rtcp_dtls_transport() const {
+ const DtlsTransportInternal* rtcp_dtls_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
@@ -209,7 +196,8 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- DtlsTransportInternal* rtcp_dtls_transport() {
+ DtlsTransportInternal* rtcp_dtls_transport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
@@ -218,30 +206,27 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport() {
+ rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return rtp_dtls_transport_;
}
- rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const {
+ rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return sctp_transport_;
}
- webrtc::DataChannelTransportInterface* data_channel_transport() const {
+ // TODO(bugs.webrtc.org/9719): Delete method, update callers to use
+ // SctpTransport() instead.
+ webrtc::DataChannelTransportInterface* data_channel_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
- if (composite_data_channel_transport_) {
- return composite_data_channel_transport_.get();
- } else if (sctp_data_channel_transport_) {
+ if (sctp_data_channel_transport_) {
return sctp_data_channel_transport_.get();
}
- return data_channel_transport_;
- }
-
- // Returns datagram transport, if available.
- webrtc::DatagramTransportInterface* datagram_transport() const {
- rtc::CritScope scope(&accessor_lock_);
- return datagram_transport_.get();
+ return nullptr;
}
// This is signaled when RTCP-mux becomes active and
@@ -249,15 +234,6 @@ class JsepTransport : public sigslot::has_slots<> {
// handle the signal and update the aggregate transport states.
sigslot::signal<> SignalRtcpMuxActive;
- // Signals that a data channel transport was negotiated and may be used to
- // send data. The first parameter is |this|. The second parameter is the
- // transport that was negotiated, or null if negotiation rejected the data
- // channel transport. The third parameter (bool) indicates whether the
- // negotiation was provisional or final. If true, it is provisional, if
- // false, it is final.
- sigslot::signal2<JsepTransport*, webrtc::DataChannelTransportInterface*>
- SignalDataChannelTransportNegotiated;
-
// TODO(deadbeef): The methods below are only public for testing. Should make
// them utility functions or objects so they can be tested independently from
// this class.
@@ -271,6 +247,15 @@ class JsepTransport : public sigslot::has_slots<> {
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
private:
+ DtlsTransportInternal* rtp_dtls_transport_locked()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) {
+ if (rtp_dtls_transport_) {
+ return rtp_dtls_transport_->internal();
+ } else {
+ return nullptr;
+ }
+ }
+
bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source);
void ActivateRtcpMux();
@@ -278,7 +263,8 @@ class JsepTransport : public sigslot::has_slots<> {
bool SetSdes(const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_extension_ids,
webrtc::SdpType type,
- ContentSource source);
+ ContentSource source)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_);
// Negotiates and sets the DTLS parameters based on the current local and
// remote transport description, such as the DTLS role to use, and whether
@@ -295,26 +281,22 @@ class JsepTransport : public sigslot::has_slots<> {
webrtc::SdpType local_description_type,
ConnectionRole local_connection_role,
ConnectionRole remote_connection_role,
- absl::optional<rtc::SSLRole>* negotiated_dtls_role);
+ absl::optional<rtc::SSLRole>* negotiated_dtls_role)
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// Pushes down the ICE parameters from the remote description.
void SetRemoteIceParameters(const IceParameters& ice_parameters,
IceTransportInternal* ice);
// Pushes down the DTLS parameters obtained via negotiation.
- webrtc::RTCError SetNegotiatedDtlsParameters(
+ static webrtc::RTCError SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint);
bool GetTransportStats(DtlsTransportInternal* dtls_transport,
- TransportStats* stats);
-
- // Deactivates, signals removal, and deletes |composite_rtp_transport_| if the
- // current state of negotiation is sufficient to determine which rtp_transport
- // and data channel transport to use.
- void NegotiateDatagramTransport(webrtc::SdpType type)
- RTC_RUN_ON(network_thread_);
+ TransportStats* stats)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_);
// Returns the default (non-datagram) rtp transport, if any.
webrtc::RtpTransportInternal* default_rtp_transport() const
@@ -334,7 +316,7 @@ class JsepTransport : public sigslot::has_slots<> {
const rtc::Thread* const network_thread_;
// Critical scope for fields accessed off-thread
// TODO(https://bugs.webrtc.org/10300): Stop doing this.
- rtc::CriticalSection accessor_lock_;
+ rtc::RecursiveCriticalSection accessor_lock_;
const std::string mid_;
// needs-ice-restart bit as described in JSEP.
bool needs_ice_restart_ RTC_GUARDED_BY(accessor_lock_) = false;
@@ -387,22 +369,9 @@ class JsepTransport : public sigslot::has_slots<> {
absl::optional<std::vector<int>> recv_extension_ids_
RTC_GUARDED_BY(network_thread_);
- // Optional datagram transport (experimental).
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport_
- RTC_GUARDED_BY(accessor_lock_);
-
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport_
RTC_GUARDED_BY(accessor_lock_);
- // Non-SCTP data channel transport. Set to |datagram_transport_| if that
- // transport should be used for data chanels. Unset otherwise.
- webrtc::DataChannelTransportInterface* data_channel_transport_
- RTC_GUARDED_BY(accessor_lock_) = nullptr;
-
- // Composite data channel transport, used during negotiation.
- std::unique_ptr<webrtc::CompositeDataChannelTransport>
- composite_data_channel_transport_ RTC_GUARDED_BY(accessor_lock_);
-
RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransport);
};
diff --git a/pc/jsep_transport_controller.cc b/pc/jsep_transport_controller.cc
index a7e1b876fe..71dd8acc21 100644
--- a/pc/jsep_transport_controller.cc
+++ b/pc/jsep_transport_controller.cc
@@ -15,11 +15,8 @@
#include "absl/algorithm/container.h"
#include "api/ice_transport_factory.h"
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
#include "p2p/base/ice_transport_internal.h"
#include "p2p/base/port.h"
-#include "pc/datagram_rtp_transport.h"
#include "pc/srtp_filter.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
@@ -140,26 +137,6 @@ RtpTransportInternal* JsepTransportController::GetRtpTransport(
return jsep_transport->rtp_transport();
}
-MediaTransportConfig JsepTransportController::GetMediaTransportConfig(
- const std::string& mid) const {
- auto jsep_transport = GetJsepTransportForMid(mid);
- if (!jsep_transport) {
- return MediaTransportConfig();
- }
-
- DatagramTransportInterface* datagram_transport = nullptr;
- if (config_.use_datagram_transport) {
- datagram_transport = jsep_transport->datagram_transport();
- }
-
- if (datagram_transport) {
- return MediaTransportConfig(
- /*rtp_max_packet_size=*/datagram_transport->GetLargestDatagramSize());
- } else {
- return MediaTransportConfig();
- }
-}
-
DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport(
const std::string& mid) const {
auto jsep_transport = GetJsepTransportForMid(mid);
@@ -425,17 +402,6 @@ void JsepTransportController::SetActiveResetSrtpParams(
}
}
-void JsepTransportController::SetMediaTransportSettings(
- bool use_datagram_transport,
- bool use_datagram_transport_for_data_channels,
- bool use_datagram_transport_for_data_channels_receive_only) {
- config_.use_datagram_transport = use_datagram_transport;
- config_.use_datagram_transport_for_data_channels =
- use_datagram_transport_for_data_channels;
- config_.use_datagram_transport_for_data_channels_receive_only =
- use_datagram_transport_for_data_channels_receive_only;
-}
-
void JsepTransportController::RollbackTransports() {
if (!network_thread_->IsCurrent()) {
network_thread_->Invoke<void>(RTC_FROM_HERE, [=] { RollbackTransports(); });
@@ -468,16 +434,12 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name,
std::unique_ptr<cricket::DtlsTransportInternal>
JsepTransportController::CreateDtlsTransport(
const cricket::ContentInfo& content_info,
- cricket::IceTransportInternal* ice,
- DatagramTransportInterface* datagram_transport) {
+ cricket::IceTransportInternal* ice) {
RTC_DCHECK(network_thread_->IsCurrent());
std::unique_ptr<cricket::DtlsTransportInternal> dtls;
- if (datagram_transport) {
- RTC_DCHECK(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels);
- } else if (config_.dtls_transport_factory) {
+ if (config_.dtls_transport_factory) {
dtls = config_.dtls_transport_factory->CreateDtlsTransport(
ice, config_.crypto_options);
} else {
@@ -614,16 +576,9 @@ RTCError JsepTransportController::ApplyDescription_n(
}
std::vector<int> merged_encrypted_extension_ids;
- absl::optional<std::string> bundle_media_alt_protocol;
- absl::optional<std::string> bundle_data_alt_protocol;
if (bundle_group_) {
merged_encrypted_extension_ids =
MergeEncryptedHeaderExtensionIdsForBundle(description);
- error = GetAltProtocolsForBundle(description, &bundle_media_alt_protocol,
- &bundle_data_alt_protocol);
- if (!error.ok()) {
- return error;
- }
}
for (const cricket::ContentInfo& content_info : description->contents()) {
@@ -642,8 +597,6 @@ RTCError JsepTransportController::ApplyDescription_n(
description->transport_infos().size());
for (size_t i = 0; i < description->contents().size(); ++i) {
const cricket::ContentInfo& content_info = description->contents()[i];
- const cricket::MediaContentDescription* media_description =
- content_info.media_description();
const cricket::TransportInfo& transport_info =
description->transport_infos()[i];
if (content_info.rejected) {
@@ -654,7 +607,8 @@ RTCError JsepTransportController::ApplyDescription_n(
if (IsBundled(content_info.name) && content_info.name != *bundled_mid()) {
if (!HandleBundledContent(content_info)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "Failed to process the bundled m= section.");
+ "Failed to process the bundled m= section with mid='" +
+ content_info.name + "'.");
}
continue;
}
@@ -665,23 +619,10 @@ RTCError JsepTransportController::ApplyDescription_n(
}
std::vector<int> extension_ids;
- absl::optional<std::string> media_alt_protocol;
- absl::optional<std::string> data_alt_protocol;
if (bundled_mid() && content_info.name == *bundled_mid()) {
extension_ids = merged_encrypted_extension_ids;
- media_alt_protocol = bundle_media_alt_protocol;
- data_alt_protocol = bundle_data_alt_protocol;
} else {
extension_ids = GetEncryptedHeaderExtensionIds(content_info);
- switch (media_description->type()) {
- case cricket::MEDIA_TYPE_AUDIO:
- case cricket::MEDIA_TYPE_VIDEO:
- media_alt_protocol = media_description->alt_protocol();
- break;
- case cricket::MEDIA_TYPE_DATA:
- data_alt_protocol = media_description->alt_protocol();
- break;
- }
}
int rtp_abs_sendtime_extn_id =
@@ -695,8 +636,7 @@ RTCError JsepTransportController::ApplyDescription_n(
cricket::JsepTransportDescription jsep_description =
CreateJsepTransportDescription(content_info, transport_info,
- extension_ids, rtp_abs_sendtime_extn_id,
- media_alt_protocol, data_alt_protocol);
+ extension_ids, rtp_abs_sendtime_extn_id);
if (local) {
error =
transport->SetLocalJsepTransportDescription(jsep_description, type);
@@ -706,9 +646,10 @@ RTCError JsepTransportController::ApplyDescription_n(
}
if (!error.ok()) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
- "Failed to apply the description for " +
- content_info.name + ": " + error.message());
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "Failed to apply the description for m= section with mid='" +
+ content_info.name + "': " + error.message());
}
}
if (type == SdpType::kAnswer) {
@@ -727,11 +668,11 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
// The BUNDLE group containing a MID that no m= section has is invalid.
if (new_bundle_group) {
- for (const auto& content_name : new_bundle_group->content_names()) {
+ for (const std::string& content_name : new_bundle_group->content_names()) {
if (!description->GetContentByName(content_name)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains MID:" + content_name +
- " matching no m= section.");
+ "The BUNDLE group contains MID='" + content_name +
+ "' matching no m= section.");
}
}
}
@@ -743,18 +684,21 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
if (new_bundle_group) {
// The BUNDLE group in answer should be a subset of offered group.
- for (const auto& content_name : new_bundle_group->content_names()) {
+ for (const std::string& content_name :
+ new_bundle_group->content_names()) {
if (!offered_bundle_group ||
!offered_bundle_group->HasContentName(content_name)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group in answer contains a MID that was "
- "not in the offered group.");
+ "The BUNDLE group in answer contains a MID='" +
+ content_name +
+ "' that was "
+ "not in the offered group.");
}
}
}
if (bundle_group_) {
- for (const auto& content_name : bundle_group_->content_names()) {
+ for (const std::string& content_name : bundle_group_->content_names()) {
// An answer that removes m= sections from pre-negotiated BUNDLE group
// without rejecting it, is invalid.
if (!new_bundle_group ||
@@ -762,8 +706,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
auto* content_info = description->GetContentByName(content_name);
if (!content_info || !content_info->rejected) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "Answer cannot remove m= section " + content_name +
- " from already-established BUNDLE group.");
+ "Answer cannot remove m= section with mid='" +
+ content_name +
+ "' from already-established BUNDLE group.");
}
}
}
@@ -798,9 +743,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
for (const auto& content_name : bundle_group_->content_names()) {
auto other_content = description->GetContentByName(content_name);
if (!other_content->rejected) {
- return RTCError(
- RTCErrorType::INVALID_PARAMETER,
- "The m= section:" + content_name + " should be rejected.");
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "The m= section with mid='" + content_name +
+ "' should be rejected.");
}
}
}
@@ -815,8 +760,8 @@ RTCError JsepTransportController::ValidateContent(
content_info.type == cricket::MediaProtocolType::kRtp &&
!content_info.media_description()->rtcp_mux()) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The m= section:" + content_info.name +
- " is invalid. RTCP-MUX is not "
+ "The m= section with mid='" + content_info.name +
+ "' is invalid. RTCP-MUX is not "
"enabled when it is required.");
}
return RTCError::OK();
@@ -892,9 +837,7 @@ JsepTransportController::CreateJsepTransportDescription(
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
- int rtp_abs_sendtime_extn_id,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol) {
+ int rtp_abs_sendtime_extn_id) {
const cricket::MediaContentDescription* content_desc =
content_info.media_description();
RTC_DCHECK(content_desc);
@@ -904,8 +847,7 @@ JsepTransportController::CreateJsepTransportDescription(
return cricket::JsepTransportDescription(
rtcp_mux_enabled, content_desc->cryptos(), encrypted_extension_ids,
- rtp_abs_sendtime_extn_id, transport_info.description, media_alt_protocol,
- data_alt_protocol);
+ rtp_abs_sendtime_extn_id, transport_info.description);
}
bool JsepTransportController::ShouldUpdateBundleGroup(
@@ -971,55 +913,6 @@ JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundle(
return merged_ids;
}
-RTCError JsepTransportController::GetAltProtocolsForBundle(
- const cricket::SessionDescription* description,
- absl::optional<std::string>* media_alt_protocol,
- absl::optional<std::string>* data_alt_protocol) {
- RTC_DCHECK(description);
- RTC_DCHECK(bundle_group_);
- RTC_DCHECK(media_alt_protocol);
- RTC_DCHECK(data_alt_protocol);
-
- bool found_media = false;
- bool found_data = false;
- for (const cricket::ContentInfo& content : description->contents()) {
- if (bundle_group_->HasContentName(content.name)) {
- const cricket::MediaContentDescription* media_description =
- content.media_description();
- switch (media_description->type()) {
- case cricket::MEDIA_TYPE_AUDIO:
- case cricket::MEDIA_TYPE_VIDEO:
- if (found_media &&
- *media_alt_protocol != media_description->alt_protocol()) {
- return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains conflicting "
- "alt-protocols for media ('" +
- media_alt_protocol->value_or("") + "' and '" +
- media_description->alt_protocol().value_or("") +
- "')");
- }
- found_media = true;
- *media_alt_protocol = media_description->alt_protocol();
- break;
- case cricket::MEDIA_TYPE_DATA:
- if (found_data &&
- *data_alt_protocol != media_description->alt_protocol()) {
- return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains conflicting "
- "alt-protocols for data ('" +
- data_alt_protocol->value_or("") + "' and '" +
- media_description->alt_protocol().value_or("") +
- "')");
- }
- found_data = true;
- *data_alt_protocol = media_description->alt_protocol();
- break;
- }
- }
- }
- return RTCError::OK();
-}
-
int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId(
const cricket::ContentInfo& content_info) {
if (!config_.enable_external_auth) {
@@ -1060,83 +953,6 @@ cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
}
-// TODO(sukhanov): Refactor to avoid code duplication for Media and Datagram
-// transports setup.
-std::unique_ptr<webrtc::DatagramTransportInterface>
-JsepTransportController::MaybeCreateDatagramTransport(
- const cricket::ContentInfo& content_info,
- const cricket::SessionDescription& description,
- bool local) {
- if (config_.media_transport_factory == nullptr) {
- return nullptr;
- }
-
- if (!(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels)) {
- return nullptr;
- }
-
- // Caller (offerer) datagram transport.
- if (offer_datagram_transport_) {
- RTC_DCHECK(local);
- RTC_LOG(LS_INFO) << "Offered datagram transport has now been activated.";
- return std::move(offer_datagram_transport_);
- }
-
- const cricket::TransportDescription* transport_description =
- description.GetTransportDescriptionByName(content_info.mid());
- RTC_DCHECK(transport_description)
- << "Missing transport description for mid=" << content_info.mid();
-
- if (!transport_description->opaque_parameters) {
- RTC_LOG(LS_INFO)
- << "No opaque transport parameters, not creating datagram transport";
- return nullptr;
- }
-
- if (transport_description->opaque_parameters->protocol !=
- config_.media_transport_factory->GetTransportName()) {
- RTC_LOG(LS_INFO) << "Opaque transport parameters for protocol="
- << transport_description->opaque_parameters->protocol
- << ", which does not match supported protocol="
- << config_.media_transport_factory->GetTransportName();
- return nullptr;
- }
-
- RTC_DCHECK(!local);
- // When bundle is enabled, two JsepTransports are created, and then
- // the second transport is destroyed (right away).
- // For datagram transport, we don't want to create the second
- // datagram transport in the first place.
- RTC_LOG(LS_INFO) << "Returning new, client datagram transport.";
-
- MediaTransportSettings settings;
- settings.is_caller = local;
- settings.remote_transport_parameters =
- transport_description->opaque_parameters->parameters;
- settings.event_log = config_.event_log;
-
- auto datagram_transport_result =
- config_.media_transport_factory->CreateDatagramTransport(network_thread_,
- settings);
-
- if (!datagram_transport_result.ok()) {
- // Datagram transport negotiation will fail and we'll fall back to RTP.
- return nullptr;
- }
-
- if (!datagram_transport_result.value()
- ->SetRemoteTransportParameters(
- transport_description->opaque_parameters->parameters)
- .ok()) {
- // Datagram transport negotiation failed (parameters are incompatible).
- // Fall back to RTP.
- return nullptr;
- }
-
- return datagram_transport_result.MoveValue();
-}
-
RTCError JsepTransportController::MaybeCreateJsepTransport(
bool local,
const cricket::ContentInfo& content_info,
@@ -1158,14 +974,8 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
CreateIceTransport(content_info.name, /*rtcp=*/false);
RTC_DCHECK(ice);
- std::unique_ptr<DatagramTransportInterface> datagram_transport =
- MaybeCreateDatagramTransport(content_info, description, local);
- if (datagram_transport) {
- datagram_transport->Connect(ice->internal());
- }
-
std::unique_ptr<cricket::DtlsTransportInternal> rtp_dtls_transport =
- CreateDtlsTransport(content_info, ice->internal(), nullptr);
+ CreateDtlsTransport(content_info, ice->internal());
std::unique_ptr<cricket::DtlsTransportInternal> rtcp_dtls_transport;
std::unique_ptr<RtpTransport> unencrypted_rtp_transport;
@@ -1177,29 +987,9 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
if (config_.rtcp_mux_policy !=
PeerConnectionInterface::kRtcpMuxPolicyRequire &&
content_info.type == cricket::MediaProtocolType::kRtp) {
- RTC_DCHECK(datagram_transport == nullptr);
rtcp_ice = CreateIceTransport(content_info.name, /*rtcp=*/true);
rtcp_dtls_transport =
- CreateDtlsTransport(content_info, rtcp_ice->internal(),
- /*datagram_transport=*/nullptr);
- }
-
- // Only create a datagram RTP transport if the datagram transport should be
- // used for RTP.
- if (datagram_transport && config_.use_datagram_transport) {
- // TODO(sukhanov): We use unencrypted RTP transport over DatagramTransport,
- // because MediaTransport encrypts. In the future we may want to
- // implement our own version of RtpTransport over MediaTransport, because
- // it will give us more control over things like:
- // - Fusing
- // - Rtp header compression
- // - Handling Rtcp feedback.
- RTC_LOG(LS_INFO) << "Creating UnencryptedRtpTransport, because datagram "
- "transport is used.";
- RTC_DCHECK(!rtcp_dtls_transport);
- datagram_rtp_transport = std::make_unique<DatagramRtpTransport>(
- content_info.media_description()->rtp_header_extensions(),
- ice->internal(), datagram_transport.get());
+ CreateDtlsTransport(content_info, rtcp_ice->internal());
}
if (config_.disable_encryption) {
@@ -1223,27 +1013,19 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
config_.sctp_factory->CreateSctpTransport(rtp_dtls_transport.get());
}
- DataChannelTransportInterface* data_channel_transport = nullptr;
- if (config_.use_datagram_transport_for_data_channels) {
- data_channel_transport = datagram_transport.get();
- }
-
std::unique_ptr<cricket::JsepTransport> jsep_transport =
std::make_unique<cricket::JsepTransport>(
content_info.name, certificate_, std::move(ice), std::move(rtcp_ice),
std::move(unencrypted_rtp_transport), std::move(sdes_transport),
std::move(dtls_srtp_transport), std::move(datagram_rtp_transport),
std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport),
- std::move(sctp_transport), std::move(datagram_transport),
- data_channel_transport);
+ std::move(sctp_transport));
jsep_transport->rtp_transport()->SignalRtcpPacketReceived.connect(
this, &JsepTransportController::OnRtcpPacketReceived_n);
jsep_transport->SignalRtcpMuxActive.connect(
this, &JsepTransportController::UpdateAggregateStates_n);
- jsep_transport->SignalDataChannelTransportNegotiated.connect(
- this, &JsepTransportController::OnDataChannelTransportNegotiated_n);
SetTransportForMid(content_info.name, jsep_transport.get());
jsep_transports_by_name_[content_info.name] = std::move(jsep_transport);
@@ -1424,18 +1206,6 @@ void JsepTransportController::OnTransportStateChanged_n(
UpdateAggregateStates_n();
}
-void JsepTransportController::OnDataChannelTransportNegotiated_n(
- cricket::JsepTransport* transport,
- DataChannelTransportInterface* data_channel_transport) {
- for (const auto& it : mid_to_transport_) {
- if (it.second == transport) {
- config_.transport_observer->OnTransportChanged(
- it.first, transport->rtp_transport(), transport->RtpDtlsTransport(),
- data_channel_transport);
- }
- }
-}
-
void JsepTransportController::UpdateAggregateStates_n() {
RTC_DCHECK(network_thread_->IsCurrent());
@@ -1637,54 +1407,4 @@ void JsepTransportController::OnDtlsHandshakeError(
SignalDtlsHandshakeError(error);
}
-absl::optional<cricket::OpaqueTransportParameters>
-JsepTransportController::GetTransportParameters(const std::string& mid) {
- if (!(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels)) {
- return absl::nullopt;
- }
-
- cricket::JsepTransport* transport = GetJsepTransportForMid(mid);
- if (transport) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport->GetTransportParameters();
- if (params) {
- params->protocol = config_.media_transport_factory->GetTransportName();
- }
- return params;
- }
-
- RTC_DCHECK(!local_desc_ && !remote_desc_)
- << "JsepTransport should exist for every mid once any description is set";
-
- if (config_.use_datagram_transport_for_data_channels_receive_only) {
- return absl::nullopt;
- }
-
- // Need to generate a transport for the offer.
- if (!offer_datagram_transport_) {
- webrtc::MediaTransportSettings settings;
- settings.is_caller = true;
- settings.pre_shared_key = rtc::CreateRandomString(32);
- settings.event_log = config_.event_log;
- auto datagram_transport_or_error =
- config_.media_transport_factory->CreateDatagramTransport(
- network_thread_, settings);
-
- if (datagram_transport_or_error.ok()) {
- offer_datagram_transport_ =
- std::move(datagram_transport_or_error.value());
- } else {
- RTC_LOG(LS_INFO) << "Unable to create datagram transport, error="
- << datagram_transport_or_error.error().message();
- }
- }
-
- // We have prepared a transport for the offer, and can now use its parameters.
- cricket::OpaqueTransportParameters params;
- params.parameters = offer_datagram_transport_->GetTransportParameters();
- params.protocol = config_.media_transport_factory->GetTransportName();
- return params;
-}
-
} // namespace webrtc
diff --git a/pc/jsep_transport_controller.h b/pc/jsep_transport_controller.h
index c966e744c6..d95b475969 100644
--- a/pc/jsep_transport_controller.h
+++ b/pc/jsep_transport_controller.h
@@ -22,7 +22,6 @@
#include "api/ice_transport_factory.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/dtls_transport_factory.h"
@@ -103,31 +102,6 @@ class JsepTransportController : public sigslot::has_slots<> {
// Factory for SCTP transports.
cricket::SctpTransportInternalFactory* sctp_factory = nullptr;
-
- // Whether an RtpMediaTransport should be created as default, when no
- // MediaTransportFactory is provided.
- bool use_rtp_media_transport = false;
-
- // Use encrypted datagram transport to send packets.
- bool use_datagram_transport = false;
-
- // Use datagram transport's implementation of data channels instead of SCTP.
- bool use_datagram_transport_for_data_channels = false;
-
- // Whether |use_datagram_transport_for_data_channels| applies to outgoing
- // calls. If true, |use_datagram_transport_for_data_channels| applies only
- // to incoming calls.
- bool use_datagram_transport_for_data_channels_receive_only = false;
-
- // Optional media transport factory (experimental). If provided it will be
- // used to create datagram_transport (as long as either
- // |use_datagram_transport| or
- // |use_datagram_transport_for_data_channels| is set to true). However,
- // whether it will be used to send / receive audio and video frames instead
- // of RTP is determined by |use_datagram_transport|. Note that currently
- // datagram_transport co-exists with RTP / RTCP transports and may use the
- // same underlying ICE transport.
- MediaTransportFactory* media_transport_factory = nullptr;
};
// The ICE related events are signaled on the |signaling_thread|.
@@ -161,8 +135,6 @@ class JsepTransportController : public sigslot::has_slots<> {
rtc::scoped_refptr<SctpTransport> GetSctpTransport(
const std::string& mid) const;
- MediaTransportConfig GetMediaTransportConfig(const std::string& mid) const;
-
DataChannelTransportInterface* GetDataChannelTransport(
const std::string& mid) const;
@@ -215,26 +187,10 @@ class JsepTransportController : public sigslot::has_slots<> {
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
- // Allows to overwrite the settings from config. You may set or reset the
- // media transport configuration on the jsep transport controller, as long as
- // you did not call 'GetMediaTransport' or 'MaybeCreateJsepTransport'. Once
- // Jsep transport is created, you can't change this setting.
- void SetMediaTransportSettings(
- bool use_datagram_transport,
- bool use_datagram_transport_for_data_channels,
- bool use_datagram_transport_for_data_channels_receive_only);
-
// For now the rollback only removes mid to transport mappings
// and deletes unused transports, but doesn't consider anything more complex.
void RollbackTransports();
- // Gets the transport parameters for the transport identified by |mid|.
- // If |mid| is bundled, returns the parameters for the bundled transport.
- // If the transport for |mid| has not been created yet, it may be allocated in
- // order to generate transport parameters.
- absl::optional<cricket::OpaqueTransportParameters> GetTransportParameters(
- const std::string& mid);
-
// All of these signals are fired on the signaling thread.
// If any transport failed => failed,
@@ -290,9 +246,7 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
- int rtp_abs_sendtime_extn_id,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol);
+ int rtp_abs_sendtime_extn_id);
absl::optional<std::string> bundled_mid() const {
absl::optional<std::string> bundled_mid;
@@ -314,12 +268,6 @@ class JsepTransportController : public sigslot::has_slots<> {
std::vector<int> GetEncryptedHeaderExtensionIds(
const cricket::ContentInfo& content_info);
- // Extracts the alt-protocol settings that apply to the bundle group.
- RTCError GetAltProtocolsForBundle(
- const cricket::SessionDescription* description,
- absl::optional<std::string>* media_alt_protocol,
- absl::optional<std::string>* data_alt_protocol);
-
int GetRtpAbsSendTimeHeaderExtensionId(
const cricket::ContentInfo& content_info);
@@ -347,16 +295,6 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::ContentInfo& content_info,
const cricket::SessionDescription& description);
- // Creates datagram transport if config wants to use it, and a=x-mt line is
- // present for the current media transport. Returned
- // DatagramTransportInterface is not connected, and must be connected to ICE.
- // You must call |GenerateOrGetLastMediaTransportOffer| on the caller before
- // calling MaybeCreateDatagramTransport.
- std::unique_ptr<webrtc::DatagramTransportInterface>
- MaybeCreateDatagramTransport(const cricket::ContentInfo& content_info,
- const cricket::SessionDescription& description,
- bool local);
-
void MaybeDestroyJsepTransport(const std::string& mid);
void DestroyAllJsepTransports_n();
@@ -370,8 +308,7 @@ class JsepTransportController : public sigslot::has_slots<> {
std::unique_ptr<cricket::DtlsTransportInternal> CreateDtlsTransport(
const cricket::ContentInfo& content_info,
- cricket::IceTransportInternal* ice,
- DatagramTransportInterface* datagram_transport);
+ cricket::IceTransportInternal* ice);
rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
const std::string& transport_name,
bool rtcp);
@@ -409,10 +346,6 @@ class JsepTransportController : public sigslot::has_slots<> {
void OnTransportStateChanged_n(cricket::IceTransportInternal* transport);
void OnTransportCandidatePairChanged_n(
const cricket::CandidatePairChangeEvent& event);
- void OnDataChannelTransportNegotiated_n(
- cricket::JsepTransport* transport,
- DataChannelTransportInterface* data_channel_transport);
-
void UpdateAggregateStates_n();
void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet,
@@ -446,17 +379,6 @@ class JsepTransportController : public sigslot::has_slots<> {
Config config_;
- // Early on in the call we don't know if datagram transport is going to be
- // used, but we need to get the server-supported parameters to add to an SDP.
- // This server datagram transport will be promoted to the used datagram
- // transport after the local description is set, and the ownership will be
- // transferred to the actual JsepTransport. This "offer" datagram transport is
- // not created if it's done on the party that provides answer. This offer
- // datagram transport is only created once at the beginning of the connection,
- // and never again.
- std::unique_ptr<DatagramTransportInterface> offer_datagram_transport_ =
- nullptr;
-
const cricket::SessionDescription* local_desc_ = nullptr;
const cricket::SessionDescription* remote_desc_ = nullptr;
absl::optional<bool> initial_offerer_;
diff --git a/pc/jsep_transport_controller_unittest.cc b/pc/jsep_transport_controller_unittest.cc
index 3fc6f8b7e5..7bdba23c2d 100644
--- a/pc/jsep_transport_controller_unittest.cc
+++ b/pc/jsep_transport_controller_unittest.cc
@@ -13,9 +13,6 @@
#include <map>
#include <memory>
-#include "api/test/fake_media_transport.h"
-#include "api/test/loopback_media_transport.h"
-#include "api/transport/media/media_transport_interface.h"
#include "p2p/base/dtls_transport_factory.h"
#include "p2p/base/fake_dtls_transport.h"
#include "p2p/base/fake_ice_transport.h"
@@ -44,20 +41,6 @@ static const char kDataMid1[] = "data1";
namespace webrtc {
-namespace {
-
-// Media transport factory requires crypto settings to be present in order to
-// create media transport.
-void AddCryptoSettings(cricket::SessionDescription* description) {
- for (auto& content : description->contents()) {
- content.media_description()->AddCrypto(cricket::CryptoParams(
- /*t=*/0, std::string(rtc::CS_AES_CM_128_HMAC_SHA1_80),
- "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2", ""));
- }
-}
-
-} // namespace
-
class FakeIceTransportFactory : public webrtc::IceTransportFactory {
public:
~FakeIceTransportFactory() override = default;
@@ -440,96 +423,6 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) {
EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid1));
}
-TEST_F(JsepTransportControllerTest,
- DtlsIsStillCreatedIfDatagramTransportIsOnlyUsedForDataChannels) {
- FakeMediaTransportFactory fake_media_transport_factory("transport_params");
- JsepTransportController::Config config;
-
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory;
- config.use_datagram_transport_for_data_channels = true;
- CreateJsepTransportController(config);
-
- auto description = CreateSessionDescriptionWithBundledData();
- AddCryptoSettings(description.get());
-
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(kAudioMid1);
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = params;
- }
- for (cricket::ContentInfo& content_info : description->contents()) {
- if (content_info.media_description()->type() == cricket::MEDIA_TYPE_DATA) {
- content_info.media_description()->set_alt_protocol(params->protocol);
- }
- }
-
- EXPECT_TRUE(transport_controller_
- ->SetLocalDescription(SdpType::kOffer, description.get())
- .ok());
- EXPECT_TRUE(transport_controller_
- ->SetRemoteDescription(SdpType::kAnswer, description.get())
- .ok());
-
- FakeDatagramTransport* datagram_transport =
- static_cast<FakeDatagramTransport*>(
- transport_controller_->GetDataChannelTransport(kAudioMid1));
-
- ASSERT_NE(nullptr, datagram_transport);
-
- EXPECT_EQ(cricket::ICE_CANDIDATE_COMPONENT_RTP,
- transport_controller_->GetDtlsTransport(kAudioMid1)->component())
- << "Datagram transport for media was not enabled, and so DTLS transport "
- "should be created.";
-
- // Datagram transport is not used for media, so no max packet size is
- // specified.
- EXPECT_EQ(transport_controller_->GetMediaTransportConfig(kAudioMid1)
- .rtp_max_packet_size,
- absl::nullopt);
-
- // Since datagram transport is not used for RTP, setting it to writable should
- // not make the RTP transport writable.
- datagram_transport->set_state(MediaTransportState::kWritable);
- EXPECT_FALSE(transport_controller_->GetRtpTransport(kAudioMid1)
- ->IsWritable(/*rtcp=*/false));
-}
-
-// An offer that bundles different alt-protocols should be rejected.
-TEST_F(JsepTransportControllerTest, CannotBundleDifferentAltProtocols) {
- FakeMediaTransportFactory fake_media_transport_factory("transport_params");
- JsepTransportController::Config config;
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory;
- config.use_datagram_transport = true;
- config.use_datagram_transport_for_data_channels = true;
- CreateJsepTransportController(config);
-
- auto description = CreateSessionDescriptionWithBundledData();
- AddCryptoSettings(description.get());
-
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(kAudioMid1);
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = params;
- }
-
- // Append a different alt-protocol to each of the sections.
- for (cricket::ContentInfo& content_info : description->contents()) {
- content_info.media_description()->set_alt_protocol(params->protocol + "-" +
- content_info.name);
- }
-
- EXPECT_FALSE(transport_controller_
- ->SetLocalDescription(SdpType::kOffer, description.get())
- .ok());
- EXPECT_FALSE(transport_controller_
- ->SetRemoteDescription(SdpType::kAnswer, description.get())
- .ok());
-}
-
TEST_F(JsepTransportControllerTest, SetIceConfig) {
CreateJsepTransportController(JsepTransportController::Config());
auto description = CreateSessionDescriptionWithoutBundle();
@@ -1650,423 +1543,4 @@ TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) {
.ok());
}
-constexpr char kFakeTransportParameters[] = "fake-params";
-
-// Test fixture that provides common setup and helpers for tests related to the
-// datagram transport.
-class JsepTransportControllerDatagramTest
- : public JsepTransportControllerTest,
- public testing::WithParamInterface<bool> {
- public:
- JsepTransportControllerDatagramTest()
- : JsepTransportControllerTest(),
- fake_media_transport_factory_(kFakeTransportParameters) {
- JsepTransportController::Config config;
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory_;
- config.use_datagram_transport = true;
- CreateJsepTransportController(config);
- }
-
- // Whether the JsepTransportController under test acts as the offerer or
- // answerer in this test.
- bool IsOfferer() { return GetParam(); }
-
- // Sets a description as local or remote based on type and current
- // perspective.
- RTCError SetDescription(SdpType type,
- const cricket::SessionDescription* description) {
- if (IsOfferer() == (type == SdpType::kOffer)) {
- return transport_controller_->SetLocalDescription(type, description);
- } else {
- return transport_controller_->SetRemoteDescription(type, description);
- }
- }
-
- // Creates a session description with the settings necessary for datagram
- // transport (bundle + crypto) and the given |transport_params|.
- std::unique_ptr<cricket::SessionDescription>
- CreateSessionDescriptionForDatagramTransport(
- absl::optional<cricket::OpaqueTransportParameters> transport_params) {
- auto description = CreateSessionDescriptionWithBundleGroup();
- AddCryptoSettings(description.get());
-
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = transport_params;
- }
- if (transport_params) {
- for (auto& content_info : description->contents()) {
- content_info.media_description()->set_alt_protocol(
- transport_params->protocol);
- }
- }
- return description;
- }
-
- // Creates transport parameters with |protocol| and |parameters|
- // matching what |fake_media_transport_factory_| provides.
- cricket::OpaqueTransportParameters CreateTransportParameters() {
- cricket::OpaqueTransportParameters params;
- params.protocol = fake_media_transport_factory_.GetTransportName();
- params.parameters = "fake-params";
- return params;
- }
-
- protected:
- FakeMediaTransportFactory fake_media_transport_factory_;
-};
-
-TEST_P(JsepTransportControllerDatagramTest, InitDatagramTransport) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- // Getting transport parameters is allowed before setting a description.
- // This is necessary so that the offerer can include these params.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- // Setting a description activates the datagram transport without changing
- // transport parameters.
- auto description = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // After setting an offer with transport parameters, those parameters are
- // reflected by the controller.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest,
- OfferMissingDatagramTransportParams) {
- if (IsOfferer()) {
- // This test doesn't make sense from the offerer's perspective, as the offer
- // must contain datagram transport params if the offerer supports it.
- return;
- }
-
- auto description =
- CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // The offer didn't contain any datagram transport parameters, so the answer
- // won't either.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, OfferHasWrongTransportName) {
- if (IsOfferer()) {
- // This test doesn't make sense from the offerer's perspective, as the
- // offerer cannot offer itself the wrong transport.
- return;
- }
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- fake_params.protocol = "wrong-name";
-
- auto description = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // The offerer and answerer support different datagram transports, so the
- // answerer rejects the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, IncompatibleAnswer) {
- // Transport will claim that no parameters are compatible, even if they match
- // exactly.
- fake_media_transport_factory_.set_transport_parameters_comparison(
- [](absl::string_view, absl::string_view) { return false; });
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The offerer and answerer have incompatible parameters, so the answerer
- // rejects the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, CompatibleAnswer) {
- // Transport will claim that no parameters are compatible, even if they are
- // completely different.
- fake_media_transport_factory_.set_transport_parameters_comparison(
- [](absl::string_view, absl::string_view) { return true; });
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- cricket::OpaqueTransportParameters answer_params;
- answer_params.protocol = fake_params.protocol;
- answer_params.parameters = "something different from offer";
- auto answer = CreateSessionDescriptionForDatagramTransport(answer_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The offerer and answerer have compatible parameters, so the answerer
- // accepts the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, AnswerRejectsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The answer rejected datagram transport, so its parameters are empty.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, AnswerAcceptsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The answer accepted datagram transport, so it is present.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, PrAnswerRejectsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok());
-
- // The answer rejected datagram transport, but it's provisional, so the
- // transport is kept around for now.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, PrAnswerAcceptsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok());
-
- // The answer provisionally accepted datagram transport, so it's kept.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotAddDatagram) {
- auto offer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-
- // Attempting to add a datagram transport on a re-offer does not cause an
- // error, but also does not add a datagram transport.
- auto reoffer =
- CreateSessionDescriptionForDatagramTransport(CreateTransportParameters());
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotRemoveDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- // Attempting to remove a datagram transport on a re-offer does not cause an
- // error, but also does not remove the datagram transport.
- auto reoffer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest,
- RenegotiationKeepsDatagramTransport) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- // Attempting to remove a datagram transport on a re-offer does not cause an
- // error, but also does not remove the datagram transport.
- auto reoffer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto reanswer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, reanswer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-INSTANTIATE_TEST_SUITE_P(
- JsepTransportControllerDatagramTests,
- JsepTransportControllerDatagramTest,
- testing::Values(true, false),
- // The parameter value is the local perspective (offerer or answerer).
- [](const testing::TestParamInfo<bool>& info) {
- return info.param ? "Offerer" : "Answerer";
- });
-
} // namespace webrtc
diff --git a/pc/jsep_transport_unittest.cc b/pc/jsep_transport_unittest.cc
index a4b1d5593e..d8f2fff621 100644
--- a/pc/jsep_transport_unittest.cc
+++ b/pc/jsep_transport_unittest.cc
@@ -120,9 +120,7 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> {
std::move(sdes_transport), std::move(dtls_srtp_transport),
/*datagram_rtp_transport=*/nullptr, std::move(rtp_dtls_transport),
std::move(rtcp_dtls_transport),
- /*sctp_transport=*/nullptr,
- /*datagram_transport=*/nullptr,
- /*data_channel_transport=*/nullptr);
+ /*sctp_transport=*/nullptr);
signal_rtcp_mux_active_received_ = false;
jsep_transport->SignalRtcpMuxActive.connect(
diff --git a/pc/media_session.cc b/pc/media_session.cc
index a9c523d430..69ddb0c895 100644
--- a/pc/media_session.cc
+++ b/pc/media_session.cc
@@ -55,6 +55,57 @@ void GetSupportedSdesCryptoSuiteNames(
}
}
+webrtc::RtpExtension RtpExtensionFromCapability(
+ const webrtc::RtpHeaderExtensionCapability& capability) {
+ return webrtc::RtpExtension(capability.uri,
+ capability.preferred_id.value_or(1));
+}
+
+cricket::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities(
+ const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities) {
+ cricket::RtpHeaderExtensions exts;
+ for (const auto& capability : capabilities) {
+ exts.push_back(RtpExtensionFromCapability(capability));
+ }
+ return exts;
+}
+
+std::vector<webrtc::RtpHeaderExtensionCapability>
+UnstoppedRtpHeaderExtensionCapabilities(
+ std::vector<webrtc::RtpHeaderExtensionCapability> capabilities) {
+ capabilities.erase(
+ std::remove_if(
+ capabilities.begin(), capabilities.end(),
+ [](const webrtc::RtpHeaderExtensionCapability& capability) {
+ return capability.direction == RtpTransceiverDirection::kStopped;
+ }),
+ capabilities.end());
+ return capabilities;
+}
+
+bool IsCapabilityPresent(const webrtc::RtpHeaderExtensionCapability& capability,
+ const cricket::RtpHeaderExtensions& extensions) {
+ return std::find_if(extensions.begin(), extensions.end(),
+ [&capability](const webrtc::RtpExtension& extension) {
+ return capability.uri == extension.uri;
+ }) != extensions.end();
+}
+
+cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions(
+ const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities,
+ const cricket::RtpHeaderExtensions& unencrypted,
+ const cricket::RtpHeaderExtensions& encrypted) {
+ cricket::RtpHeaderExtensions extensions;
+ for (const auto& capability : capabilities) {
+ if (capability.direction != RtpTransceiverDirection::kStopped ||
+ IsCapabilityPresent(capability, unencrypted) ||
+ IsCapabilityPresent(capability, encrypted)) {
+ extensions.push_back(RtpExtensionFromCapability(capability));
+ }
+ }
+ return extensions;
+}
+
} // namespace
namespace cricket {
@@ -182,14 +233,14 @@ bool FindMatchingCrypto(const CryptoParamsVec& cryptos,
void GetSupportedAudioSdesCryptoSuites(
const webrtc::CryptoOptions& crypto_options,
std::vector<int>* crypto_suites) {
- if (crypto_options.srtp.enable_gcm_crypto_suites) {
- crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM);
- crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM);
- }
if (crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher) {
crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_32);
}
crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
+ if (crypto_options.srtp.enable_gcm_crypto_suites) {
+ crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM);
+ crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM);
+ }
}
void GetSupportedAudioSdesCryptoSuiteNames(
@@ -202,11 +253,11 @@ void GetSupportedAudioSdesCryptoSuiteNames(
void GetSupportedVideoSdesCryptoSuites(
const webrtc::CryptoOptions& crypto_options,
std::vector<int>* crypto_suites) {
+ crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
if (crypto_options.srtp.enable_gcm_crypto_suites) {
crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM);
crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM);
}
- crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
}
void GetSupportedVideoSdesCryptoSuiteNames(
@@ -219,11 +270,11 @@ void GetSupportedVideoSdesCryptoSuiteNames(
void GetSupportedDataSdesCryptoSuites(
const webrtc::CryptoOptions& crypto_options,
std::vector<int>* crypto_suites) {
+ crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
if (crypto_options.srtp.enable_gcm_crypto_suites) {
crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM);
crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM);
}
- crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
}
void GetSupportedDataSdesCryptoSuiteNames(
@@ -268,19 +319,6 @@ static StreamParamsVec GetCurrentStreamParams(
return stream_params;
}
-// Filters the data codecs for the data channel type.
-void FilterDataCodecs(std::vector<DataCodec>* codecs, bool sctp) {
- // Filter RTP codec for SCTP and vice versa.
- const char* codec_name =
- sctp ? kGoogleRtpDataCodecName : kGoogleSctpDataCodecName;
- codecs->erase(std::remove_if(codecs->begin(), codecs->end(),
- [&codec_name](const DataCodec& codec) {
- return absl::EqualsIgnoreCase(codec.name,
- codec_name);
- }),
- codecs->end());
-}
-
static StreamParams CreateStreamParamsForNewSenderWithSsrcs(
const SenderOptions& sender,
const std::string& rtcp_cname,
@@ -452,15 +490,12 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
selected_transport_info->description.ice_pwd;
ConnectionRole selected_connection_role =
selected_transport_info->description.connection_role;
- const absl::optional<OpaqueTransportParameters>& selected_opaque_parameters =
- selected_transport_info->description.opaque_parameters;
for (TransportInfo& transport_info : sdesc->transport_infos()) {
if (bundle_group.HasContentName(transport_info.content_name) &&
transport_info.content_name != selected_content_name) {
transport_info.description.ice_ufrag = selected_ufrag;
transport_info.description.ice_pwd = selected_pwd;
transport_info.description.connection_role = selected_connection_role;
- transport_info.description.opaque_parameters = selected_opaque_parameters;
}
}
return true;
@@ -646,7 +681,21 @@ static bool CreateContentOffer(
if (offer->type() == cricket::MEDIA_TYPE_VIDEO) {
offer->set_rtcp_reduced_size(true);
}
- offer->set_rtp_header_extensions(rtp_extensions);
+
+ // Build the vector of header extensions with directions for this
+ // media_description's options.
+ RtpHeaderExtensions extensions;
+ for (auto extension_with_id : rtp_extensions) {
+ for (const auto& extension : media_description_options.header_extensions) {
+ if (extension_with_id.uri == extension.uri) {
+ // TODO(crbug.com/1051821): Configure the extension direction from
+ // the information in the media_description_options extension
+ // capability.
+ extensions.push_back(extension_with_id);
+ }
+ }
+ }
+ offer->set_rtp_header_extensions(extensions);
AddSimulcastToMediaDescription(media_description_options, offer);
@@ -661,8 +710,6 @@ static bool CreateContentOffer(
}
}
- offer->set_alt_protocol(media_description_options.alt_protocol);
-
if (secure_policy == SEC_REQUIRED && offer->cryptos().empty()) {
return false;
}
@@ -1178,7 +1225,7 @@ static bool CreateMediaContentAnswer(
const MediaSessionOptions& session_options,
const SecurePolicy& sdes_policy,
const CryptoParamsVec* current_cryptos,
- const RtpHeaderExtensions& local_rtp_extenstions,
+ const RtpHeaderExtensions& local_rtp_extensions,
UniqueRandomIdGenerator* ssrc_generator,
bool enable_encrypted_rtp_header_extensions,
StreamParamsVec* current_streams,
@@ -1187,7 +1234,7 @@ static bool CreateMediaContentAnswer(
answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum());
RtpHeaderExtensions negotiated_rtp_extensions;
NegotiateRtpHeaderExtensions(
- local_rtp_extenstions, offer->rtp_header_extensions(),
+ local_rtp_extensions, offer->rtp_header_extensions(),
enable_encrypted_rtp_header_extensions, &negotiated_rtp_extensions);
answer->set_rtp_header_extensions(negotiated_rtp_extensions);
@@ -1218,9 +1265,6 @@ static bool CreateMediaContentAnswer(
answer->set_direction(NegotiateRtpTransceiverDirection(
offer->direction(), media_description_options.direction));
- if (offer->alt_protocol() == media_description_options.alt_protocol) {
- answer->set_alt_protocol(media_description_options.alt_protocol);
- }
return true;
}
@@ -1365,12 +1409,8 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
: MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) {
channel_manager->GetSupportedAudioSendCodecs(&audio_send_codecs_);
channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_);
- audio_rtp_extensions_ =
- channel_manager->GetDefaultEnabledAudioRtpHeaderExtensions();
channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_);
channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_);
- video_rtp_extensions_ =
- channel_manager->GetDefaultEnabledVideoRtpHeaderExtensions();
channel_manager->GetSupportedDataCodecs(&rtp_data_codecs_);
ComputeAudioCodecsIntersectionAndUnion();
ComputeVideoCodecsIntersectionAndUnion();
@@ -1433,22 +1473,11 @@ static void RemoveUnifiedPlanExtensions(RtpHeaderExtensions* extensions) {
}
RtpHeaderExtensions
-MediaSessionDescriptionFactory::audio_rtp_header_extensions() const {
- RtpHeaderExtensions extensions = audio_rtp_extensions_;
+MediaSessionDescriptionFactory::filtered_rtp_header_extensions(
+ RtpHeaderExtensions extensions) const {
if (!is_unified_plan_) {
RemoveUnifiedPlanExtensions(&extensions);
}
-
- return extensions;
-}
-
-RtpHeaderExtensions
-MediaSessionDescriptionFactory::video_rtp_header_extensions() const {
- RtpHeaderExtensions extensions = video_rtp_extensions_;
- if (!is_unified_plan_) {
- RemoveUnifiedPlanExtensions(&extensions);
- }
-
return extensions;
}
@@ -1483,14 +1512,10 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
// If application doesn't want CN codecs in offer.
StripCNCodecs(&offer_audio_codecs);
}
- FilterDataCodecs(&offer_rtp_data_codecs,
- session_options.data_channel_type == DCT_SCTP);
-
- RtpHeaderExtensions audio_rtp_extensions;
- RtpHeaderExtensions video_rtp_extensions;
- GetRtpHdrExtsToOffer(current_active_contents,
- session_options.offer_extmap_allow_mixed,
- &audio_rtp_extensions, &video_rtp_extensions);
+ AudioVideoRtpHeaderExtensions extensions_with_ids =
+ GetOfferedRtpHeaderExtensionsWithIds(
+ current_active_contents, session_options.offer_extmap_allow_mixed,
+ session_options.media_description_options);
auto offer = std::make_unique<SessionDescription>();
@@ -1510,18 +1535,20 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
}
switch (media_description_options.type) {
case MEDIA_TYPE_AUDIO:
- if (!AddAudioContentForOffer(
- media_description_options, session_options, current_content,
- current_description, audio_rtp_extensions, offer_audio_codecs,
- &current_streams, offer.get(), &ice_credentials)) {
+ if (!AddAudioContentForOffer(media_description_options, session_options,
+ current_content, current_description,
+ extensions_with_ids.audio,
+ offer_audio_codecs, &current_streams,
+ offer.get(), &ice_credentials)) {
return nullptr;
}
break;
case MEDIA_TYPE_VIDEO:
- if (!AddVideoContentForOffer(
- media_description_options, session_options, current_content,
- current_description, video_rtp_extensions, offer_video_codecs,
- &current_streams, offer.get(), &ice_credentials)) {
+ if (!AddVideoContentForOffer(media_description_options, session_options,
+ current_content, current_description,
+ extensions_with_ids.video,
+ offer_video_codecs, &current_streams,
+ offer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1628,8 +1655,6 @@ MediaSessionDescriptionFactory::CreateAnswer(
// If application doesn't want CN codecs in answer.
StripCNCodecs(&answer_audio_codecs);
}
- FilterDataCodecs(&answer_rtp_data_codecs,
- session_options.data_channel_type == DCT_SCTP);
auto answer = std::make_unique<SessionDescription>();
@@ -1658,13 +1683,16 @@ MediaSessionDescriptionFactory::CreateAnswer(
msection_index < current_description->contents().size()) {
current_content = &current_description->contents()[msection_index];
}
+ RtpHeaderExtensions header_extensions = RtpHeaderExtensionsFromCapabilities(
+ UnstoppedRtpHeaderExtensionCapabilities(
+ media_description_options.header_extensions));
switch (media_description_options.type) {
case MEDIA_TYPE_AUDIO:
if (!AddAudioContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
- bundle_transport.get(), answer_audio_codecs, &current_streams,
- answer.get(), &ice_credentials)) {
+ bundle_transport.get(), answer_audio_codecs, header_extensions,
+ &current_streams, answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1672,8 +1700,8 @@ MediaSessionDescriptionFactory::CreateAnswer(
if (!AddVideoContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
- bundle_transport.get(), answer_video_codecs, &current_streams,
- answer.get(), &ice_credentials)) {
+ bundle_transport.get(), answer_video_codecs, header_extensions,
+ &current_streams, answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1966,11 +1994,12 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer(
&used_pltypes);
}
-void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
+MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions
+MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds(
const std::vector<const ContentInfo*>& current_active_contents,
bool extmap_allow_mixed,
- RtpHeaderExtensions* offer_audio_extensions,
- RtpHeaderExtensions* offer_video_extensions) const {
+ const std::vector<MediaDescriptionOptions>& media_description_options)
+ const {
// All header extensions allocated from the same range to avoid potential
// issues when using BUNDLE.
@@ -1984,6 +2013,7 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
RtpHeaderExtensions all_regular_extensions;
RtpHeaderExtensions all_encrypted_extensions;
+ AudioVideoRtpHeaderExtensions offered_extensions;
// First - get all extensions from the current description if the media type
// is used.
// Add them to |used_ids| so the local ids are not reused if a new media
@@ -1992,36 +2022,45 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) {
const AudioContentDescription* audio =
content->media_description()->as_audio();
- MergeRtpHdrExts(audio->rtp_header_extensions(), offer_audio_extensions,
+ MergeRtpHdrExts(audio->rtp_header_extensions(), &offered_extensions.audio,
&all_regular_extensions, &all_encrypted_extensions,
&used_ids);
} else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) {
const VideoContentDescription* video =
content->media_description()->as_video();
- MergeRtpHdrExts(video->rtp_header_extensions(), offer_video_extensions,
+ MergeRtpHdrExts(video->rtp_header_extensions(), &offered_extensions.video,
&all_regular_extensions, &all_encrypted_extensions,
&used_ids);
}
}
- // Add our default RTP header extensions that are not in the current
- // description.
- MergeRtpHdrExts(audio_rtp_header_extensions(), offer_audio_extensions,
- &all_regular_extensions, &all_encrypted_extensions,
- &used_ids);
- MergeRtpHdrExts(video_rtp_header_extensions(), offer_video_extensions,
- &all_regular_extensions, &all_encrypted_extensions,
- &used_ids);
+ // Add all encountered header extensions in the media description options that
+ // are not in the current description.
+ for (const auto& entry : media_description_options) {
+ RtpHeaderExtensions filtered_extensions =
+ filtered_rtp_header_extensions(UnstoppedOrPresentRtpHeaderExtensions(
+ entry.header_extensions, all_regular_extensions,
+ all_encrypted_extensions));
+ if (entry.type == MEDIA_TYPE_AUDIO)
+ MergeRtpHdrExts(filtered_extensions, &offered_extensions.audio,
+ &all_regular_extensions, &all_encrypted_extensions,
+ &used_ids);
+ else if (entry.type == MEDIA_TYPE_VIDEO)
+ MergeRtpHdrExts(filtered_extensions, &offered_extensions.video,
+ &all_regular_extensions, &all_encrypted_extensions,
+ &used_ids);
+ }
// TODO(jbauch): Support adding encrypted header extensions to existing
// sessions.
if (enable_encrypted_rtp_header_extensions_ &&
current_active_contents.empty()) {
- AddEncryptedVersionsOfHdrExts(offer_audio_extensions,
+ AddEncryptedVersionsOfHdrExts(&offered_extensions.audio,
&all_encrypted_extensions, &used_ids);
- AddEncryptedVersionsOfHdrExts(offer_video_extensions,
+ AddEncryptedVersionsOfHdrExts(&offered_extensions.video,
&all_encrypted_extensions, &used_ids);
}
+ return offered_extensions;
}
bool MediaSessionDescriptionFactory::AddTransportOffer(
@@ -2353,9 +2392,7 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const {
- bool is_sctp =
- (session_options.data_channel_type == DCT_SCTP ||
- session_options.data_channel_type == DCT_DATA_CHANNEL_TRANSPORT_SCTP);
+ bool is_sctp = (session_options.data_channel_type == DCT_SCTP);
// If the DataChannel type is not specified, use the DataChannel type in
// the current description.
if (session_options.data_channel_type == DCT_NONE && current_content) {
@@ -2396,6 +2433,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& default_audio_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
@@ -2468,9 +2506,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
if (!CreateMediaContentAnswer(
offer_audio_description, media_description_options, session_options,
sdes_policy, GetCryptos(current_content),
- audio_rtp_header_extensions(), ssrc_generator_,
- enable_encrypted_rtp_header_extensions_, current_streams,
- bundle_enabled, audio_answer.get())) {
+ filtered_rtp_header_extensions(default_audio_rtp_header_extensions),
+ ssrc_generator_, enable_encrypted_rtp_header_extensions_,
+ current_streams, bundle_enabled, audio_answer.get())) {
return false; // Fails the session setup.
}
@@ -2506,6 +2544,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer(
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const VideoCodecs& video_codecs,
+ const RtpHeaderExtensions& default_video_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
@@ -2586,9 +2625,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer(
if (!CreateMediaContentAnswer(
offer_video_description, media_description_options, session_options,
sdes_policy, GetCryptos(current_content),
- video_rtp_header_extensions(), ssrc_generator_,
- enable_encrypted_rtp_header_extensions_, current_streams,
- bundle_enabled, video_answer.get())) {
+ filtered_rtp_header_extensions(default_video_rtp_header_extensions),
+ ssrc_generator_, enable_encrypted_rtp_header_extensions_,
+ current_streams, bundle_enabled, video_answer.get())) {
return false; // Failed the sessin setup.
}
bool secure = bundle_transport ? bundle_transport->description.secure()
diff --git a/pc/media_session.h b/pc/media_session.h
index ef83834318..f305a6214c 100644
--- a/pc/media_session.h
+++ b/pc/media_session.h
@@ -78,7 +78,7 @@ struct MediaDescriptionOptions {
// stream information goes in the local descriptions.
std::vector<SenderOptions> sender_options;
std::vector<webrtc::RtpCodecCapability> codec_preferences;
- absl::optional<std::string> alt_protocol;
+ std::vector<webrtc::RtpHeaderExtensionCapability> header_extensions;
private:
// Doesn't DCHECK on |type|.
@@ -147,19 +147,13 @@ class MediaSessionDescriptionFactory {
const AudioCodecs& audio_recv_codecs() const;
void set_audio_codecs(const AudioCodecs& send_codecs,
const AudioCodecs& recv_codecs);
- void set_audio_rtp_header_extensions(const RtpHeaderExtensions& extensions) {
- audio_rtp_extensions_ = extensions;
- }
- RtpHeaderExtensions audio_rtp_header_extensions() const;
const VideoCodecs& video_sendrecv_codecs() const;
const VideoCodecs& video_send_codecs() const;
const VideoCodecs& video_recv_codecs() const;
void set_video_codecs(const VideoCodecs& send_codecs,
const VideoCodecs& recv_codecs);
- void set_video_rtp_header_extensions(const RtpHeaderExtensions& extensions) {
- video_rtp_extensions_ = extensions;
- }
- RtpHeaderExtensions video_rtp_header_extensions() const;
+ RtpHeaderExtensions filtered_rtp_header_extensions(
+ RtpHeaderExtensions extensions) const;
const RtpDataCodecs& rtp_data_codecs() const { return rtp_data_codecs_; }
void set_rtp_data_codecs(const RtpDataCodecs& codecs) {
rtp_data_codecs_ = codecs;
@@ -184,6 +178,11 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description) const;
private:
+ struct AudioVideoRtpHeaderExtensions {
+ RtpHeaderExtensions audio;
+ RtpHeaderExtensions video;
+ };
+
const AudioCodecs& GetAudioCodecsForOffer(
const webrtc::RtpTransceiverDirection& direction) const;
const AudioCodecs& GetAudioCodecsForAnswer(
@@ -205,11 +204,11 @@ class MediaSessionDescriptionFactory {
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const;
- void GetRtpHdrExtsToOffer(
+ AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds(
const std::vector<const ContentInfo*>& current_active_contents,
bool extmap_allow_mixed,
- RtpHeaderExtensions* audio_extensions,
- RtpHeaderExtensions* video_extensions) const;
+ const std::vector<MediaDescriptionOptions>& media_description_options)
+ const;
bool AddTransportOffer(const std::string& content_name,
const TransportOptions& transport_options,
const SessionDescription* current_desc,
@@ -293,6 +292,7 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& default_audio_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
@@ -306,6 +306,7 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const VideoCodecs& video_codecs,
+ const RtpHeaderExtensions& default_video_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
@@ -334,14 +335,12 @@ class MediaSessionDescriptionFactory {
AudioCodecs audio_sendrecv_codecs_;
// Union of send and recv.
AudioCodecs all_audio_codecs_;
- RtpHeaderExtensions audio_rtp_extensions_;
VideoCodecs video_send_codecs_;
VideoCodecs video_recv_codecs_;
// Intersection of send and recv.
VideoCodecs video_sendrecv_codecs_;
// Union of send and recv.
VideoCodecs all_video_codecs_;
- RtpHeaderExtensions video_rtp_extensions_;
RtpDataCodecs rtp_data_codecs_;
// This object is not owned by the channel so it must outlive it.
rtc::UniqueRandomIdGenerator* const ssrc_generator_;
diff --git a/pc/media_session_unittest.cc b/pc/media_session_unittest.cc
index 1a4b507c2b..ac949fb630 100644
--- a/pc/media_session_unittest.cc
+++ b/pc/media_session_unittest.cc
@@ -413,6 +413,17 @@ static MediaSessionOptions CreatePlanBMediaSessionOptions() {
return session_options;
}
+// prefers GCM SDES crypto suites by removing non-GCM defaults.
+void PreferGcmCryptoParameters(CryptoParamsVec* cryptos) {
+ cryptos->erase(
+ std::remove_if(cryptos->begin(), cryptos->end(),
+ [](const cricket::CryptoParams& crypto) {
+ return crypto.cipher_suite != CS_AEAD_AES_256_GCM &&
+ crypto.cipher_suite != CS_AEAD_AES_128_GCM;
+ }),
+ cryptos->end());
+}
+
// TODO(zhihuang): Most of these tests were written while MediaSessionOptions
// was designed for Plan B SDP, where only one audio "m=" section and one video
// "m=" section could be generated, and ordering couldn't be controlled. Many of
@@ -530,9 +541,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(
media_desc_options_it->transport_options.enable_ice_renomination,
GetIceRenomination(ti_audio));
- EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters,
- ti_audio->description.opaque_parameters);
-
} else {
EXPECT_TRUE(ti_audio == NULL);
}
@@ -545,8 +553,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(ti_audio->description.ice_ufrag,
ti_video->description.ice_ufrag);
EXPECT_EQ(ti_audio->description.ice_pwd, ti_video->description.ice_pwd);
- EXPECT_EQ(ti_audio->description.opaque_parameters,
- ti_video->description.opaque_parameters);
} else {
if (has_current_desc) {
EXPECT_EQ(current_video_ufrag, ti_video->description.ice_ufrag);
@@ -557,8 +563,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(static_cast<size_t>(cricket::ICE_PWD_LENGTH),
ti_video->description.ice_pwd.size());
}
- EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters,
- ti_video->description.opaque_parameters);
}
EXPECT_EQ(
media_desc_options_it->transport_options.enable_ice_renomination,
@@ -698,6 +702,13 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
std::unique_ptr<SessionDescription> offer =
f1_.CreateOffer(offer_opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ if (gcm_offer && gcm_answer) {
+ for (cricket::ContentInfo& content : offer->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ PreferGcmCryptoParameters(&cryptos);
+ content.media_description()->set_cryptos(cryptos);
+ }
+ }
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), answer_opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
@@ -736,13 +747,10 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
const cricket::RtpHeaderExtensions& expectedAnswer) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
-
+ SetAudioVideoRtpHeaderExtensions(offered, offered, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(local, local, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -754,6 +762,38 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
}
+ std::vector<webrtc::RtpHeaderExtensionCapability>
+ HeaderExtensionCapabilitiesFromRtpExtensions(
+ cricket::RtpHeaderExtensions extensions) {
+ std::vector<webrtc::RtpHeaderExtensionCapability> capabilities;
+ for (const auto& extension : extensions) {
+ webrtc::RtpHeaderExtensionCapability capability(
+ extension.uri, extension.id,
+ webrtc::RtpTransceiverDirection::kSendRecv);
+ capabilities.push_back(capability);
+ }
+ return capabilities;
+ }
+
+ void SetAudioVideoRtpHeaderExtensions(cricket::RtpHeaderExtensions audio_exts,
+ cricket::RtpHeaderExtensions video_exts,
+ MediaSessionOptions* opts) {
+ auto audio_caps = HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts);
+ auto video_caps = HeaderExtensionCapabilitiesFromRtpExtensions(video_exts);
+ for (auto& entry : opts->media_description_options) {
+ switch (entry.type) {
+ case MEDIA_TYPE_AUDIO:
+ entry.header_extensions = audio_caps;
+ break;
+ case MEDIA_TYPE_VIDEO:
+ entry.header_extensions = video_caps;
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
protected:
UniqueRandomIdGenerator ssrc_generator1;
UniqueRandomIdGenerator ssrc_generator2;
@@ -1237,6 +1277,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerGcm) {
opts.crypto_options.srtp.enable_gcm_crypto_suites = true;
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ for (cricket::ContentInfo& content : offer->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ PreferGcmCryptoParameters(&cryptos);
+ content.media_description()->set_cryptos(cryptos);
+ }
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
@@ -1343,6 +1388,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerGcm) {
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ for (cricket::ContentInfo& content : offer->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ PreferGcmCryptoParameters(&cryptos);
+ content.media_description()->set_cryptos(cryptos);
+ }
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
@@ -1631,13 +1681,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) {
TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithRtpExtensions) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1686,21 +1736,21 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- const auto offered = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- const auto local = MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),
+ MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
EXPECT_THAT(
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
}
TEST_F(MediaSessionDescriptionFactoryTest,
@@ -1708,21 +1758,18 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- const auto offered = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- const auto local = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
EXPECT_THAT(
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
}
TEST_F(MediaSessionDescriptionFactoryTest,
@@ -1731,10 +1778,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
- RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5);
- f1_.set_video_rtp_header_extensions({offer_dd});
- f2_.set_video_rtp_header_extensions({local_tsn});
+ SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5);
+ SetAudioVideoRtpHeaderExtensions({}, {local_tsn}, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1749,9 +1796,9 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
RtpExtension local_dd(RtpExtension::kDependencyDescriptorUri, 5);
- f1_.set_video_rtp_header_extensions({offer_dd});
- f2_.set_video_rtp_header_extensions({local_dd});
+ SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions({}, {local_dd}, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1768,12 +1815,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1793,12 +1838,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1818,12 +1861,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1835,6 +1876,203 @@ TEST_F(MediaSessionDescriptionFactoryTest,
}
TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithAudioVideoExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kSendOnly)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kSendOnly)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri2"))))),
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri3")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithAudioExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kStopped)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri42", 42,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kSendOnly)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"))))),
+ Property(
+ &ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ UnorderedElementsAre(Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithVideoExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 5,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 7,
+ RtpTransceiverDirection::kSendRecv)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri42", 42,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kStopped)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(
+ &ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ UnorderedElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"))))),
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 4,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 2,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri4", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 4,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 2,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri4", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto answer = f2_.CreateAnswer(offer.get(), opts, nullptr);
+ EXPECT_THAT(
+ answer->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ AppendsUnstoppedExtensionsToCurrentDescription) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 2,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 5,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri4", 6,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer2 = f1_.CreateOffer(opts, offer.get());
+ EXPECT_THAT(
+ offer2->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ AppendsStoppedExtensionIfKnownAndPresentInTheOffer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+
+ // Now add "uri2" as stopped to the options verify that the offer contains
+ // uri2 since it's already present since before.
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 2,
+ RtpTransceiverDirection::kStopped)};
+ auto offer2 = f1_.CreateOffer(opts, offer.get());
+ EXPECT_THAT(
+ offer2->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
TestOfferAnswerWithEncryptedRtpExtensionsBoth) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
@@ -1842,13 +2080,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f1_.set_enable_encrypted_rtp_header_extensions(true);
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1873,13 +2110,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f1_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1904,13 +2140,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -3312,12 +3547,11 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -3368,9 +3602,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension3));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension3));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension3),
+ MAKE_VECTOR(kVideoRtpExtension3), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
// Since the audio extensions used ID 3 for "both_audio_and_video", so should
@@ -3407,11 +3640,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) {
f1_.set_enable_encrypted_rtp_header_extensions(true);
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(
- MAKE_VECTOR(kAudioRtpExtension3ForEncryption));
- f1_.set_video_rtp_header_extensions(
- MAKE_VECTOR(kVideoRtpExtension3ForEncryption));
-
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kAudioRtpExtension3ForEncryption),
+ MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
// The extensions that are shared between audio and video should use the same
@@ -3603,164 +3834,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfo(false, options, true);
}
-TEST_F(MediaSessionDescriptionFactoryTest,
- TestTransportInfoOfferBundlesTransportOptions) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
-
- cricket::OpaqueTransportParameters audio_params;
- audio_params.protocol = "audio-transport";
- audio_params.parameters = "audio-params";
- FindFirstMediaDescriptionByMid("audio", &options)
- ->transport_options.opaque_parameters = audio_params;
-
- cricket::OpaqueTransportParameters video_params;
- video_params.protocol = "video-transport";
- video_params.parameters = "video-params";
- FindFirstMediaDescriptionByMid("video", &options)
- ->transport_options.opaque_parameters = video_params;
-
- TestTransportInfo(/*offer=*/true, options, /*has_current_desc=*/false);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest,
- TestTransportInfoAnswerBundlesTransportOptions) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
-
- cricket::OpaqueTransportParameters audio_params;
- audio_params.protocol = "audio-transport";
- audio_params.parameters = "audio-params";
- FindFirstMediaDescriptionByMid("audio", &options)
- ->transport_options.opaque_parameters = audio_params;
-
- cricket::OpaqueTransportParameters video_params;
- video_params.protocol = "video-transport";
- video_params.parameters = "video-params";
- FindFirstMediaDescriptionByMid("video", &options)
- ->transport_options.opaque_parameters = video_params;
-
- TestTransportInfo(/*offer=*/false, options, /*has_current_desc=*/false);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- EXPECT_EQ(offer->GetContentDescriptionByName("audio")->alt_protocol(), "foo");
- EXPECT_EQ(offer->GetContentDescriptionByName("video")->alt_protocol(), "bar");
- EXPECT_EQ(offer->GetContentDescriptionByName("data")->alt_protocol(), "baz");
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToAnswer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- "foo");
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- "bar");
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), "baz");
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolDifferentInOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "not-foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "not-bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "not-baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInAnswer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol =
- absl::nullopt;
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol =
- absl::nullopt;
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol =
- absl::nullopt;
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
// Create an offer with bundle enabled and verify the crypto parameters are
// the common set of the available cryptos.
TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithOfferBundle) {
@@ -4571,7 +4644,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestSetAudioCodecs) {
// properly.
send_codecs[1].channels = 0;
- // Alther iLBC receive codec to be lowercase, to test that case conversions
+ // Alter iLBC receive codec to be lowercase, to test that case conversions
// are handled properly.
recv_codecs[2].name = "ilbc";
diff --git a/pc/media_stream_unittest.cc b/pc/media_stream_unittest.cc
index b70171dfcb..55226992e0 100644
--- a/pc/media_stream_unittest.cc
+++ b/pc/media_stream_unittest.cc
@@ -46,7 +46,7 @@ class MockObserver : public ObserverInterface {
}
}
- MOCK_METHOD0(OnChanged, void());
+ MOCK_METHOD(void, OnChanged, (), (override));
private:
NotifierInterface* notifier_;
diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc
index 05e7b95591..1e738a933d 100644
--- a/pc/peer_connection.cc
+++ b/pc/peer_connection.cc
@@ -107,13 +107,6 @@ const char kDtlsSrtpSetupFailureRtcp[] =
namespace {
-// Field trials.
-// Controls datagram transport support.
-const char kDatagramTransportFieldTrial[] = "WebRTC-DatagramTransport";
-// Controls datagram transport data channel support.
-const char kDatagramTransportDataChannelFieldTrial[] =
- "WebRTC-DatagramTransportDataChannels";
-
// UMA metric names.
const char kSimulcastVersionApplyLocalDescription[] =
"WebRTC.PeerConnection.Simulcast.ApplyLocalDescription";
@@ -222,7 +215,7 @@ void AddPlanBRtpSenderOptions(
// Add options to |session_options| from |rtp_data_channels|.
void AddRtpDataChannelOptions(
- const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
+ const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>&
rtp_data_channels,
cricket::MediaDescriptionOptions* data_media_description_options) {
if (!data_media_description_options) {
@@ -230,9 +223,9 @@ void AddRtpDataChannelOptions(
}
// Check for data channels.
for (const auto& kv : rtp_data_channels) {
- const DataChannel* channel = kv.second;
- if (channel->state() == DataChannel::kConnecting ||
- channel->state() == DataChannel::kOpen) {
+ const RtpDataChannel* channel = kv.second;
+ if (channel->state() == RtpDataChannel::kConnecting ||
+ channel->state() == RtpDataChannel::kOpen) {
// Legacy RTP data channels are signaled with the track/stream ID set to
// the data channel's label.
data_media_description_options->AddRtpDataChannel(channel->label(),
@@ -690,6 +683,26 @@ class CreateSessionDescriptionObserverOperationWrapper
std::function<void()> operation_complete_callback_;
};
+// Check if the changes of IceTransportsType motives an ice restart.
+bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed,
+ PeerConnectionInterface::IceTransportsType current,
+ PeerConnectionInterface::IceTransportsType modified) {
+ if (current == modified) {
+ return false;
+ }
+
+ if (!surface_ice_candidates_on_ice_transport_type_changed) {
+ return true;
+ }
+
+ auto current_filter = ConvertIceTransportTypeToCandidateFilter(current);
+ auto modified_filter = ConvertIceTransportTypeToCandidateFilter(modified);
+
+ // If surface_ice_candidates_on_ice_transport_type_changed is true and we
+ // extend the filter, then no ice restart is needed.
+ return (current_filter & modified_filter) != current_filter;
+}
+
} // namespace
// Used by parameterless SetLocalDescription() to create an offer or answer.
@@ -896,11 +909,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
SdpSemantics sdp_semantics;
absl::optional<rtc::AdapterType> network_preference;
bool active_reset_srtp_params;
- bool use_media_transport;
- bool use_media_transport_for_data_channels;
- absl::optional<bool> use_datagram_transport;
- absl::optional<bool> use_datagram_transport_for_data_channels;
- absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
absl::optional<CryptoOptions> crypto_options;
bool offer_extmap_allow_mixed;
std::string turn_logging_id;
@@ -961,20 +969,11 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
sdp_semantics == o.sdp_semantics &&
network_preference == o.network_preference &&
active_reset_srtp_params == o.active_reset_srtp_params &&
- use_media_transport == o.use_media_transport &&
- use_media_transport_for_data_channels ==
- o.use_media_transport_for_data_channels &&
- use_datagram_transport == o.use_datagram_transport &&
- use_datagram_transport_for_data_channels ==
- o.use_datagram_transport_for_data_channels &&
- use_datagram_transport_for_data_channels_receive_only ==
- o.use_datagram_transport_for_data_channels_receive_only &&
crypto_options == o.crypto_options &&
offer_extmap_allow_mixed == o.offer_extmap_allow_mixed &&
turn_logging_id == o.turn_logging_id &&
enable_implicit_rollback == o.enable_implicit_rollback &&
- allow_codec_switching == o.allow_codec_switching &&
- enable_simulcast_stats == o.enable_simulcast_stats;
+ allow_codec_switching == o.allow_codec_switching;
}
bool PeerConnectionInterface::RTCConfiguration::operator!=(
@@ -1038,10 +1037,6 @@ PeerConnection::PeerConnection(PeerConnectionFactory* factory,
event_log_(std::move(event_log)),
event_log_ptr_(event_log_.get()),
operations_chain_(rtc::OperationsChain::Create()),
- datagram_transport_config_(
- field_trial::FindFullName(kDatagramTransportFieldTrial)),
- datagram_transport_data_channel_config_(
- field_trial::FindFullName(kDatagramTransportDataChannelFieldTrial)),
rtcp_cname_(GenerateRtcpCname()),
local_streams_(StreamCollection::Create()),
remote_streams_(StreamCollection::Create()),
@@ -1246,33 +1241,6 @@ bool PeerConnection::Initialize(
#endif
config.active_reset_srtp_params = configuration.active_reset_srtp_params;
- use_datagram_transport_ = datagram_transport_config_.enabled &&
- configuration.use_datagram_transport.value_or(
- datagram_transport_config_.default_value);
- use_datagram_transport_for_data_channels_ =
- datagram_transport_data_channel_config_.enabled &&
- configuration.use_datagram_transport_for_data_channels.value_or(
- datagram_transport_data_channel_config_.default_value);
- use_datagram_transport_for_data_channels_receive_only_ =
- configuration.use_datagram_transport_for_data_channels_receive_only
- .value_or(datagram_transport_data_channel_config_.receive_only);
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- if (!factory_->media_transport_factory()) {
- RTC_DCHECK(false)
- << "PeerConnecton is initialized with use_datagram_transport = true "
- "or use_datagram_transport_for_data_channels = true "
- "but media transport factory is not set in PeerConnectionFactory";
- return false;
- }
-
- config.use_datagram_transport = use_datagram_transport_;
- config.use_datagram_transport_for_data_channels =
- use_datagram_transport_for_data_channels_;
- config.use_datagram_transport_for_data_channels_receive_only =
- use_datagram_transport_for_data_channels_receive_only_;
- config.media_transport_factory = factory_->media_transport_factory();
- }
-
// Obtain a certificate from RTCConfiguration if any were provided (optional).
rtc::scoped_refptr<rtc::RTCCertificate> certificate;
if (!configuration.certificates.empty()) {
@@ -1295,24 +1263,7 @@ bool PeerConnection::Initialize(
sctp_factory_ = factory_->CreateSctpTransportInternalFactory();
- if (use_datagram_transport_for_data_channels_) {
- if (configuration.enable_rtp_data_channel) {
- RTC_LOG(LS_ERROR) << "enable_rtp_data_channel and "
- "use_datagram_transport_for_data_channels are "
- "incompatible and cannot both be set to true";
- return false;
- }
- if (configuration.enable_dtls_srtp && !*configuration.enable_dtls_srtp) {
- RTC_LOG(LS_INFO) << "Using data channel transport with no fallback";
- data_channel_controller_.set_data_channel_type(
- cricket::DCT_DATA_CHANNEL_TRANSPORT);
- } else {
- RTC_LOG(LS_INFO) << "Using data channel transport with fallback to SCTP";
- data_channel_controller_.set_data_channel_type(
- cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP);
- config.sctp_factory = sctp_factory_.get();
- }
- } else if (configuration.enable_rtp_data_channel) {
+ if (configuration.enable_rtp_data_channel) {
// Enable creation of RTP data channels if the kEnableRtpDataChannels is
// set. It takes precendence over the disable_sctp_data_channels
// PeerConnectionFactoryInterface::Options.
@@ -2180,7 +2131,7 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
internal_config.reset(new InternalDataChannelInit(*config));
}
rtc::scoped_refptr<DataChannelInterface> channel(
- data_channel_controller_.InternalCreateDataChannel(
+ data_channel_controller_.InternalCreateDataChannelWithProxy(
label, internal_config.get()));
if (!channel.get()) {
return nullptr;
@@ -2192,7 +2143,7 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
UpdateNegotiationNeeded();
}
NoteUsageEvent(UsageEvent::DATA_ADDED);
- return DataChannelProxy::Create(signaling_thread(), channel.get());
+ return channel;
}
void PeerConnection::RestartIce() {
@@ -2780,7 +2731,7 @@ RTCError PeerConnection::ApplyLocalDescription(
// If setting the description decided our SSL role, allocate any necessary
// SCTP sids.
rtc::SSLRole role;
- if (DataChannel::IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) {
+ if (IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) {
data_channel_controller_.AllocateSctpSids(role);
}
@@ -3219,7 +3170,7 @@ RTCError PeerConnection::ApplyRemoteDescription(
// If setting the description decided our SSL role, allocate any necessary
// SCTP sids.
rtc::SSLRole role;
- if (DataChannel::IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) {
+ if (IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) {
data_channel_controller_.AllocateSctpSids(role);
}
@@ -3937,66 +3888,6 @@ RTCError PeerConnection::SetConfiguration(
"SetLocalDescription.");
}
- if (local_description() && configuration.use_datagram_transport !=
- configuration_.use_datagram_transport) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() && configuration.use_datagram_transport !=
- configuration_.use_datagram_transport) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport "
- "after calling SetRemoteDescription.");
- }
-
- if (local_description() &&
- configuration.use_datagram_transport_for_data_channels !=
- configuration_.use_datagram_transport_for_data_channels) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() &&
- configuration.use_datagram_transport_for_data_channels !=
- configuration_.use_datagram_transport_for_data_channels) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels "
- "after calling SetRemoteDescription.");
- }
-
- if (local_description() &&
- configuration.use_datagram_transport_for_data_channels_receive_only !=
- configuration_
- .use_datagram_transport_for_data_channels_receive_only) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels_receive_only "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() &&
- configuration.use_datagram_transport_for_data_channels_receive_only !=
- configuration_
- .use_datagram_transport_for_data_channels_receive_only) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels_receive_only "
- "after calling SetRemoteDescription.");
- }
-
- if ((configuration.use_datagram_transport &&
- *configuration.use_datagram_transport) ||
- (configuration.use_datagram_transport_for_data_channels &&
- *configuration.use_datagram_transport_for_data_channels)) {
- RTC_CHECK(configuration.bundle_policy == kBundlePolicyMaxBundle)
- << "Media transport requires MaxBundle policy.";
- }
-
// The simplest (and most future-compatible) way to tell if the config was
// modified in an invalid way is to copy each property we do support
// modifying, then use operator==. There are far more properties we don't
@@ -4025,11 +3916,6 @@ RTCError PeerConnection::SetConfiguration(
modified_config.network_preference = configuration.network_preference;
modified_config.active_reset_srtp_params =
configuration.active_reset_srtp_params;
- modified_config.use_datagram_transport = configuration.use_datagram_transport;
- modified_config.use_datagram_transport_for_data_channels =
- configuration.use_datagram_transport_for_data_channels;
- modified_config.use_datagram_transport_for_data_channels_receive_only =
- configuration.use_datagram_transport_for_data_channels_receive_only;
modified_config.turn_logging_id = configuration.turn_logging_id;
modified_config.allow_codec_switching = configuration.allow_codec_switching;
if (configuration != modified_config) {
@@ -4089,7 +3975,9 @@ RTCError PeerConnection::SetConfiguration(
// candidate policy must set a "needs-ice-restart" bit so that the next offer
// triggers an ICE restart which will pick up the changes.
if (modified_config.servers != configuration_.servers ||
- modified_config.type != configuration_.type ||
+ NeedIceRestart(
+ configuration_.surface_ice_candidates_on_ice_transport_type_changed,
+ configuration_.type, modified_config.type) ||
modified_config.GetTurnPortPrunePolicy() !=
configuration_.GetTurnPortPrunePolicy()) {
transport_controller_->SetNeedsIceRestartFlag();
@@ -4097,20 +3985,6 @@ RTCError PeerConnection::SetConfiguration(
transport_controller_->SetIceConfig(ParseIceConfig(modified_config));
- use_datagram_transport_ = datagram_transport_config_.enabled &&
- modified_config.use_datagram_transport.value_or(
- datagram_transport_config_.default_value);
- use_datagram_transport_for_data_channels_ =
- datagram_transport_data_channel_config_.enabled &&
- modified_config.use_datagram_transport_for_data_channels.value_or(
- datagram_transport_data_channel_config_.default_value);
- use_datagram_transport_for_data_channels_receive_only_ =
- modified_config.use_datagram_transport_for_data_channels_receive_only
- .value_or(datagram_transport_data_channel_config_.receive_only);
- transport_controller_->SetMediaTransportSettings(
- use_datagram_transport_, use_datagram_transport_for_data_channels_,
- use_datagram_transport_for_data_channels_receive_only_);
-
if (configuration_.active_reset_srtp_params !=
modified_config.active_reset_srtp_params) {
transport_controller_->SetActiveResetSrtpParams(
@@ -4348,6 +4222,21 @@ PeerConnection::GetFirstAudioTransceiver() const {
return nullptr;
}
+void PeerConnection::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ if (!worker_thread()->IsCurrent()) {
+ return worker_thread()->Invoke<void>(RTC_FROM_HERE, [this, resource]() {
+ return AddAdaptationResource(resource);
+ });
+ }
+ RTC_DCHECK_RUN_ON(worker_thread());
+ if (!call_) {
+ // The PeerConnection has been closed.
+ return;
+ }
+ call_->AddAdaptationResource(resource);
+}
+
bool PeerConnection::StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
int64_t output_period_ms) {
return worker_thread()->Invoke<bool>(
@@ -4910,25 +4799,6 @@ void PeerConnection::GetOptionsForOffer(
session_options->offer_extmap_allow_mixed =
configuration_.offer_extmap_allow_mixed;
- // If datagram transport is in use, add opaque transport parameters.
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- for (auto& options : session_options->media_description_options) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(options.mid);
- if (!params) {
- continue;
- }
- options.transport_options.opaque_parameters = params;
- if ((use_datagram_transport_ &&
- (options.type == cricket::MEDIA_TYPE_AUDIO ||
- options.type == cricket::MEDIA_TYPE_VIDEO)) ||
- (use_datagram_transport_for_data_channels_ &&
- options.type == cricket::MEDIA_TYPE_DATA)) {
- options.alt_protocol = params->protocol;
- }
- }
- }
-
// Allow fallback for using obsolete SCTP syntax.
// Note that the default in |session_options| is true, while
// the default in |options| is false.
@@ -4940,8 +4810,8 @@ void PeerConnection::GetOptionsForPlanBOffer(
const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
cricket::MediaSessionOptions* session_options) {
// Figure out transceiver directional preferences.
- bool send_audio = HasRtpSender(cricket::MEDIA_TYPE_AUDIO);
- bool send_video = HasRtpSender(cricket::MEDIA_TYPE_VIDEO);
+ bool send_audio = !GetAudioTransceiver()->internal()->senders().empty();
+ bool send_video = !GetVideoTransceiver()->internal()->senders().empty();
// By default, generate sendrecv/recvonly m= sections.
bool recv_audio = true;
@@ -4984,21 +4854,21 @@ void PeerConnection::GetOptionsForPlanBOffer(
// Add audio/video/data m= sections to the end if needed.
if (!audio_index && offer_new_audio_description) {
- session_options->media_description_options.push_back(
- cricket::MediaDescriptionOptions(
- cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO,
- RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio),
- false));
-
+ cricket::MediaDescriptionOptions options(
+ cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO,
+ RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false);
+ options.header_extensions =
+ channel_manager()->GetSupportedAudioRtpHeaderExtensions();
+ session_options->media_description_options.push_back(options);
audio_index = session_options->media_description_options.size() - 1;
}
if (!video_index && offer_new_video_description) {
- session_options->media_description_options.push_back(
- cricket::MediaDescriptionOptions(
- cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO,
- RtpTransceiverDirectionFromSendRecv(send_video, recv_video),
- false));
-
+ cricket::MediaDescriptionOptions options(
+ cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO,
+ RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false);
+ options.header_extensions =
+ channel_manager()->GetSupportedVideoRtpHeaderExtensions();
+ session_options->media_description_options.push_back(options);
video_index = session_options->media_description_options.size() - 1;
}
if (!data_index && offer_new_data_description) {
@@ -5030,6 +4900,8 @@ GetMediaDescriptionOptionsForTransceiver(
transceiver->stopped());
media_description_options.codec_preferences =
transceiver->codec_preferences();
+ media_description_options.header_extensions =
+ transceiver->HeaderExtensionsToOffer();
// This behavior is specified in JSEP. The gist is that:
// 1. The MSID is included if the RtpTransceiver's direction is sendonly or
// sendrecv.
@@ -5234,33 +5106,14 @@ void PeerConnection::GetOptionsForAnswer(
RTC_FROM_HERE,
rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials,
port_allocator_.get()));
-
- // If datagram transport is in use, add opaque transport parameters.
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- for (auto& options : session_options->media_description_options) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(options.mid);
- if (!params) {
- continue;
- }
- options.transport_options.opaque_parameters = params;
- if ((use_datagram_transport_ &&
- (options.type == cricket::MEDIA_TYPE_AUDIO ||
- options.type == cricket::MEDIA_TYPE_VIDEO)) ||
- (use_datagram_transport_for_data_channels_ &&
- options.type == cricket::MEDIA_TYPE_DATA)) {
- options.alt_protocol = params->protocol;
- }
- }
- }
}
void PeerConnection::GetOptionsForPlanBAnswer(
const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
cricket::MediaSessionOptions* session_options) {
// Figure out transceiver directional preferences.
- bool send_audio = HasRtpSender(cricket::MEDIA_TYPE_AUDIO);
- bool send_video = HasRtpSender(cricket::MEDIA_TYPE_VIDEO);
+ bool send_audio = !GetAudioTransceiver()->internal()->senders().empty();
+ bool send_video = !GetVideoTransceiver()->internal()->senders().empty();
// By default, generate sendrecv/recvonly m= sections. The direction is also
// restricted by the direction in the offer.
@@ -5361,6 +5214,8 @@ void PeerConnection::GenerateMediaDescriptionOptions(
stopped));
*audio_index = session_options->media_description_options.size() - 1;
}
+ session_options->media_description_options.back().header_extensions =
+ channel_manager()->GetSupportedAudioRtpHeaderExtensions();
} else if (IsVideoContent(&content)) {
// If we already have an video m= section, reject this extra one.
if (*video_index) {
@@ -5376,6 +5231,8 @@ void PeerConnection::GenerateMediaDescriptionOptions(
stopped));
*video_index = session_options->media_description_options.size() - 1;
}
+ session_options->media_description_options.back().header_extensions =
+ channel_manager()->GetSupportedVideoRtpHeaderExtensions();
} else {
RTC_DCHECK(IsDataContent(&content));
// If we already have an data m= section, reject this extra one.
@@ -5423,8 +5280,6 @@ absl::optional<std::string> PeerConnection::GetDataMid() const {
}
return data_channel_controller_.rtp_data_channel()->content_name();
case cricket::DCT_SCTP:
- case cricket::DCT_DATA_CHANNEL_TRANSPORT:
- case cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP:
return sctp_mid_s_;
default:
return absl::nullopt;
@@ -5684,10 +5539,11 @@ void PeerConnection::OnLocalSenderRemoved(const RtpSenderInfo& sender_info,
sender->internal()->SetSsrc(0);
}
-void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
+void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) {
// Since data_channel_controller doesn't do signals, this
// signal is relayed here.
- data_channel_controller_.OnSctpDataChannelClosed(channel);
+ data_channel_controller_.OnSctpDataChannelClosed(
+ static_cast<SctpDataChannel*>(channel));
}
rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
@@ -5718,21 +5574,6 @@ PeerConnection::GetVideoTransceiver() const {
return nullptr;
}
-// TODO(bugs.webrtc.org/7600): Remove this when multiple transceivers with
-// individual transceiver directions are supported.
-bool PeerConnection::HasRtpSender(cricket::MediaType type) const {
- switch (type) {
- case cricket::MEDIA_TYPE_AUDIO:
- return !GetAudioTransceiver()->internal()->senders().empty();
- case cricket::MEDIA_TYPE_VIDEO:
- return !GetVideoTransceiver()->internal()->senders().empty();
- case cricket::MEDIA_TYPE_DATA:
- return false;
- }
- RTC_NOTREACHED();
- return false;
-}
-
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) const {
for (const auto& transceiver : transceivers_) {
@@ -5799,7 +5640,7 @@ const PeerConnection::RtpSenderInfo* PeerConnection::FindSenderInfo(
return nullptr;
}
-DataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
+SctpDataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
return data_channel_controller_.FindDataChannelBySid(sid);
}
@@ -6188,6 +6029,11 @@ cricket::IceConfig PeerConnection::ParseIceConfig(
return ice_config;
}
+std::vector<DataChannelStats> PeerConnection::GetDataChannelStats() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return data_channel_controller_.GetDataChannelStats();
+}
+
absl::optional<std::string> PeerConnection::sctp_transport_name() const {
RTC_DCHECK_RUN_ON(signaling_thread());
if (sctp_mid_s_ && transport_controller_) {
@@ -6612,13 +6458,11 @@ RTCError PeerConnection::CreateChannels(const SessionDescription& desc) {
cricket::VoiceChannel* PeerConnection::CreateVoiceChannel(
const std::string& mid) {
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
- MediaTransportConfig media_transport_config =
- transport_controller_->GetMediaTransportConfig(mid);
cricket::VoiceChannel* voice_channel = channel_manager()->CreateVoiceChannel(
- call_ptr_, configuration_.media_config, rtp_transport,
- media_transport_config, signaling_thread(), mid, SrtpRequired(),
- GetCryptoOptions(), &ssrc_generator_, audio_options_);
+ call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(),
+ mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_,
+ audio_options_);
if (!voice_channel) {
return nullptr;
}
@@ -6635,13 +6479,10 @@ cricket::VoiceChannel* PeerConnection::CreateVoiceChannel(
cricket::VideoChannel* PeerConnection::CreateVideoChannel(
const std::string& mid) {
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
- MediaTransportConfig media_transport_config =
- transport_controller_->GetMediaTransportConfig(mid);
cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel(
- call_ptr_, configuration_.media_config, rtp_transport,
- media_transport_config, signaling_thread(), mid, SrtpRequired(),
- GetCryptoOptions(), &ssrc_generator_, video_options_,
+ call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(),
+ mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_, video_options_,
video_bitrate_allocator_factory_.get());
if (!video_channel) {
return nullptr;
@@ -6658,8 +6499,6 @@ cricket::VideoChannel* PeerConnection::CreateVideoChannel(
bool PeerConnection::CreateDataChannel(const std::string& mid) {
switch (data_channel_type()) {
case cricket::DCT_SCTP:
- case cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP:
- case cricket::DCT_DATA_CHANNEL_TRANSPORT:
if (network_thread()->Invoke<bool>(
RTC_FROM_HERE,
rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, this,
@@ -6668,16 +6507,12 @@ bool PeerConnection::CreateDataChannel(const std::string& mid) {
} else {
return false;
}
-
- // All non-RTP data channels must initialize |sctp_data_channels_|.
- for (const auto& channel :
- *data_channel_controller_.sctp_data_channels()) {
- channel->OnTransportChannelCreated();
- }
return true;
case cricket::DCT_RTP:
default:
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
+ // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be called on
+ // the network thread like set_data_channel_transport is.
data_channel_controller_.set_rtp_data_channel(
channel_manager()->CreateRtpDataChannel(
configuration_.media_config, rtp_transport, signaling_thread(),
@@ -6704,6 +6539,7 @@ Call::Stats PeerConnection::GetCallStats() {
RTC_FROM_HERE, rtc::Bind(&PeerConnection::GetCallStats, this));
}
RTC_DCHECK_RUN_ON(worker_thread());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
if (call_) {
return call_->GetStats();
} else {
@@ -7090,8 +6926,7 @@ bool PeerConnection::ReadyToUseRemoteCandidate(
}
bool PeerConnection::SrtpRequired() const {
- return !use_datagram_transport_ &&
- (dtls_enabled_ ||
+ return (dtls_enabled_ ||
webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED);
}
diff --git a/pc/peer_connection.h b/pc/peer_connection.h
index f3102572fb..2591c4b75f 100644
--- a/pc/peer_connection.h
+++ b/pc/peer_connection.h
@@ -237,6 +237,8 @@ class PeerConnection : public PeerConnectionInternal,
rtc::scoped_refptr<SctpTransportInterface> GetSctpTransport() const override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
int64_t output_period_ms) override;
bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output) override;
@@ -270,19 +272,19 @@ class PeerConnection : public PeerConnectionInternal,
return transceivers_;
}
- sigslot::signal1<DataChannel*>& SignalDataChannelCreated() override {
- return data_channel_controller_.SignalDataChannelCreated();
+ sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() override {
+ return data_channel_controller_.SignalRtpDataChannelCreated();
+ }
+
+ sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() override {
+ return data_channel_controller_.SignalSctpDataChannelCreated();
}
cricket::RtpDataChannel* rtp_data_channel() const override {
return data_channel_controller_.rtp_data_channel();
}
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- RTC_DCHECK_RUN_ON(signaling_thread());
- return *data_channel_controller_.sctp_data_channels();
- }
+ std::vector<DataChannelStats> GetDataChannelStats() const override;
absl::optional<std::string> sctp_transport_name() const override;
@@ -312,7 +314,7 @@ class PeerConnection : public PeerConnectionInternal,
// Get current SSL role used by SCTP's underlying transport.
bool GetSctpSslRole(rtc::SSLRole* role);
// Handler for the "channel closed" signal
- void OnSctpDataChannelClosed(DataChannel* channel);
+ void OnSctpDataChannelClosed(DataChannelInterface* channel);
// Functions made public for testing.
void ReturnHistogramVeryQuicklyForTesting() {
@@ -360,57 +362,6 @@ class PeerConnection : public PeerConnectionInternal,
uint32_t first_ssrc;
};
- // Field-trial based configuration for datagram transport.
- struct DatagramTransportConfig {
- explicit DatagramTransportConfig(const std::string& field_trial)
- : enabled("enabled", true), default_value("default_value", false) {
- ParseFieldTrial({&enabled, &default_value}, field_trial);
- }
-
- // Whether datagram transport support is enabled at all. Defaults to true,
- // allowing datagram transport to be used if (a) the application provides a
- // factory for it and (b) the configuration specifies its use. This flag
- // provides a kill-switch to force-disable datagram transport across all
- // applications, without code changes.
- FieldTrialFlag enabled;
-
- // Whether the datagram transport is enabled or disabled by default.
- // Defaults to false, meaning that applications must configure use of
- // datagram transport through RTCConfiguration. If set to true,
- // applications will use the datagram transport by default (but may still
- // explicitly configure themselves not to use it through RTCConfiguration).
- FieldTrialFlag default_value;
- };
-
- // Field-trial based configuration for datagram transport data channels.
- struct DatagramTransportDataChannelConfig {
- explicit DatagramTransportDataChannelConfig(const std::string& field_trial)
- : enabled("enabled", true),
- default_value("default_value", false),
- receive_only("receive_only", false) {
- ParseFieldTrial({&enabled, &default_value, &receive_only}, field_trial);
- }
-
- // Whether datagram transport data channel support is enabled at all.
- // Defaults to true, allowing datagram transport to be used if (a) the
- // application provides a factory for it and (b) the configuration specifies
- // its use. This flag provides a kill-switch to force-disable datagram
- // transport across all applications, without code changes.
- FieldTrialFlag enabled;
-
- // Whether the datagram transport data channels are enabled or disabled by
- // default. Defaults to false, meaning that applications must configure use
- // of datagram transport through RTCConfiguration. If set to true,
- // applications will use the datagram transport by default (but may still
- // explicitly configure themselves not to use it through RTCConfiguration).
- FieldTrialFlag default_value;
-
- // Whether the datagram transport is enabled in receive-only mode. If true,
- // and if the datagram transport is enabled, it will only be used when
- // receiving incoming calls, not when placing outgoing calls.
- FieldTrialFlag receive_only;
- };
-
// Captures partial state to be used for rollback. Applicable only in
// Unified Plan.
class TransceiverStableState {
@@ -859,10 +810,6 @@ class PeerConnection : public PeerConnectionInternal,
void FillInMissingRemoteMids(cricket::SessionDescription* remote_description)
RTC_RUN_ON(signaling_thread());
- // Is there an RtpSender of the given type?
- bool HasRtpSender(cricket::MediaType type) const
- RTC_RUN_ON(signaling_thread());
-
// Return the RtpSender with the given track attached.
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
FindSenderForTrack(MediaStreamTrackInterface* track) const
@@ -888,7 +835,7 @@ class PeerConnection : public PeerConnectionInternal,
// Returns the specified SCTP DataChannel in sctp_data_channels_,
// or nullptr if not found.
- DataChannel* FindDataChannelBySid(int sid) const
+ SctpDataChannel* FindDataChannelBySid(int sid) const
RTC_RUN_ON(signaling_thread());
// Called when first configuring the port allocator.
@@ -1214,25 +1161,6 @@ class PeerConnection : public PeerConnectionInternal,
PeerConnectionInterface::RTCConfiguration configuration_
RTC_GUARDED_BY(signaling_thread());
- // Field-trial based configuration for datagram transport.
- const DatagramTransportConfig datagram_transport_config_;
-
- // Field-trial based configuration for datagram transport data channels.
- const DatagramTransportDataChannelConfig
- datagram_transport_data_channel_config_;
-
- // Final, resolved value for whether datagram transport is in use.
- bool use_datagram_transport_ RTC_GUARDED_BY(signaling_thread()) = false;
-
- // Equivalent of |use_datagram_transport_|, but for its use with data
- // channels.
- bool use_datagram_transport_for_data_channels_
- RTC_GUARDED_BY(signaling_thread()) = false;
-
- // Resolved value of whether to use data channels only for incoming calls.
- bool use_datagram_transport_for_data_channels_receive_only_
- RTC_GUARDED_BY(signaling_thread()) = false;
-
// TODO(zstein): |async_resolver_factory_| can currently be nullptr if it
// is not injected. It should be required once chromium supplies it.
std::unique_ptr<AsyncResolverFactory> async_resolver_factory_
diff --git a/pc/peer_connection_adaptation_integrationtest.cc b/pc/peer_connection_adaptation_integrationtest.cc
new file mode 100644
index 0000000000..71d054eb90
--- /dev/null
+++ b/pc/peer_connection_adaptation_integrationtest.cc
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/test/fake_resource.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+const int64_t kDefaultTimeoutMs = 5000;
+
+struct TrackWithPeriodicSource {
+ rtc::scoped_refptr<VideoTrackInterface> track;
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source;
+};
+
+// Performs an O/A exchange and waits until the signaling state is stable again.
+void Negotiate(rtc::scoped_refptr<PeerConnectionTestWrapper> caller,
+ rtc::scoped_refptr<PeerConnectionTestWrapper> callee) {
+ // Wire up callbacks and listeners such that a full O/A is performed in
+ // response to CreateOffer().
+ PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
+ caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ caller->WaitForNegotiation();
+}
+
+TrackWithPeriodicSource CreateTrackWithPeriodicSource(
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory) {
+ FakePeriodicVideoSource::Config periodic_track_source_config;
+ periodic_track_source_config.frame_interval_ms = 100;
+ periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis();
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source =
+ new rtc::RefCountedObject<FakePeriodicVideoTrackSource>(
+ periodic_track_source_config, /* remote */ false);
+ TrackWithPeriodicSource track_with_source;
+ track_with_source.track =
+ factory->CreateVideoTrack("PeriodicTrack", periodic_track_source);
+ track_with_source.periodic_track_source = periodic_track_source;
+ return track_with_source;
+}
+
+// Triggers overuse and obtains VideoSinkWants. Adaptation processing happens in
+// parallel and this function makes no guarantee that the returnd VideoSinkWants
+// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT
+// to "spam overuse until a change is observed".
+rtc::VideoSinkWants TriggerOveruseAndGetSinkWants(
+ rtc::scoped_refptr<FakeResource> fake_resource,
+ const FakePeriodicVideoSource& source) {
+ fake_resource->SetUsageState(ResourceUsageState::kOveruse);
+ return source.wants();
+}
+
+class PeerConnectionAdaptationIntegrationTest : public ::testing::Test {
+ public:
+ PeerConnectionAdaptationIntegrationTest()
+ : virtual_socket_server_(),
+ network_thread_(new rtc::Thread(&virtual_socket_server_)),
+ worker_thread_(rtc::Thread::Create()) {
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+ }
+
+ rtc::scoped_refptr<PeerConnectionTestWrapper> CreatePcWrapper(
+ const char* name) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper =
+ new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ name, network_thread_.get(), worker_thread_.get());
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory()));
+ return pc_wrapper;
+ }
+
+ protected:
+ rtc::VirtualSocketServer virtual_socket_server_;
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+};
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedAfterNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Inject a fake resource and spam kOveruse until resolution becomes limited.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedBeforeNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Inject a fake resource before adding any tracks or negotiating.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Spam kOveruse until resolution becomes limited.
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+} // namespace webrtc
diff --git a/pc/peer_connection_crypto_unittest.cc b/pc/peer_connection_crypto_unittest.cc
index 99eb5cd7ac..32e8cbd74c 100644
--- a/pc/peer_connection_crypto_unittest.cc
+++ b/pc/peer_connection_crypto_unittest.cc
@@ -149,9 +149,12 @@ SdpContentPredicate HaveSdesGcmCryptos(size_t num_crypto_suites) {
if (cryptos.size() != num_crypto_suites) {
return false;
}
- const cricket::CryptoParams first_params = cryptos[0];
- return first_params.key_params.size() == 67U &&
- first_params.cipher_suite == "AEAD_AES_256_GCM";
+ for (size_t i = 0; i < cryptos.size(); ++i) {
+ if (cryptos[i].key_params.size() == 67U &&
+ cryptos[i].cipher_suite == "AEAD_AES_256_GCM")
+ return true;
+ }
+ return false;
};
}
@@ -333,7 +336,14 @@ TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWithSdesAndGcm) {
auto caller = CreatePeerConnectionWithAudioVideo(config);
auto callee = CreatePeerConnectionWithAudioVideo(config);
- callee->SetRemoteDescription(caller->CreateOffer());
+ auto offer = caller->CreateOffer();
+ for (cricket::ContentInfo& content : offer->description()->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ cryptos.erase(cryptos.begin()); // Assumes that non-GCM is the default.
+ content.media_description()->set_cryptos(cryptos);
+ }
+
+ callee->SetRemoteDescription(std::move(offer));
auto answer = callee->CreateAnswer();
ASSERT_TRUE(answer);
diff --git a/pc/peer_connection_factory.cc b/pc/peer_connection_factory.cc
index 9a758bea2d..b274d57235 100644
--- a/pc/peer_connection_factory.cc
+++ b/pc/peer_connection_factory.cc
@@ -10,6 +10,7 @@
#include "pc/peer_connection_factory.h"
+#include <cstdio>
#include <memory>
#include <utility>
#include <vector>
@@ -23,7 +24,6 @@
#include "api/peer_connection_proxy.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/transport/field_trial_based_config.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/turn_customizer.h"
#include "api/units/data_rate.h"
#include "api/video_track_source_proxy.h"
@@ -82,7 +82,6 @@ PeerConnectionFactory::PeerConnectionFactory(
std::move(dependencies.network_state_predictor_factory)),
injected_network_controller_factory_(
std::move(dependencies.network_controller_factory)),
- media_transport_factory_(std::move(dependencies.media_transport_factory)),
neteq_factory_(std::move(dependencies.neteq_factory)),
trials_(dependencies.trials ? std::move(dependencies.trials)
: std::make_unique<FieldTrialBasedConfig>()) {
@@ -109,6 +108,10 @@ PeerConnectionFactory::PeerConnectionFactory(
wraps_current_thread_ = true;
}
}
+ signaling_thread_->AllowInvokesToThread(worker_thread_);
+ signaling_thread_->AllowInvokesToThread(network_thread_);
+ worker_thread_->AllowInvokesToThread(network_thread_);
+ network_thread_->DisallowAllInvokes();
}
PeerConnectionFactory::~PeerConnectionFactory() {
diff --git a/pc/peer_connection_factory.h b/pc/peer_connection_factory.h
index 962b08c7c9..58859a0296 100644
--- a/pc/peer_connection_factory.h
+++ b/pc/peer_connection_factory.h
@@ -18,7 +18,6 @@
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
-#include "api/transport/media/media_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "pc/channel_manager.h"
#include "rtc_base/rtc_certificate_generator.h"
@@ -87,10 +86,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
const Options& options() const { return options_; }
- MediaTransportFactory* media_transport_factory() {
- return media_transport_factory_.get();
- }
-
protected:
// This structure allows simple management of all new dependencies being added
// to the PeerConnectionFactory.
@@ -128,7 +123,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
network_state_predictor_factory_;
std::unique_ptr<NetworkControllerFactoryInterface>
injected_network_controller_factory_;
- std::unique_ptr<MediaTransportFactory> media_transport_factory_;
std::unique_ptr<NetEqFactory> neteq_factory_;
const std::unique_ptr<WebRtcKeyValueConfig> trials_;
};
diff --git a/pc/peer_connection_header_extension_unittest.cc b/pc/peer_connection_header_extension_unittest.cc
index 3f44d4f877..62fda59212 100644
--- a/pc/peer_connection_header_extension_unittest.cc
+++ b/pc/peer_connection_header_extension_unittest.cc
@@ -33,16 +33,31 @@ class PeerConnectionHeaderExtensionTest
: public ::testing::TestWithParam<
std::tuple<cricket::MediaType, SdpSemantics>> {
protected:
+ PeerConnectionHeaderExtensionTest()
+ : extensions_(
+ {RtpHeaderExtensionCapability("uri1",
+ 1,
+ RtpTransceiverDirection::kStopped),
+ RtpHeaderExtensionCapability("uri2",
+ 2,
+ RtpTransceiverDirection::kSendOnly),
+ RtpHeaderExtensionCapability("uri3",
+ 3,
+ RtpTransceiverDirection::kRecvOnly),
+ RtpHeaderExtensionCapability(
+ "uri4",
+ 4,
+ RtpTransceiverDirection::kSendRecv)}) {}
+
std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection(
cricket::MediaType media_type,
- absl::optional<SdpSemantics> semantics,
- std::vector<RtpHeaderExtensionCapability> extensions) {
+ absl::optional<SdpSemantics> semantics) {
auto voice = std::make_unique<cricket::FakeVoiceEngine>();
auto video = std::make_unique<cricket::FakeVideoEngine>();
if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO)
- voice->SetRtpHeaderExtensions(extensions);
+ voice->SetRtpHeaderExtensions(extensions_);
else
- video->SetRtpHeaderExtensions(extensions);
+ video->SetRtpHeaderExtensions(extensions_);
auto media_engine = std::make_unique<cricket::CompositeMediaEngine>(
std::move(voice), std::move(video));
PeerConnectionFactoryDependencies factory_dependencies;
@@ -71,6 +86,8 @@ class PeerConnectionHeaderExtensionTest
return std::make_unique<PeerConnectionWrapper>(pc_factory, pc,
std::move(observer));
}
+
+ std::vector<RtpHeaderExtensionCapability> extensions_;
};
TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) {
@@ -79,19 +96,10 @@ TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) {
std::tie(media_type, semantics) = GetParam();
if (semantics != SdpSemantics::kUnifiedPlan)
return;
- std::vector<RtpHeaderExtensionCapability> extensions(
- {RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kStopped),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kSendOnly),
- RtpHeaderExtensionCapability("uri3", 3,
- RtpTransceiverDirection::kRecvOnly),
- RtpHeaderExtensionCapability("uri4", 4,
- RtpTransceiverDirection::kSendRecv)});
std::unique_ptr<PeerConnectionWrapper> wrapper =
- CreatePeerConnection(media_type, semantics, extensions);
+ CreatePeerConnection(media_type, semantics);
auto transceiver = wrapper->AddTransceiver(media_type);
- EXPECT_EQ(transceiver->HeaderExtensionsToOffer(), extensions);
+ EXPECT_EQ(transceiver->HeaderExtensionsToOffer(), extensions_);
}
TEST_P(PeerConnectionHeaderExtensionTest,
@@ -99,20 +107,14 @@ TEST_P(PeerConnectionHeaderExtensionTest,
cricket::MediaType media_type;
SdpSemantics semantics;
std::tie(media_type, semantics) = GetParam();
- std::unique_ptr<PeerConnectionWrapper> wrapper = CreatePeerConnection(
- media_type, semantics,
- std::vector<RtpHeaderExtensionCapability>(
- {RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kSendRecv),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kStopped),
- RtpHeaderExtensionCapability("uri3", 3,
- RtpTransceiverDirection::kRecvOnly)}));
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
EXPECT_THAT(wrapper->pc_factory()
->GetRtpSenderCapabilities(media_type)
.header_extensions,
- ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri1"),
- Field(&RtpHeaderExtensionCapability::uri, "uri3")));
+ ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri2"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri3"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri4")));
EXPECT_EQ(wrapper->pc_factory()
->GetRtpReceiverCapabilities(media_type)
.header_extensions,
@@ -121,6 +123,49 @@ TEST_P(PeerConnectionHeaderExtensionTest,
.header_extensions);
}
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto modified_extensions = transceiver->HeaderExtensionsToOffer();
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_TRUE(
+ transceiver->SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+}
+
INSTANTIATE_TEST_SUITE_P(
,
PeerConnectionHeaderExtensionTest,
diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc
index f3b4f28360..afb5f2ba75 100644
--- a/pc/peer_connection_integrationtest.cc
+++ b/pc/peer_connection_integrationtest.cc
@@ -28,7 +28,6 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/rtp_receiver_interface.h"
#include "api/task_queue/default_task_queue_factory.h"
-#include "api/test/loopback_media_transport.h"
#include "api/uma_metrics.h"
#include "api/video_codecs/sdp_video_format.h"
#include "call/call.h"
@@ -215,7 +214,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
dependencies.cert_generator = std::move(cert_generator);
if (!client->Init(nullptr, nullptr, std::move(dependencies), network_thread,
worker_thread, nullptr,
- /*media_transport_factory=*/nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false)) {
delete client;
@@ -611,7 +609,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> media_transport_factory,
bool reset_encoder_factory,
bool reset_decoder_factory) {
// There's an error in this test code if Init ends up being called twice.
@@ -666,10 +663,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
std::make_unique<webrtc::RtcEventLogFactory>(
pc_factory_dependencies.task_queue_factory.get());
}
- if (media_transport_factory) {
- pc_factory_dependencies.media_transport_factory =
- std::move(media_transport_factory);
- }
peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory(
std::move(pc_factory_dependencies));
@@ -1082,8 +1075,8 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput {
public:
virtual ~MockRtcEventLogOutput() = default;
- MOCK_CONST_METHOD0(IsActive, bool());
- MOCK_METHOD1(Write, bool(const std::string&));
+ MOCK_METHOD(bool, IsActive, (), (const, override));
+ MOCK_METHOD(bool, Write, (const std::string&), (override));
};
// This helper object is used for both specifying how many audio/video frames
@@ -1216,7 +1209,7 @@ class MockIceTransportFactory : public IceTransportFactory {
return new rtc::RefCountedObject<MockIceTransport>(transport_name,
component);
}
- MOCK_METHOD0(RecordIceTransportCreated, void());
+ MOCK_METHOD(void, RecordIceTransportCreated, ());
};
// Tests two PeerConnections connecting to each other end-to-end, using a
@@ -1230,8 +1223,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
ss_(new rtc::VirtualSocketServer()),
fss_(new rtc::FirewallSocketServer(ss_.get())),
network_thread_(new rtc::Thread(fss_.get())),
- worker_thread_(rtc::Thread::Create()),
- loopback_media_transports_(network_thread_.get()) {
+ worker_thread_(rtc::Thread::Create()) {
network_thread_->SetName("PCNetworkThread", this);
worker_thread_->SetName("PCWorkerThread", this);
RTC_CHECK(network_thread_->Start());
@@ -1288,7 +1280,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
const RTCConfiguration* config,
webrtc::PeerConnectionDependencies dependencies,
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> media_transport_factory,
bool reset_encoder_factory,
bool reset_decoder_factory) {
RTCConfiguration modified_config;
@@ -1305,8 +1296,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
if (!client->Init(options, &modified_config, std::move(dependencies),
network_thread_.get(), worker_thread_.get(),
- std::move(event_log_factory),
- std::move(media_transport_factory), reset_encoder_factory,
+ std::move(event_log_factory), reset_encoder_factory,
reset_decoder_factory)) {
return nullptr;
}
@@ -1321,11 +1311,11 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies dependencies) {
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory(
new webrtc::FakeRtcEventLogFactory(rtc::Thread::Current()));
- return CreatePeerConnectionWrapper(
- debug_name, options, config, std::move(dependencies),
- std::move(event_log_factory),
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ return CreatePeerConnectionWrapper(debug_name, options, config,
+ std::move(dependencies),
+ std::move(event_log_factory),
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
}
bool CreatePeerConnectionWrappers() {
@@ -1346,13 +1336,13 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
sdp_semantics_ = caller_semantics;
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
sdp_semantics_ = callee_semantics;
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
sdp_semantics_ = original_semantics;
@@ -1365,30 +1355,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, &caller_config,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- callee_ = CreatePeerConnectionWrapper(
- "Callee", nullptr, &callee_config,
- webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- return caller_ && callee_;
- }
-
- bool CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- const PeerConnectionInterface::RTCConfiguration& caller_config,
- const PeerConnectionInterface::RTCConfiguration& callee_config,
- std::unique_ptr<webrtc::MediaTransportFactory> caller_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> callee_factory) {
- caller_ = CreatePeerConnectionWrapper(
- "Caller", nullptr, &caller_config,
- webrtc::PeerConnectionDependencies(nullptr), nullptr,
- std::move(caller_factory), /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, &callee_config,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- std::move(callee_factory), /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1398,16 +1370,16 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies caller_dependencies,
const PeerConnectionInterface::RTCConfiguration& callee_config,
webrtc::PeerConnectionDependencies callee_dependencies) {
- caller_ = CreatePeerConnectionWrapper(
- "Caller", nullptr, &caller_config, std::move(caller_dependencies),
- nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- callee_ = CreatePeerConnectionWrapper(
- "Callee", nullptr, &callee_config, std::move(callee_dependencies),
- nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ caller_ =
+ CreatePeerConnectionWrapper("Caller", nullptr, &caller_config,
+ std::move(caller_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ callee_ =
+ CreatePeerConnectionWrapper("Callee", nullptr, &callee_config,
+ std::move(callee_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1417,12 +1389,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
caller_ = CreatePeerConnectionWrapper(
"Caller", &caller_options, nullptr,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
callee_ = CreatePeerConnectionWrapper(
"Callee", &callee_options, nullptr,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1446,21 +1418,21 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies dependencies(nullptr);
dependencies.cert_generator = std::move(cert_generator);
- return CreatePeerConnectionWrapper(
- "New Peer", nullptr, nullptr, std::move(dependencies), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
}
bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) {
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/!caller_to_callee,
/*reset_decoder_factory=*/caller_to_callee);
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/caller_to_callee,
/*reset_decoder_factory=*/!caller_to_callee);
return caller_ && callee_;
@@ -1551,10 +1523,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); }
- webrtc::MediaTransportPair* loopback_media_transports() {
- return &loopback_media_transports_;
- }
-
PeerConnectionWrapper* caller() { return caller_.get(); }
// Set the |caller_| to the |wrapper| passed in and return the
@@ -1752,7 +1720,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
// on the network thread.
std::vector<std::unique_ptr<cricket::TestTurnServer>> turn_servers_;
std::vector<std::unique_ptr<cricket::TestTurnCustomizer>> turn_customizers_;
- webrtc::MediaTransportPair loopback_media_transports_;
std::unique_ptr<PeerConnectionWrapper> caller_;
std::unique_ptr<PeerConnectionWrapper> callee_;
};
@@ -3875,877 +3842,8 @@ TEST_P(PeerConnectionIntegrationTest,
kDefaultTimeout);
}
-// Tests that the datagram transport to SCTP fallback works correctly when
-// datagram transport negotiation fails.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, RTCConfiguration(),
- loopback_media_transports()->first_factory(), nullptr));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the data channel transport works correctly when datagram transport
-// negotiation succeeds and does not fall back to SCTP.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelDoesNotFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Negotiation should succeed, allowing the data channel to be established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the datagram transport to SCTP fallback works correctly when
-// datagram transports do not advertise compatible transport parameters.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportIncompatibleParametersFallsBackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the datagram transport to SCTP fallback works correctly when
-// only the answerer believes datagram transport parameters are incompatible.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportIncompatibleParametersOnAnswererFallsBackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Set the offerer to accept different parameters, while the answerer rejects
- // them.
- loopback_media_transports()->SetFirstDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
- loopback_media_transports()->SetSecondDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return false; });
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the data channel transport works correctly when datagram
-// transports provide different, but compatible, transport parameters.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportCompatibleParametersDoNotFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Change the comparison used to treat these transport parameters are
- // compatible (on both sides).
- loopback_media_transports()->SetFirstDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
- loopback_media_transports()->SetSecondDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Negotiation should succeed, allowing the data channel to be established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelWithMediaOnCaller) {
- // Configure the caller to attempt use of datagram transport for media and
- // data channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for data channels.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportMediaWithDataChannelOnCaller) {
- // Configure the caller to attempt use of datagram transport for media and
- // data channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelWithMediaOnCallee) {
- // Configure the caller to attempt use of datagram transport for data
- // channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
-
- // Configure the callee to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportMediaWithDataChannelOnCallee) {
- // Configure the caller to attempt use of datagram transport for media.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for media and data
- // channels.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport = true;
- answerer_config.use_datagram_transport_for_data_channels = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelAndMedia) {
- // Configure the caller to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that data channels use SCTP instead of datagram transport if datagram
-// transport is configured in receive-only mode on the caller.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelReceiveOnlyOnCallerUsesSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.use_datagram_transport_for_data_channels_receive_only = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller should offer a data channel using SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // SCTP transports should be present, since they are in use.
- EXPECT_NE(caller()->pc()->GetSctpTransport(), nullptr);
- EXPECT_NE(callee()->pc()->GetSctpTransport(), nullptr);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
#endif // HAVE_SCTP
-// Tests that a callee configured for receive-only use of datagram transport
-// data channels accepts them on incoming calls.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelReceiveOnlyOnCallee) {
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
-
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport_for_data_channels_receive_only = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // SCTP transports should not be present, since datagram transport is used.
- EXPECT_EQ(caller()->pc()->GetSctpTransport(), nullptr);
- EXPECT_EQ(callee()->pc()->GetSctpTransport(), nullptr);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// This test sets up a call between two parties with a datagram transport data
-// channel.
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelEndToEnd) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Expect that data channel created on caller side will show up for callee as
- // well.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Caller data channel should already exist (it created one). Callee data
- // channel may not exist yet, since negotiation happens in-band, not in SDP.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// Tests that 'zero-rtt' data channel transports (which are ready-to-send as
-// soon as they're created) work correctly.
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelZeroRtt) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false; // SDES is required for media transport.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Ensure that the callee's media transport is ready-to-send immediately.
- // Note that only the callee can become writable in zero RTTs. The caller
- // must wait for the callee's answer.
- loopback_media_transports()->SetSecondStateAfterConnect(
- webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Expect that data channel created on caller side will show up for callee as
- // well.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- loopback_media_transports()->SetFirstState(
- webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Caller data channel should already exist (it created one). Callee data
- // channel may not exist yet, since negotiation happens in-band, not in SDP.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// Ensures that when the callee closes a datagram transport data channel, the
-// closing procedure results in the data channel being closed for the caller
-// as well.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelCalleeCloses) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Create a data channel on the caller and signal it to the callee.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Data channels exist and open on both ends of the connection.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Close the data channel on the callee side, and wait for it to reach the
- // "closed" state on both sides.
- callee()->data_channel()->Close();
- EXPECT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout);
-}
-
-// Tests that datagram transport data channels can do in-band negotiation.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelConfigSentToOtherSide) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Create a data channel with a non-default configuration and signal it to the
- // callee.
- webrtc::DataChannelInit init;
- init.id = 53;
- init.maxRetransmits = 52;
- caller()->CreateDataChannel("data-channel", &init);
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Ensure that the data channel exists on the callee with the correct
- // configuration.
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
- // Since "negotiate" is false, the "id" parameter is ignored.
- EXPECT_NE(init.id, callee()->data_channel()->id());
- EXPECT_EQ("data-channel", callee()->data_channel()->label());
- EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits());
- EXPECT_FALSE(callee()->data_channel()->negotiated());
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelRejectedWithNoFallback) {
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- // Disabling DTLS precludes a fallback to SCTP.
- offerer_config.enable_dtls_srtp = false;
-
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- // Both endpoints must disable DTLS or SetRemoteDescription will fail.
- answerer_config.enable_dtls_srtp = false;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(), nullptr));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Caller data channel should already exist (it created one). Callee data
- // channel should not exist, since negotiation happens in-band, not in SDP.
- EXPECT_NE(nullptr, caller()->data_channel());
- EXPECT_EQ(nullptr, callee()->data_channel());
-
- // The caller's data channel should close when the datagram transport is
- // rejected.
- EXPECT_FALSE(caller()->data_observer()->IsOpen());
-
- // Media flow should not be impacted by the failed data channel.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
// Test that the ICE connection and gathering states eventually reach
// "complete".
TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) {
@@ -5702,10 +4800,10 @@ TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) {
auto ice_transport_factory = std::make_unique<MockIceTransportFactory>();
EXPECT_CALL(*ice_transport_factory, RecordIceTransportCreated()).Times(1);
dependencies.ice_transport_factory = std::move(ice_transport_factory);
- auto wrapper = CreatePeerConnectionWrapper(
- "Caller", nullptr, &default_config, std::move(dependencies), nullptr,
- nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ auto wrapper = CreatePeerConnectionWrapper("Caller", nullptr, &default_config,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
ASSERT_TRUE(wrapper);
wrapper->CreateDataChannel();
rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
@@ -6134,6 +5232,23 @@ TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) {
callee()->pc()->SetConfiguration(callee_config);
EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE,
callee()->last_candidate_gathered().type(), kDefaultTimeout);
+
+ // Create an offer and verify that it does not contain an ICE restart (i.e new
+ // ice credentials).
+ std::string caller_ufrag_pre_offer = caller()
+ ->pc()
+ ->local_description()
+ ->description()
+ ->transport_infos()[0]
+ .description.ice_ufrag;
+ caller()->CreateAndSetAndSignalOffer();
+ std::string caller_ufrag_post_offer = caller()
+ ->pc()
+ ->local_description()
+ ->description()
+ ->transport_infos()[0]
+ .description.ice_ufrag;
+ EXPECT_EQ(caller_ufrag_pre_offer, caller_ufrag_post_offer);
}
TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) {
@@ -6173,6 +5288,35 @@ TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) {
EXPECT_NE(caller()->error_event().address, "");
}
+TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) {
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp");
+ ice_server.username = "test";
+ ice_server.password = "test";
+
+ PeerConnectionInterface::RTCConfiguration caller_config;
+ caller_config.servers.push_back(ice_server);
+ caller_config.type = webrtc::PeerConnectionInterface::kRelay;
+ caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+ PeerConnectionInterface::RTCConfiguration callee_config;
+ callee_config.servers.push_back(ice_server);
+ callee_config.type = webrtc::PeerConnectionInterface::kRelay;
+ callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+ ASSERT_TRUE(
+ CreatePeerConnectionWrappersWithConfig(caller_config, callee_config));
+
+ // Do normal offer/answer and wait for ICE to complete.
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout);
+ EXPECT_EQ(caller()->error_event().address, "");
+}
+
TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
AudioKeepsFlowingAfterImplicitRollback) {
PeerConnectionInterface::RTCConfiguration config;
diff --git a/pc/peer_connection_interface_unittest.cc b/pc/peer_connection_interface_unittest.cc
index 2a36970597..901e5c572c 100644
--- a/pc/peer_connection_interface_unittest.cc
+++ b/pc/peer_connection_interface_unittest.cc
@@ -627,7 +627,7 @@ class MockTrackObserver : public ObserverInterface {
}
}
- MOCK_METHOD0(OnChanged, void());
+ MOCK_METHOD(void, OnChanged, (), (override));
private:
NotifierInterface* notifier_;
@@ -1421,15 +1421,11 @@ TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterSetConfiguration) {
PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
config.type = PeerConnectionInterface::kRelay;
- config.use_datagram_transport = true;
- config.use_datagram_transport_for_data_channels = true;
EXPECT_TRUE(pc_->SetConfiguration(config).ok());
PeerConnectionInterface::RTCConfiguration returned_config =
pc_->GetConfiguration();
EXPECT_EQ(PeerConnectionInterface::kRelay, returned_config.type);
- EXPECT_TRUE(returned_config.use_datagram_transport);
- EXPECT_TRUE(returned_config.use_datagram_transport_for_data_channels);
}
TEST_P(PeerConnectionInterfaceTest, SetConfigurationFailsAfterClose) {
@@ -3619,44 +3615,44 @@ TEST_P(PeerConnectionInterfaceTest,
TEST_P(PeerConnectionInterfaceTest, SetBitrateWithoutMinSucceeds) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
- bitrate.current_bitrate_bps = 100000;
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 100000;
EXPECT_TRUE(pc_->SetBitrate(bitrate).ok());
}
TEST_P(PeerConnectionInterfaceTest, SetBitrateNegativeMinFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
+ BitrateSettings bitrate;
bitrate.min_bitrate_bps = -1;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
}
TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanMinFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
+ BitrateSettings bitrate;
bitrate.min_bitrate_bps = 5;
- bitrate.current_bitrate_bps = 3;
+ bitrate.start_bitrate_bps = 3;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
}
TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentNegativeFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
- bitrate.current_bitrate_bps = -1;
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = -1;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
}
TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanCurrentFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
- bitrate.current_bitrate_bps = 10;
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 10;
bitrate.max_bitrate_bps = 8;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
}
TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanMinFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
+ BitrateSettings bitrate;
bitrate.min_bitrate_bps = 10;
bitrate.max_bitrate_bps = 8;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
@@ -3664,7 +3660,7 @@ TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanMinFails) {
TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxNegativeFails) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
+ BitrateSettings bitrate;
bitrate.max_bitrate_bps = -1;
EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
}
@@ -3675,8 +3671,8 @@ TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxNegativeFails) {
// be clamped succeeds.
TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanImplicitMin) {
CreatePeerConnection();
- PeerConnectionInterface::BitrateParameters bitrate;
- bitrate.current_bitrate_bps = 1;
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 1;
EXPECT_TRUE(pc_->SetBitrate(bitrate).ok());
}
diff --git a/pc/peer_connection_internal.h b/pc/peer_connection_internal.h
index 52ffe85c2c..1a78ed204b 100644
--- a/pc/peer_connection_internal.h
+++ b/pc/peer_connection_internal.h
@@ -19,8 +19,9 @@
#include "api/peer_connection_interface.h"
#include "call/call.h"
-#include "pc/data_channel.h"
+#include "pc/rtp_data_channel.h"
#include "pc/rtp_transceiver.h"
+#include "pc/sctp_data_channel.h"
namespace webrtc {
@@ -41,13 +42,18 @@ class PeerConnectionInternal : public PeerConnectionInterface {
rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
GetTransceiversInternal() const = 0;
- virtual sigslot::signal1<DataChannel*>& SignalDataChannelCreated() = 0;
+ virtual sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() = 0;
+ virtual sigslot::signal1<SctpDataChannel*>&
+ SignalSctpDataChannelCreated() = 0;
// Only valid when using deprecated RTP data channels.
virtual cricket::RtpDataChannel* rtp_data_channel() const = 0;
- virtual std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const = 0;
+ // Call on the network thread to fetch stats for all the data channels.
+ // TODO(tommi): Make pure virtual after downstream updates.
+ virtual std::vector<DataChannelStats> GetDataChannelStats() const {
+ return {};
+ }
virtual absl::optional<std::string> sctp_transport_name() const = 0;
diff --git a/pc/peer_connection_media_unittest.cc b/pc/peer_connection_media_unittest.cc
index c9ffd776d9..3c117c3ecd 100644
--- a/pc/peer_connection_media_unittest.cc
+++ b/pc/peer_connection_media_unittest.cc
@@ -825,8 +825,10 @@ TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) {
}
void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) {
- const cricket::AudioCodec kComfortNoiseCodec8k(102, "CN", 8000, 0, 1);
- const cricket::AudioCodec kComfortNoiseCodec16k(103, "CN", 16000, 0, 1);
+ const cricket::AudioCodec kComfortNoiseCodec8k(102, cricket::kCnCodecName,
+ 8000, 0, 1);
+ const cricket::AudioCodec kComfortNoiseCodec16k(103, cricket::kCnCodecName,
+ 16000, 0, 1);
auto codecs = media_engine->voice().send_codecs();
codecs.push_back(kComfortNoiseCodec8k);
@@ -837,7 +839,7 @@ void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) {
bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) {
const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc);
for (const auto& codec : audio_desc->codecs()) {
- if (codec.name == "CN") {
+ if (codec.name == cricket::kCnCodecName) {
return true;
}
}
@@ -1118,10 +1120,11 @@ TEST_P(PeerConnectionMediaTest, MediaEngineErrorPropagatedToClients) {
std::string error;
ASSERT_FALSE(caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(),
&error));
- EXPECT_EQ(
- "Failed to set remote answer sdp: Failed to set remote video description "
- "send parameters.",
- error);
+ EXPECT_EQ(std::string("Failed to set remote answer sdp: Failed to set remote "
+ "video description "
+ "send parameters for m-section with mid='") +
+ (IsUnifiedPlan() ? "1" : "video") + "'.",
+ error);
}
// Tests that if the underlying video encoder fails once then subsequent
diff --git a/pc/proxy_unittest.cc b/pc/proxy_unittest.cc
index a00b47ff6b..500828a03e 100644
--- a/pc/proxy_unittest.cc
+++ b/pc/proxy_unittest.cc
@@ -46,16 +46,16 @@ class Fake : public FakeInterface {
return new rtc::RefCountedObject<Fake>();
}
// Used to verify destructor is called on the correct thread.
- MOCK_METHOD0(Destroy, void());
+ MOCK_METHOD(void, Destroy, ());
- MOCK_METHOD0(VoidMethod0, void());
- MOCK_METHOD0(Method0, std::string());
- MOCK_CONST_METHOD0(ConstMethod0, std::string());
+ MOCK_METHOD(void, VoidMethod0, (), (override));
+ MOCK_METHOD(std::string, Method0, (), (override));
+ MOCK_METHOD(std::string, ConstMethod0, (), (const, override));
- MOCK_METHOD1(Method1, std::string(std::string));
- MOCK_CONST_METHOD1(ConstMethod1, std::string(std::string));
+ MOCK_METHOD(std::string, Method1, (std::string), (override));
+ MOCK_METHOD(std::string, ConstMethod1, (std::string), (const, override));
- MOCK_METHOD2(Method2, std::string(std::string, std::string));
+ MOCK_METHOD(std::string, Method2, (std::string, std::string), (override));
protected:
Fake() {}
@@ -266,7 +266,7 @@ class FooInterface {
class Foo : public FooInterface {
public:
Foo() {}
- MOCK_METHOD0(Bar, void());
+ MOCK_METHOD(void, Bar, (), (override));
};
BEGIN_OWNED_PROXY_MAP(Foo)
diff --git a/pc/remote_audio_source.cc b/pc/remote_audio_source.cc
index da00402e41..301cd3fb5b 100644
--- a/pc/remote_audio_source.cc
+++ b/pc/remote_audio_source.cc
@@ -127,7 +127,7 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
return;
}
- rtc::CritScope lock(&sink_lock_);
+ MutexLock lock(&sink_lock_);
RTC_DCHECK(!absl::c_linear_search(sinks_, sink));
sinks_.push_back(sink);
}
@@ -136,13 +136,13 @@ void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) {
RTC_DCHECK(main_thread_->IsCurrent());
RTC_DCHECK(sink);
- rtc::CritScope lock(&sink_lock_);
+ MutexLock lock(&sink_lock_);
sinks_.remove(sink);
}
void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) {
// Called on the externally-owned audio callback thread, via/from webrtc.
- rtc::CritScope lock(&sink_lock_);
+ MutexLock lock(&sink_lock_);
for (auto* sink : sinks_) {
// When peerconnection acts as an audio source, it should not provide
// absolute capture timestamp.
diff --git a/pc/remote_audio_source.h b/pc/remote_audio_source.h
index 15dc75b511..9ec09165cf 100644
--- a/pc/remote_audio_source.h
+++ b/pc/remote_audio_source.h
@@ -18,8 +18,8 @@
#include "api/call/audio_sink.h"
#include "api/notifier.h"
#include "pc/channel.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/message_handler.h"
+#include "rtc_base/synchronization/mutex.h"
namespace rtc {
struct Message;
@@ -69,7 +69,7 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface>,
rtc::Thread* const main_thread_;
rtc::Thread* const worker_thread_;
std::list<AudioObserver*> audio_observers_;
- rtc::CriticalSection sink_lock_;
+ Mutex sink_lock_;
std::list<AudioTrackSinkInterface*> sinks_;
SourceState state_;
};
diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc
index 0e2f170ff0..5851b0680e 100644
--- a/pc/rtc_stats_collector.cc
+++ b/pc/rtc_stats_collector.cc
@@ -291,6 +291,24 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo(
}
inbound_audio->jitter = static_cast<double>(voice_receiver_info.jitter_ms) /
rtc::kNumMillisecsPerSec;
+ inbound_audio->jitter_buffer_delay =
+ voice_receiver_info.jitter_buffer_delay_seconds;
+ inbound_audio->jitter_buffer_emitted_count =
+ voice_receiver_info.jitter_buffer_emitted_count;
+ inbound_audio->total_samples_received =
+ voice_receiver_info.total_samples_received;
+ inbound_audio->concealed_samples = voice_receiver_info.concealed_samples;
+ inbound_audio->silent_concealed_samples =
+ voice_receiver_info.silent_concealed_samples;
+ inbound_audio->concealment_events = voice_receiver_info.concealment_events;
+ inbound_audio->inserted_samples_for_deceleration =
+ voice_receiver_info.inserted_samples_for_deceleration;
+ inbound_audio->removed_samples_for_acceleration =
+ voice_receiver_info.removed_samples_for_acceleration;
+ inbound_audio->audio_level = voice_receiver_info.audio_level;
+ inbound_audio->total_audio_energy = voice_receiver_info.total_output_energy;
+ inbound_audio->total_samples_duration =
+ voice_receiver_info.total_output_duration;
// |fir_count|, |pli_count| and |sli_count| are only valid for video and are
// purposefully left undefined for audio.
if (voice_receiver_info.last_packet_received_timestamp_ms) {
@@ -327,8 +345,21 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo(
static_cast<uint32_t>(video_receiver_info.plis_sent);
inbound_video->nack_count =
static_cast<uint32_t>(video_receiver_info.nacks_sent);
+ inbound_video->frames_received = video_receiver_info.frames_received;
inbound_video->frames_decoded = video_receiver_info.frames_decoded;
+ inbound_video->frames_dropped = video_receiver_info.frames_dropped;
inbound_video->key_frames_decoded = video_receiver_info.key_frames_decoded;
+ if (video_receiver_info.frame_width > 0) {
+ inbound_video->frame_width =
+ static_cast<uint32_t>(video_receiver_info.frame_width);
+ }
+ if (video_receiver_info.frame_height > 0) {
+ inbound_video->frame_height =
+ static_cast<uint32_t>(video_receiver_info.frame_height);
+ }
+ if (video_receiver_info.framerate_rcvd > 0) {
+ inbound_video->frames_per_second = video_receiver_info.framerate_rcvd;
+ }
if (video_receiver_info.qp_sum)
inbound_video->qp_sum = *video_receiver_info.qp_sum;
inbound_video->total_decode_time =
@@ -397,7 +428,6 @@ void SetOutboundRTPStreamStatsFromVoiceSenderInfo(
void SetOutboundRTPStreamStatsFromVideoSenderInfo(
const std::string& mid,
const cricket::VideoSenderInfo& video_sender_info,
- bool enable_simulcast_stats,
RTCOutboundRTPStreamStats* outbound_video) {
SetOutboundRTPStreamStatsFromMediaSenderInfo(video_sender_info,
outbound_video);
@@ -422,21 +452,19 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo(
rtc::kNumMillisecsPerSec;
outbound_video->total_encoded_bytes_target =
video_sender_info.total_encoded_bytes_target;
- if (enable_simulcast_stats) {
- if (video_sender_info.send_frame_width > 0) {
- outbound_video->frame_width =
- static_cast<uint32_t>(video_sender_info.send_frame_width);
- }
- if (video_sender_info.send_frame_height > 0) {
- outbound_video->frame_height =
- static_cast<uint32_t>(video_sender_info.send_frame_height);
- }
- if (video_sender_info.framerate_sent > 0) {
- outbound_video->frames_per_second = video_sender_info.framerate_sent;
- }
- outbound_video->frames_sent = video_sender_info.frames_sent;
- outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent;
+ if (video_sender_info.send_frame_width > 0) {
+ outbound_video->frame_width =
+ static_cast<uint32_t>(video_sender_info.send_frame_width);
}
+ if (video_sender_info.send_frame_height > 0) {
+ outbound_video->frame_height =
+ static_cast<uint32_t>(video_sender_info.send_frame_height);
+ }
+ if (video_sender_info.framerate_sent > 0) {
+ outbound_video->frames_per_second = video_sender_info.framerate_sent;
+ }
+ outbound_video->frames_sent = video_sender_info.frames_sent;
+ outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent;
outbound_video->total_packet_send_delay =
static_cast<double>(video_sender_info.total_packet_send_delay_ms) /
rtc::kNumMillisecsPerSec;
@@ -462,7 +490,7 @@ std::unique_ptr<RTCRemoteInboundRtpStreamStats>
ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
const ReportBlockData& report_block_data,
cricket::MediaType media_type,
- std::map<std::string, RTCOutboundRTPStreamStats*> outbound_rtps,
+ const std::map<std::string, RTCOutboundRTPStreamStats*>& outbound_rtps,
const RTCStatsReport& report) {
const auto& report_block = report_block_data.report_block();
// RTCStats' timestamp generally refers to when the metric was sampled, but
@@ -985,9 +1013,10 @@ RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc,
RTC_DCHECK(worker_thread_);
RTC_DCHECK(network_thread_);
RTC_DCHECK_GE(cache_lifetime_us_, 0);
- pc_->SignalDataChannelCreated().connect(
- this, &RTCStatsCollector::OnDataChannelCreated);
- enable_simulcast_stats_ = pc_->GetConfiguration().enable_simulcast_stats;
+ pc_->SignalRtpDataChannelCreated().connect(
+ this, &RTCStatsCollector::OnRtpDataChannelCreated);
+ pc_->SignalSctpDataChannelCreated().connect(
+ this, &RTCStatsCollector::OnSctpDataChannelCreated);
}
RTCStatsCollector::~RTCStatsCollector() {
@@ -1044,7 +1073,7 @@ void RTCStatsCollector::GetStatsReportInternal(
// Prepare |transceiver_stats_infos_| for use in
// |ProducePartialResultsOnNetworkThread| and
// |ProducePartialResultsOnSignalingThread|.
- transceiver_stats_infos_ = PrepareTransceiverStatsInfos_s();
+ transceiver_stats_infos_ = PrepareTransceiverStatsInfos_s_w();
// Prepare |transport_names_| for use in
// |ProducePartialResultsOnNetworkThread|.
transport_names_ = PrepareTransportNames_s();
@@ -1053,6 +1082,10 @@ void RTCStatsCollector::GetStatsReportInternal(
// thread.
// TODO(holmer): To avoid the hop we could move BWE and BWE stats to the
// network thread, where it more naturally belongs.
+ // TODO(https://crbug.com/webrtc/11767): In the meantime we can piggyback on
+ // the blocking-invoke that is already performed in
+ // PrepareTransceiverStatsInfos_s_w() so that we can call GetCallStats()
+ // without additional blocking-invokes.
call_stats_ = pc_->GetCallStats();
// Don't touch |network_report_| on the signaling thread until
@@ -1082,6 +1115,8 @@ void RTCStatsCollector::WaitForPendingRequest() {
void RTCStatsCollector::ProducePartialResultsOnSignalingThread(
int64_t timestamp_us) {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
partial_report_ = RTCStatsReport::Create(timestamp_us);
ProducePartialResultsOnSignalingThreadImpl(timestamp_us,
@@ -1099,6 +1134,8 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl(
int64_t timestamp_us,
RTCStatsReport* partial_report) {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
ProduceDataChannelStats_s(timestamp_us, partial_report);
ProduceMediaStreamStats_s(timestamp_us, partial_report);
ProduceMediaStreamTrackStats_s(timestamp_us, partial_report);
@@ -1109,6 +1146,8 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl(
void RTCStatsCollector::ProducePartialResultsOnNetworkThread(
int64_t timestamp_us) {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
// Touching |network_report_| on this thread is safe by this method because
// |network_report_event_| is reset before this method is invoked.
network_report_ = RTCStatsReport::Create(timestamp_us);
@@ -1136,6 +1175,8 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl(
const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
RTCStatsReport* partial_report) {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
ProduceCertificateStats_n(timestamp_us, transport_cert_stats, partial_report);
ProduceCodecStats_n(timestamp_us, transceiver_stats_infos_, partial_report);
ProduceIceCandidateAndPairStats_n(timestamp_us, transport_stats_by_name,
@@ -1222,6 +1263,8 @@ void RTCStatsCollector::ProduceCertificateStats_n(
const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
RTCStatsReport* report) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const auto& transport_cert_stats_pair : transport_cert_stats) {
if (transport_cert_stats_pair.second.local) {
ProduceCertificateStatsFromSSLCertificateStats(
@@ -1239,6 +1282,8 @@ void RTCStatsCollector::ProduceCodecStats_n(
const std::vector<RtpTransceiverStatsInfo>& transceiver_stats_infos,
RTCStatsReport* report) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const auto& stats : transceiver_stats_infos) {
if (!stats.mid) {
continue;
@@ -1279,22 +1324,22 @@ void RTCStatsCollector::ProduceCodecStats_n(
void RTCStatsCollector::ProduceDataChannelStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
- RTC_DCHECK(signaling_thread_->IsCurrent());
- for (const rtc::scoped_refptr<DataChannel>& data_channel :
- pc_->sctp_data_channels()) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ std::vector<DataChannelStats> data_stats = pc_->GetDataChannelStats();
+ for (const auto& stats : data_stats) {
std::unique_ptr<RTCDataChannelStats> data_channel_stats(
new RTCDataChannelStats(
- "RTCDataChannel_" + rtc::ToString(data_channel->internal_id()),
+ "RTCDataChannel_" + rtc::ToString(stats.internal_id),
timestamp_us));
- data_channel_stats->label = data_channel->label();
- data_channel_stats->protocol = data_channel->protocol();
- data_channel_stats->datachannelid = data_channel->id();
- data_channel_stats->state =
- DataStateToRTCDataChannelState(data_channel->state());
- data_channel_stats->messages_sent = data_channel->messages_sent();
- data_channel_stats->bytes_sent = data_channel->bytes_sent();
- data_channel_stats->messages_received = data_channel->messages_received();
- data_channel_stats->bytes_received = data_channel->bytes_received();
+ data_channel_stats->label = std::move(stats.label);
+ data_channel_stats->protocol = std::move(stats.protocol);
+ data_channel_stats->data_channel_identifier = stats.id;
+ data_channel_stats->state = DataStateToRTCDataChannelState(stats.state);
+ data_channel_stats->messages_sent = stats.messages_sent;
+ data_channel_stats->bytes_sent = stats.bytes_sent;
+ data_channel_stats->messages_received = stats.messages_received;
+ data_channel_stats->bytes_received = stats.bytes_received;
report->AddStats(std::move(data_channel_stats));
}
}
@@ -1306,6 +1351,8 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n(
const Call::Stats& call_stats,
RTCStatsReport* report) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const auto& entry : transport_stats_by_name) {
const std::string& transport_name = entry.first;
const cricket::TransportStats& transport_stats = entry.second;
@@ -1386,6 +1433,7 @@ void RTCStatsCollector::ProduceMediaStreamStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
std::map<std::string, std::vector<std::string>> track_ids;
@@ -1422,6 +1470,8 @@ void RTCStatsCollector::ProduceMediaStreamTrackStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) {
std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders;
for (const auto& sender : stats.transceiver->senders()) {
@@ -1443,6 +1493,8 @@ void RTCStatsCollector::ProduceMediaSourceStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const RtpTransceiverStatsInfo& transceiver_stats_info :
transceiver_stats_infos_) {
const auto& track_media_info_map =
@@ -1524,6 +1576,8 @@ void RTCStatsCollector::ProducePeerConnectionStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
std::unique_ptr<RTCPeerConnectionStats> stats(
new RTCPeerConnectionStats("RTCPeerConnection", timestamp_us));
stats->data_channels_opened = internal_record_.data_channels_opened;
@@ -1536,6 +1590,7 @@ void RTCStatsCollector::ProduceRTPStreamStats_n(
const std::vector<RtpTransceiverStatsInfo>& transceiver_stats_infos,
RTCStatsReport* report) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos) {
if (stats.media_type == cricket::MEDIA_TYPE_AUDIO) {
@@ -1552,6 +1607,9 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n(
int64_t timestamp_us,
const RtpTransceiverStatsInfo& stats,
RTCStatsReport* report) const {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
if (!stats.mid || !stats.transport_name) {
return;
}
@@ -1620,8 +1678,8 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n(
track_media_info_map.voice_media_info()->senders) {
for (const auto& report_block_data : voice_sender_info.report_block_datas) {
report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
- report_block_data, cricket::MEDIA_TYPE_AUDIO,
- std::move(audio_outbound_rtps), *report));
+ report_block_data, cricket::MEDIA_TYPE_AUDIO, audio_outbound_rtps,
+ *report));
}
}
}
@@ -1630,6 +1688,9 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
int64_t timestamp_us,
const RtpTransceiverStatsInfo& stats,
RTCStatsReport* report) const {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
if (!stats.mid || !stats.transport_name) {
return;
}
@@ -1663,16 +1724,14 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
// Outbound
std::map<std::string, RTCOutboundRTPStreamStats*> video_outbound_rtps;
for (const cricket::VideoSenderInfo& video_sender_info :
- enable_simulcast_stats_
- ? track_media_info_map.video_media_info()->senders
- : track_media_info_map.video_media_info()->aggregated_senders) {
+ track_media_info_map.video_media_info()->senders) {
if (!video_sender_info.connected())
continue;
auto outbound_video = std::make_unique<RTCOutboundRTPStreamStats>(
RTCOutboundRTPStreamStatsIDFromSSRC(false, video_sender_info.ssrc()),
timestamp_us);
- SetOutboundRTPStreamStatsFromVideoSenderInfo(
- mid, video_sender_info, enable_simulcast_stats_, outbound_video.get());
+ SetOutboundRTPStreamStatsFromVideoSenderInfo(mid, video_sender_info,
+ outbound_video.get());
rtc::scoped_refptr<VideoTrackInterface> video_track =
track_media_info_map.GetVideoTrack(video_sender_info);
if (video_track) {
@@ -1699,8 +1758,8 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
track_media_info_map.video_media_info()->senders) {
for (const auto& report_block_data : video_sender_info.report_block_datas) {
report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
- report_block_data, cricket::MEDIA_TYPE_VIDEO,
- std::move(video_outbound_rtps), *report));
+ report_block_data, cricket::MEDIA_TYPE_VIDEO, video_outbound_rtps,
+ *report));
}
}
}
@@ -1712,6 +1771,8 @@ void RTCStatsCollector::ProduceTransportStats_n(
const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
RTCStatsReport* report) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const auto& entry : transport_stats_by_name) {
const std::string& transport_name = entry.first;
const cricket::TransportStats& transport_stats = entry.second;
@@ -1751,7 +1812,9 @@ void RTCStatsCollector::ProduceTransportStats_n(
transport_name, channel_stats.component),
timestamp_us));
transport_stats->bytes_sent = 0;
+ transport_stats->packets_sent = 0;
transport_stats->bytes_received = 0;
+ transport_stats->packets_received = 0;
transport_stats->dtls_state =
DtlsTransportStateToRTCDtlsTransportState(channel_stats.dtls_state);
transport_stats->selected_candidate_pair_changes =
@@ -1759,7 +1822,10 @@ void RTCStatsCollector::ProduceTransportStats_n(
for (const cricket::ConnectionInfo& info :
channel_stats.ice_transport_stats.connection_infos) {
*transport_stats->bytes_sent += info.sent_total_bytes;
+ *transport_stats->packets_sent +=
+ info.sent_total_packets - info.sent_discarded_packets;
*transport_stats->bytes_received += info.recv_total_bytes;
+ *transport_stats->packets_received += info.packets_received;
if (info.best_connection) {
transport_stats->selected_candidate_pair_id =
RTCIceCandidatePairStatsIDFromConnectionInfo(info);
@@ -1803,6 +1869,8 @@ RTCStatsCollector::PrepareTransportCertificateStats_n(
const std::map<std::string, cricket::TransportStats>&
transport_stats_by_name) const {
RTC_DCHECK(network_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
std::map<std::string, CertificateStatsPair> transport_cert_stats;
for (const auto& entry : transport_stats_by_name) {
const std::string& transport_name = entry.first;
@@ -1827,9 +1895,10 @@ RTCStatsCollector::PrepareTransportCertificateStats_n(
}
std::vector<RTCStatsCollector::RtpTransceiverStatsInfo>
-RTCStatsCollector::PrepareTransceiverStatsInfos_s() const {
- std::vector<RtpTransceiverStatsInfo> transceiver_stats_infos;
+RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ std::vector<RtpTransceiverStatsInfo> transceiver_stats_infos;
// These are used to invoke GetStats for all the media channels together in
// one worker thread hop.
std::map<cricket::VoiceMediaChannel*,
@@ -1839,45 +1908,52 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s() const {
std::unique_ptr<cricket::VideoMediaInfo>>
video_stats;
- for (const auto& transceiver : pc_->GetTransceiversInternal()) {
- cricket::MediaType media_type = transceiver->media_type();
-
- // Prepare stats entry. The TrackMediaInfoMap will be filled in after the
- // stats have been fetched on the worker thread.
- transceiver_stats_infos.emplace_back();
- RtpTransceiverStatsInfo& stats = transceiver_stats_infos.back();
- stats.transceiver = transceiver->internal();
- stats.media_type = media_type;
-
- cricket::ChannelInterface* channel = transceiver->internal()->channel();
- if (!channel) {
- // The remaining fields require a BaseChannel.
- continue;
- }
+ {
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
- stats.mid = channel->content_name();
- stats.transport_name = channel->transport_name();
-
- if (media_type == cricket::MEDIA_TYPE_AUDIO) {
- auto* voice_channel = static_cast<cricket::VoiceChannel*>(channel);
- RTC_DCHECK(voice_stats.find(voice_channel->media_channel()) ==
- voice_stats.end());
- voice_stats[voice_channel->media_channel()] =
- std::make_unique<cricket::VoiceMediaInfo>();
- } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
- auto* video_channel = static_cast<cricket::VideoChannel*>(channel);
- RTC_DCHECK(video_stats.find(video_channel->media_channel()) ==
- video_stats.end());
- video_stats[video_channel->media_channel()] =
- std::make_unique<cricket::VideoMediaInfo>();
- } else {
- RTC_NOTREACHED();
+ for (const auto& transceiver : pc_->GetTransceiversInternal()) {
+ cricket::MediaType media_type = transceiver->media_type();
+
+ // Prepare stats entry. The TrackMediaInfoMap will be filled in after the
+ // stats have been fetched on the worker thread.
+ transceiver_stats_infos.emplace_back();
+ RtpTransceiverStatsInfo& stats = transceiver_stats_infos.back();
+ stats.transceiver = transceiver->internal();
+ stats.media_type = media_type;
+
+ cricket::ChannelInterface* channel = transceiver->internal()->channel();
+ if (!channel) {
+ // The remaining fields require a BaseChannel.
+ continue;
+ }
+
+ stats.mid = channel->content_name();
+ stats.transport_name = channel->transport_name();
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ auto* voice_channel = static_cast<cricket::VoiceChannel*>(channel);
+ RTC_DCHECK(voice_stats.find(voice_channel->media_channel()) ==
+ voice_stats.end());
+ voice_stats[voice_channel->media_channel()] =
+ std::make_unique<cricket::VoiceMediaInfo>();
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ auto* video_channel = static_cast<cricket::VideoChannel*>(channel);
+ RTC_DCHECK(video_stats.find(video_channel->media_channel()) ==
+ video_stats.end());
+ video_stats[video_channel->media_channel()] =
+ std::make_unique<cricket::VideoMediaInfo>();
+ } else {
+ RTC_NOTREACHED();
+ }
}
}
- // Call GetStats for all media channels together on the worker thread in one
- // hop.
+ // We jump to the worker thread and call GetStats() on each media channel. At
+ // the same time we construct the TrackMediaInfoMaps, which also needs info
+ // from the worker thread. This minimizes the number of thread jumps.
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
for (const auto& entry : voice_stats) {
if (!entry.first->GetStats(entry.second.get())) {
RTC_LOG(LS_WARNING) << "Failed to get voice stats.";
@@ -1888,46 +1964,49 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s() const {
RTC_LOG(LS_WARNING) << "Failed to get video stats.";
}
}
- });
- // Create the TrackMediaInfoMap for each transceiver stats object.
- for (auto& stats : transceiver_stats_infos) {
- auto transceiver = stats.transceiver;
- std::unique_ptr<cricket::VoiceMediaInfo> voice_media_info;
- std::unique_ptr<cricket::VideoMediaInfo> video_media_info;
- if (transceiver->channel()) {
- cricket::MediaType media_type = transceiver->media_type();
- if (media_type == cricket::MEDIA_TYPE_AUDIO) {
- auto* voice_channel =
- static_cast<cricket::VoiceChannel*>(transceiver->channel());
- RTC_DCHECK(voice_stats[voice_channel->media_channel()]);
- voice_media_info =
- std::move(voice_stats[voice_channel->media_channel()]);
- } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
- auto* video_channel =
- static_cast<cricket::VideoChannel*>(transceiver->channel());
- RTC_DCHECK(video_stats[video_channel->media_channel()]);
- video_media_info =
- std::move(video_stats[video_channel->media_channel()]);
+ // Create the TrackMediaInfoMap for each transceiver stats object.
+ for (auto& stats : transceiver_stats_infos) {
+ auto transceiver = stats.transceiver;
+ std::unique_ptr<cricket::VoiceMediaInfo> voice_media_info;
+ std::unique_ptr<cricket::VideoMediaInfo> video_media_info;
+ if (transceiver->channel()) {
+ cricket::MediaType media_type = transceiver->media_type();
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ auto* voice_channel =
+ static_cast<cricket::VoiceChannel*>(transceiver->channel());
+ RTC_DCHECK(voice_stats[voice_channel->media_channel()]);
+ voice_media_info =
+ std::move(voice_stats[voice_channel->media_channel()]);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ auto* video_channel =
+ static_cast<cricket::VideoChannel*>(transceiver->channel());
+ RTC_DCHECK(video_stats[video_channel->media_channel()]);
+ video_media_info =
+ std::move(video_stats[video_channel->media_channel()]);
+ }
}
+ std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders;
+ for (const auto& sender : transceiver->senders()) {
+ senders.push_back(sender->internal());
+ }
+ std::vector<rtc::scoped_refptr<RtpReceiverInternal>> receivers;
+ for (const auto& receiver : transceiver->receivers()) {
+ receivers.push_back(receiver->internal());
+ }
+ stats.track_media_info_map = std::make_unique<TrackMediaInfoMap>(
+ std::move(voice_media_info), std::move(video_media_info), senders,
+ receivers);
}
- std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders;
- for (const auto& sender : transceiver->senders()) {
- senders.push_back(sender->internal());
- }
- std::vector<rtc::scoped_refptr<RtpReceiverInternal>> receivers;
- for (const auto& receiver : transceiver->receivers()) {
- receivers.push_back(receiver->internal());
- }
- stats.track_media_info_map = std::make_unique<TrackMediaInfoMap>(
- std::move(voice_media_info), std::move(video_media_info), senders,
- receivers);
- }
+ });
return transceiver_stats_infos;
}
std::set<std::string> RTCStatsCollector::PrepareTransportNames_s() const {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
std::set<std::string> transport_names;
for (const auto& transceiver : pc_->GetTransceiversInternal()) {
if (transceiver->internal()->channel()) {
@@ -1944,12 +2023,17 @@ std::set<std::string> RTCStatsCollector::PrepareTransportNames_s() const {
return transport_names;
}
-void RTCStatsCollector::OnDataChannelCreated(DataChannel* channel) {
+void RTCStatsCollector::OnRtpDataChannelCreated(RtpDataChannel* channel) {
+ channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened);
+ channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed);
+}
+
+void RTCStatsCollector::OnSctpDataChannelCreated(SctpDataChannel* channel) {
channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened);
channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed);
}
-void RTCStatsCollector::OnDataChannelOpened(DataChannel* channel) {
+void RTCStatsCollector::OnDataChannelOpened(DataChannelInterface* channel) {
RTC_DCHECK(signaling_thread_->IsCurrent());
bool result = internal_record_.opened_data_channels
.insert(reinterpret_cast<uintptr_t>(channel))
@@ -1958,7 +2042,7 @@ void RTCStatsCollector::OnDataChannelOpened(DataChannel* channel) {
RTC_DCHECK(result);
}
-void RTCStatsCollector::OnDataChannelClosed(DataChannel* channel) {
+void RTCStatsCollector::OnDataChannelClosed(DataChannelInterface* channel) {
RTC_DCHECK(signaling_thread_->IsCurrent());
// Only channels that have been fully opened (and have increased the
// |data_channels_opened_| counter) increase the closed counter.
diff --git a/pc/rtc_stats_collector.h b/pc/rtc_stats_collector.h
index 7c85a35fe0..e1bc27d9e5 100644
--- a/pc/rtc_stats_collector.h
+++ b/pc/rtc_stats_collector.h
@@ -24,7 +24,7 @@
#include "api/stats/rtcstats_objects.h"
#include "call/call.h"
#include "media/base/media_channel.h"
-#include "pc/data_channel.h"
+#include "pc/data_channel_utils.h"
#include "pc/peer_connection_internal.h"
#include "pc/track_media_info_map.h"
#include "rtc_base/event.h"
@@ -215,7 +215,7 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface,
PrepareTransportCertificateStats_n(
const std::map<std::string, cricket::TransportStats>&
transport_stats_by_name) const;
- std::vector<RtpTransceiverStatsInfo> PrepareTransceiverStatsInfos_s() const;
+ std::vector<RtpTransceiverStatsInfo> PrepareTransceiverStatsInfos_s_w() const;
std::set<std::string> PrepareTransportNames_s() const;
// Stats gathering on a particular thread.
@@ -226,10 +226,11 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface,
void MergeNetworkReport_s();
// Slots for signals (sigslot) that are wired up to |pc_|.
- void OnDataChannelCreated(DataChannel* channel);
+ void OnRtpDataChannelCreated(RtpDataChannel* channel);
+ void OnSctpDataChannelCreated(SctpDataChannel* channel);
// Slots for signals (sigslot) that are wired up to |channel|.
- void OnDataChannelOpened(DataChannel* channel);
- void OnDataChannelClosed(DataChannel* channel);
+ void OnDataChannelOpened(DataChannelInterface* channel);
+ void OnDataChannelClosed(DataChannelInterface* channel);
PeerConnectionInternal* const pc_;
rtc::Thread* const signaling_thread_;
@@ -288,7 +289,6 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface,
std::set<uintptr_t> opened_data_channels;
};
InternalRecord internal_record_;
- bool enable_simulcast_stats_ = false;
};
const char* CandidateTypeToRTCIceCandidateTypeForTesting(
diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc
index db00dd7d91..becf7350a3 100644
--- a/pc/rtc_stats_collector_unittest.cc
+++ b/pc/rtc_stats_collector_unittest.cc
@@ -32,6 +32,7 @@
#include "p2p/base/port.h"
#include "pc/media_stream.h"
#include "pc/media_stream_track.h"
+#include "pc/test/fake_data_channel_provider.h"
#include "pc/test/fake_peer_connection_for_stats.h"
#include "pc/test/mock_data_channel.h"
#include "pc/test/mock_rtp_receiver_internal.h"
@@ -43,6 +44,7 @@
#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/json.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
using ::testing::AtLeast;
@@ -975,9 +977,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsChain) {
TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) {
pc_->AddSctpDataChannel(
- new MockDataChannel(/*id=*/-1, DataChannelInterface::kConnecting));
+ new MockSctpDataChannel(/*id=*/-1, DataChannelInterface::kConnecting));
pc_->AddSctpDataChannel(
- new MockDataChannel(/*id=*/-1, DataChannelInterface::kConnecting));
+ new MockSctpDataChannel(/*id=*/-1, DataChannelInterface::kConnecting));
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
}
@@ -986,52 +988,53 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
// Note: The test assumes data channel IDs are predictable.
// This is not a safe assumption, but in order to make it work for
// the test, we reset the ID allocator at test start.
- DataChannel::ResetInternalIdAllocatorForTesting(-1);
- pc_->AddSctpDataChannel(new MockDataChannel(0, "MockDataChannel0",
- DataChannelInterface::kConnecting,
- "udp", 1, 2, 3, 4));
+ SctpDataChannel::ResetInternalIdAllocatorForTesting(-1);
+ pc_->AddSctpDataChannel(new MockSctpDataChannel(
+ 0, "MockSctpDataChannel0", DataChannelInterface::kConnecting, "udp", 1, 2,
+ 3, 4));
RTCDataChannelStats expected_data_channel0("RTCDataChannel_0", 0);
- expected_data_channel0.label = "MockDataChannel0";
+ expected_data_channel0.label = "MockSctpDataChannel0";
expected_data_channel0.protocol = "udp";
- expected_data_channel0.datachannelid = 0;
+ expected_data_channel0.data_channel_identifier = 0;
expected_data_channel0.state = "connecting";
expected_data_channel0.messages_sent = 1;
expected_data_channel0.bytes_sent = 2;
expected_data_channel0.messages_received = 3;
expected_data_channel0.bytes_received = 4;
- pc_->AddSctpDataChannel(new MockDataChannel(
- 1, "MockDataChannel1", DataChannelInterface::kOpen, "tcp", 5, 6, 7, 8));
+ pc_->AddSctpDataChannel(new MockSctpDataChannel(1, "MockSctpDataChannel1",
+ DataChannelInterface::kOpen,
+ "tcp", 5, 6, 7, 8));
RTCDataChannelStats expected_data_channel1("RTCDataChannel_1", 0);
- expected_data_channel1.label = "MockDataChannel1";
+ expected_data_channel1.label = "MockSctpDataChannel1";
expected_data_channel1.protocol = "tcp";
- expected_data_channel1.datachannelid = 1;
+ expected_data_channel1.data_channel_identifier = 1;
expected_data_channel1.state = "open";
expected_data_channel1.messages_sent = 5;
expected_data_channel1.bytes_sent = 6;
expected_data_channel1.messages_received = 7;
expected_data_channel1.bytes_received = 8;
- pc_->AddSctpDataChannel(new MockDataChannel(2, "MockDataChannel2",
- DataChannelInterface::kClosing,
- "udp", 9, 10, 11, 12));
+ pc_->AddSctpDataChannel(new MockSctpDataChannel(
+ 2, "MockSctpDataChannel2", DataChannelInterface::kClosing, "udp", 9, 10,
+ 11, 12));
RTCDataChannelStats expected_data_channel2("RTCDataChannel_2", 0);
- expected_data_channel2.label = "MockDataChannel2";
+ expected_data_channel2.label = "MockSctpDataChannel2";
expected_data_channel2.protocol = "udp";
- expected_data_channel2.datachannelid = 2;
+ expected_data_channel2.data_channel_identifier = 2;
expected_data_channel2.state = "closing";
expected_data_channel2.messages_sent = 9;
expected_data_channel2.bytes_sent = 10;
expected_data_channel2.messages_received = 11;
expected_data_channel2.bytes_received = 12;
- pc_->AddSctpDataChannel(new MockDataChannel(3, "MockDataChannel3",
- DataChannelInterface::kClosed,
- "tcp", 13, 14, 15, 16));
+ pc_->AddSctpDataChannel(new MockSctpDataChannel(3, "MockSctpDataChannel3",
+ DataChannelInterface::kClosed,
+ "tcp", 13, 14, 15, 16));
RTCDataChannelStats expected_data_channel3("RTCDataChannel_3", 0);
- expected_data_channel3.label = "MockDataChannel3";
+ expected_data_channel3.label = "MockSctpDataChannel3";
expected_data_channel3.protocol = "tcp";
- expected_data_channel3.datachannelid = 3;
+ expected_data_channel3.data_channel_identifier = 3;
expected_data_channel3.state = "closed";
expected_data_channel3.messages_sent = 13;
expected_data_channel3.bytes_sent = 14;
@@ -1398,12 +1401,16 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) {
report->Get("RTCPeerConnection")->cast_to<RTCPeerConnectionStats>());
}
- rtc::scoped_refptr<DataChannel> dummy_channel_a = DataChannel::Create(
- nullptr, cricket::DCT_NONE, "DummyChannelA", InternalDataChannelInit());
- pc_->SignalDataChannelCreated()(dummy_channel_a.get());
- rtc::scoped_refptr<DataChannel> dummy_channel_b = DataChannel::Create(
- nullptr, cricket::DCT_NONE, "DummyChannelB", InternalDataChannelInit());
- pc_->SignalDataChannelCreated()(dummy_channel_b.get());
+ // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
+ FakeDataChannelProvider provider;
+ rtc::scoped_refptr<SctpDataChannel> dummy_channel_a = SctpDataChannel::Create(
+ &provider, "DummyChannelA", InternalDataChannelInit(),
+ rtc::Thread::Current(), rtc::Thread::Current());
+ pc_->SignalSctpDataChannelCreated()(dummy_channel_a.get());
+ rtc::scoped_refptr<SctpDataChannel> dummy_channel_b = SctpDataChannel::Create(
+ &provider, "DummyChannelB", InternalDataChannelInit(),
+ rtc::Thread::Current(), rtc::Thread::Current());
+ pc_->SignalSctpDataChannelCreated()(dummy_channel_b.get());
dummy_channel_a->SignalOpened(dummy_channel_a.get());
// Closing a channel that is not opened should not affect the counts.
@@ -1776,6 +1783,18 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) {
voice_media_info.receivers[0].header_and_padding_bytes_rcvd = 4;
voice_media_info.receivers[0].codec_payload_type = 42;
voice_media_info.receivers[0].jitter_ms = 4500;
+ voice_media_info.receivers[0].jitter_buffer_delay_seconds = 1.0;
+ voice_media_info.receivers[0].jitter_buffer_emitted_count = 2;
+ voice_media_info.receivers[0].total_samples_received = 3;
+ voice_media_info.receivers[0].concealed_samples = 4;
+ voice_media_info.receivers[0].silent_concealed_samples = 5;
+ voice_media_info.receivers[0].concealment_events = 6;
+ voice_media_info.receivers[0].inserted_samples_for_deceleration = 7;
+ voice_media_info.receivers[0].removed_samples_for_acceleration = 8;
+ voice_media_info.receivers[0].audio_level = 9.0;
+ voice_media_info.receivers[0].total_output_energy = 10.0;
+ voice_media_info.receivers[0].total_output_duration = 11.0;
+
voice_media_info.receivers[0].last_packet_received_timestamp_ms =
absl::nullopt;
@@ -1814,6 +1833,18 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) {
expected_audio.packets_lost = -1;
// |expected_audio.last_packet_received_timestamp| should be undefined.
expected_audio.jitter = 4.5;
+ expected_audio.jitter_buffer_delay = 1.0;
+ expected_audio.jitter_buffer_emitted_count = 2;
+ expected_audio.total_samples_received = 3;
+ expected_audio.concealed_samples = 4;
+ expected_audio.silent_concealed_samples = 5;
+ expected_audio.concealment_events = 6;
+ expected_audio.inserted_samples_for_deceleration = 7;
+ expected_audio.removed_samples_for_acceleration = 8;
+ expected_audio.audio_level = 9.0;
+ expected_audio.total_audio_energy = 10.0;
+ expected_audio.total_samples_duration = 11.0;
+
ASSERT_TRUE(report->Get(expected_audio.id()));
EXPECT_EQ(
report->Get(expected_audio.id())->cast_to<RTCInboundRTPStreamStats>(),
@@ -1852,8 +1883,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) {
video_media_info.receivers[0].firs_sent = 5;
video_media_info.receivers[0].plis_sent = 6;
video_media_info.receivers[0].nacks_sent = 7;
- video_media_info.receivers[0].frames_decoded = 8;
+ video_media_info.receivers[0].frames_received = 8;
+ video_media_info.receivers[0].frames_decoded = 9;
video_media_info.receivers[0].key_frames_decoded = 3;
+ video_media_info.receivers[0].frames_dropped = 13;
video_media_info.receivers[0].qp_sum = absl::nullopt;
video_media_info.receivers[0].total_decode_time_ms = 9000;
video_media_info.receivers[0].total_inter_frame_delay = 0.123;
@@ -1897,8 +1930,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) {
expected_video.bytes_received = 3;
expected_video.header_bytes_received = 12;
expected_video.packets_lost = 42;
- expected_video.frames_decoded = 8;
+ expected_video.frames_received = 8;
+ expected_video.frames_decoded = 9;
expected_video.key_frames_decoded = 3;
+ expected_video.frames_dropped = 13;
// |expected_video.qp_sum| should be undefined.
expected_video.total_decode_time = 9.0;
expected_video.total_inter_frame_delay = 0.123;
@@ -2075,13 +2110,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) {
expected_video.total_packet_send_delay = 10.0;
expected_video.quality_limitation_reason = "bandwidth";
expected_video.quality_limitation_resolution_changes = 56u;
- if (pc_->GetConfiguration().enable_simulcast_stats) {
- expected_video.frame_width = 200u;
- expected_video.frame_height = 100u;
- expected_video.frames_per_second = 10.0;
- expected_video.frames_sent = 5;
- expected_video.huge_frames_sent = 2;
- }
+ expected_video.frame_width = 200u;
+ expected_video.frame_height = 100u;
+ expected_video.frames_per_second = 10.0;
+ expected_video.frames_sent = 5;
+ expected_video.huge_frames_sent = 2;
// |expected_video.content_type| should be undefined.
// |expected_video.qp_sum| should be undefined.
// |expected_video.encoder_implementation| should be undefined.
@@ -2138,6 +2171,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
rtp_connection_info.remote_candidate = *rtp_remote_candidate.get();
rtp_connection_info.sent_total_bytes = 42;
rtp_connection_info.recv_total_bytes = 1337;
+ rtp_connection_info.sent_total_packets = 3;
+ rtp_connection_info.sent_discarded_packets = 2;
+ rtp_connection_info.packets_received = 4;
cricket::TransportChannelStats rtp_transport_channel_stats;
rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
@@ -2155,7 +2191,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP),
report->timestamp_us());
expected_rtp_transport.bytes_sent = 42;
+ expected_rtp_transport.packets_sent = 1;
expected_rtp_transport.bytes_received = 1337;
+ expected_rtp_transport.packets_received = 4;
expected_rtp_transport.dtls_state = RTCDtlsTransportState::kNew;
expected_rtp_transport.selected_candidate_pair_changes = 1;
@@ -2170,6 +2208,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
rtcp_connection_info.remote_candidate = *rtcp_remote_candidate.get();
rtcp_connection_info.sent_total_bytes = 1337;
rtcp_connection_info.recv_total_bytes = 42;
+ rtcp_connection_info.sent_total_packets = 3;
+ rtcp_connection_info.sent_discarded_packets = 2;
+ rtcp_connection_info.packets_received = 4;
cricket::TransportChannelStats rtcp_transport_channel_stats;
rtcp_transport_channel_stats.component =
cricket::ICE_CANDIDATE_COMPONENT_RTCP;
@@ -2187,7 +2228,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTCP),
report->timestamp_us());
expected_rtcp_transport.bytes_sent = 1337;
+ expected_rtcp_transport.packets_sent = 1;
expected_rtcp_transport.bytes_received = 42;
+ expected_rtcp_transport.packets_received = 4;
expected_rtcp_transport.dtls_state = RTCDtlsTransportState::kConnecting;
expected_rtcp_transport.selected_candidate_pair_changes = 0;
@@ -2281,6 +2324,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) {
rtp_connection_info.remote_candidate = *rtp_remote_candidate.get();
rtp_connection_info.sent_total_bytes = 42;
rtp_connection_info.recv_total_bytes = 1337;
+ rtp_connection_info.sent_total_packets = 3;
+ rtp_connection_info.sent_discarded_packets = 2;
+ rtp_connection_info.packets_received = 4;
cricket::TransportChannelStats rtp_transport_channel_stats;
rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
@@ -2303,7 +2349,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) {
rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP),
report->timestamp_us());
expected_rtp_transport.bytes_sent = 42;
+ expected_rtp_transport.packets_sent = 1;
expected_rtp_transport.bytes_received = 1337;
+ expected_rtp_transport.packets_received = 4;
expected_rtp_transport.dtls_state = RTCDtlsTransportState::kConnected;
expected_rtp_transport.selected_candidate_pair_changes = 1;
// Crypto parameters
@@ -2562,44 +2610,46 @@ class RTCStatsCollectorTestWithParamKind
// Adds a sender and channel of the appropriate kind, creating a sender info
// with the report block's |source_ssrc| and report block data.
- void AddSenderInfoAndMediaChannel(std::string transport_name,
- ReportBlockData report_block_data,
- absl::optional<RtpCodecParameters> codec) {
+ void AddSenderInfoAndMediaChannel(
+ std::string transport_name,
+ const std::vector<ReportBlockData>& report_block_datas,
+ absl::optional<RtpCodecParameters> codec) {
switch (media_type_) {
case cricket::MEDIA_TYPE_AUDIO: {
cricket::VoiceMediaInfo voice_media_info;
- voice_media_info.senders.push_back(cricket::VoiceSenderInfo());
- voice_media_info.senders[0].local_stats.push_back(
- cricket::SsrcSenderInfo());
- voice_media_info.senders[0].local_stats[0].ssrc =
- report_block_data.report_block().source_ssrc;
- if (codec.has_value()) {
- voice_media_info.senders[0].codec_payload_type = codec->payload_type;
- voice_media_info.send_codecs.insert(
- std::make_pair(codec->payload_type, *codec));
+ for (const auto& report_block_data : report_block_datas) {
+ cricket::VoiceSenderInfo sender;
+ sender.local_stats.push_back(cricket::SsrcSenderInfo());
+ sender.local_stats[0].ssrc =
+ report_block_data.report_block().source_ssrc;
+ if (codec.has_value()) {
+ sender.codec_payload_type = codec->payload_type;
+ voice_media_info.send_codecs.insert(
+ std::make_pair(codec->payload_type, *codec));
+ }
+ sender.report_block_datas.push_back(report_block_data);
+ voice_media_info.senders.push_back(sender);
}
- voice_media_info.senders[0].report_block_datas.push_back(
- report_block_data);
auto* voice_media_channel = pc_->AddVoiceChannel("mid", transport_name);
voice_media_channel->SetStats(voice_media_info);
return;
}
case cricket::MEDIA_TYPE_VIDEO: {
cricket::VideoMediaInfo video_media_info;
- video_media_info.senders.push_back(cricket::VideoSenderInfo());
- video_media_info.senders[0].local_stats.push_back(
- cricket::SsrcSenderInfo());
- video_media_info.senders[0].local_stats[0].ssrc =
- report_block_data.report_block().source_ssrc;
- if (codec.has_value()) {
- video_media_info.senders[0].codec_payload_type = codec->payload_type;
- video_media_info.send_codecs.insert(
- std::make_pair(codec->payload_type, *codec));
+ for (const auto& report_block_data : report_block_datas) {
+ cricket::VideoSenderInfo sender;
+ sender.local_stats.push_back(cricket::SsrcSenderInfo());
+ sender.local_stats[0].ssrc =
+ report_block_data.report_block().source_ssrc;
+ if (codec.has_value()) {
+ sender.codec_payload_type = codec->payload_type;
+ video_media_info.send_codecs.insert(
+ std::make_pair(codec->payload_type, *codec));
+ }
+ sender.report_block_datas.push_back(report_block_data);
+ video_media_info.aggregated_senders.push_back(sender);
+ video_media_info.senders.push_back(sender);
}
- video_media_info.senders[0].report_block_datas.push_back(
- report_block_data);
- video_media_info.aggregated_senders.push_back(
- video_media_info.senders[0]);
auto* video_media_channel = pc_->AddVideoChannel("mid", transport_name);
video_media_channel->SetStats(video_media_info);
return;
@@ -2624,49 +2674,55 @@ TEST_P(RTCStatsCollectorTestWithParamKind,
// The report block's timestamp cannot be from the future, set the fake clock
// to match.
fake_clock_.SetTime(Timestamp::Micros(kReportBlockTimestampUtcUs));
-
- RTCPReportBlock report_block;
- // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
- // |source_ssrc|, "SSRC of the RTP packet sender".
- report_block.source_ssrc = 12;
- report_block.packets_lost = 7;
- ReportBlockData report_block_data;
- report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs);
- report_block_data.AddRoundTripTimeSample(1234);
- // Only the last sample should be exposed as the
- // |RTCRemoteInboundRtpStreamStats::round_trip_time|.
- report_block_data.AddRoundTripTimeSample(kRoundTripTimeMs);
-
- AddSenderInfoAndMediaChannel("TransportName", report_block_data,
+ auto ssrcs = {12, 13};
+ std::vector<ReportBlockData> report_block_datas;
+ for (auto ssrc : ssrcs) {
+ RTCPReportBlock report_block;
+ // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+ // |source_ssrc|, "SSRC of the RTP packet sender".
+ report_block.source_ssrc = ssrc;
+ report_block.packets_lost = 7;
+ ReportBlockData report_block_data;
+ report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs);
+ report_block_data.AddRoundTripTimeSample(1234);
+ // Only the last sample should be exposed as the
+ // |RTCRemoteInboundRtpStreamStats::round_trip_time|.
+ report_block_data.AddRoundTripTimeSample(kRoundTripTimeMs);
+ report_block_datas.push_back(report_block_data);
+ }
+ AddSenderInfoAndMediaChannel("TransportName", report_block_datas,
absl::nullopt);
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
-
- RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp(
- "RTCRemoteInboundRtp" + MediaTypeUpperCase() + "Stream_12",
- kReportBlockTimestampUtcUs);
- expected_remote_inbound_rtp.ssrc = 12;
- expected_remote_inbound_rtp.kind = MediaTypeLowerCase();
- expected_remote_inbound_rtp.transport_id =
- "RTCTransport_TransportName_1"; // 1 for RTP (we have no RTCP transport)
- expected_remote_inbound_rtp.packets_lost = 7;
- expected_remote_inbound_rtp.local_id =
- "RTCOutboundRTP" + MediaTypeUpperCase() + "Stream_12";
- expected_remote_inbound_rtp.round_trip_time = kRoundTripTimeSeconds;
- // This test does not set up RTCCodecStats, so |codec_id| and |jitter| are
- // expected to be missing. These are tested separately.
-
- ASSERT_TRUE(report->Get(expected_remote_inbound_rtp.id()));
- EXPECT_EQ(report->Get(expected_remote_inbound_rtp.id())
- ->cast_to<RTCRemoteInboundRtpStreamStats>(),
- expected_remote_inbound_rtp);
- EXPECT_TRUE(report->Get(*expected_remote_inbound_rtp.transport_id));
- ASSERT_TRUE(report->Get(*expected_remote_inbound_rtp.local_id));
- // Lookup works in both directions.
- EXPECT_EQ(*report->Get(*expected_remote_inbound_rtp.local_id)
- ->cast_to<RTCOutboundRTPStreamStats>()
- .remote_id,
- expected_remote_inbound_rtp.id());
+ for (auto ssrc : ssrcs) {
+ std::string stream_id = "Stream_" + std::to_string(ssrc);
+ RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp(
+ "RTCRemoteInboundRtp" + MediaTypeUpperCase() + stream_id,
+ kReportBlockTimestampUtcUs);
+ expected_remote_inbound_rtp.ssrc = ssrc;
+ expected_remote_inbound_rtp.kind = MediaTypeLowerCase();
+ expected_remote_inbound_rtp.transport_id =
+ "RTCTransport_TransportName_1"; // 1 for RTP (we have no RTCP
+ // transport)
+ expected_remote_inbound_rtp.packets_lost = 7;
+ expected_remote_inbound_rtp.local_id =
+ "RTCOutboundRTP" + MediaTypeUpperCase() + stream_id;
+ expected_remote_inbound_rtp.round_trip_time = kRoundTripTimeSeconds;
+ // This test does not set up RTCCodecStats, so |codec_id| and |jitter| are
+ // expected to be missing. These are tested separately.
+
+ ASSERT_TRUE(report->Get(expected_remote_inbound_rtp.id()));
+ EXPECT_EQ(report->Get(expected_remote_inbound_rtp.id())
+ ->cast_to<RTCRemoteInboundRtpStreamStats>(),
+ expected_remote_inbound_rtp);
+ EXPECT_TRUE(report->Get(*expected_remote_inbound_rtp.transport_id));
+ ASSERT_TRUE(report->Get(*expected_remote_inbound_rtp.local_id));
+ // Lookup works in both directions.
+ EXPECT_EQ(*report->Get(*expected_remote_inbound_rtp.local_id)
+ ->cast_to<RTCOutboundRTPStreamStats>()
+ .remote_id,
+ expected_remote_inbound_rtp.id());
+ }
}
TEST_P(RTCStatsCollectorTestWithParamKind,
@@ -2681,7 +2737,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind,
ReportBlockData report_block_data;
report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs);
- AddSenderInfoAndMediaChannel("TransportName", report_block_data,
+ AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
absl::nullopt);
// Advance time, it should be OK to have fresher reports than report blocks.
@@ -2719,7 +2775,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind,
codec.kind = media_type_;
codec.clock_rate = 1000;
- AddSenderInfoAndMediaChannel("TransportName", report_block_data, codec);
+ AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, codec);
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
@@ -2759,7 +2815,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind,
rtcp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_NEW;
pc_->SetTransportStats("TransportName", {rtp_transport_channel_stats,
rtcp_transport_channel_stats});
- AddSenderInfoAndMediaChannel("TransportName", report_block_data,
+ AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
absl::nullopt);
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
@@ -2976,7 +3032,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
void OnStatsDelivered(
const rtc::scoped_refptr<const RTCStatsReport>& report) override {
EXPECT_TRUE(signaling_thread_->IsCurrent());
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
delivered_report_ = report;
}
@@ -2987,7 +3043,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
bool HasVerifiedResults() {
EXPECT_TRUE(signaling_thread_->IsCurrent());
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (!delivered_report_)
return false;
EXPECT_EQ(produced_on_signaling_thread_, 1);
@@ -3014,7 +3070,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
RTCStatsReport* partial_report) override {
EXPECT_TRUE(signaling_thread_->IsCurrent());
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
EXPECT_FALSE(delivered_report_);
++produced_on_signaling_thread_;
}
@@ -3030,7 +3086,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
RTCStatsReport* partial_report) override {
EXPECT_TRUE(network_thread_->IsCurrent());
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
EXPECT_FALSE(delivered_report_);
++produced_on_network_thread_;
}
@@ -3044,7 +3100,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
rtc::Thread* const worker_thread_;
rtc::Thread* const network_thread_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
rtc::scoped_refptr<const RTCStatsReport> delivered_report_;
int produced_on_signaling_thread_ = 0;
int produced_on_network_thread_ = 0;
diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc
index d6d5c6f819..e627d45e2e 100644
--- a/pc/rtc_stats_integrationtest.cc
+++ b/pc/rtc_stats_integrationtest.cc
@@ -352,8 +352,7 @@ class RTCStatsReportVerifier {
explicit RTCStatsReportVerifier(const RTCStatsReport* report)
: report_(report) {}
- void VerifyReport(std::vector<const char*> allowed_missing_stats,
- bool enable_simulcast_stats) {
+ void VerifyReport(std::vector<const char*> allowed_missing_stats) {
std::set<const char*> missing_stats = StatsTypes();
bool verify_successful = true;
std::vector<const RTCTransportStats*> transport_stats =
@@ -396,7 +395,7 @@ class RTCStatsReportVerifier {
stats.cast_to<RTCInboundRTPStreamStats>());
} else if (stats.type() == RTCOutboundRTPStreamStats::kType) {
verify_successful &= VerifyRTCOutboundRTPStreamStats(
- stats.cast_to<RTCOutboundRTPStreamStats>(), enable_simulcast_stats);
+ stats.cast_to<RTCOutboundRTPStreamStats>());
} else if (stats.type() == RTCRemoteInboundRtpStreamStats::kType) {
verify_successful &= VerifyRTCRemoteInboundRtpStreamStats(
stats.cast_to<RTCRemoteInboundRtpStreamStats>());
@@ -461,7 +460,7 @@ class RTCStatsReportVerifier {
RTCStatsVerifier verifier(report_, &data_channel);
verifier.TestMemberIsDefined(data_channel.label);
verifier.TestMemberIsDefined(data_channel.protocol);
- verifier.TestMemberIsDefined(data_channel.datachannelid);
+ verifier.TestMemberIsDefined(data_channel.data_channel_identifier);
verifier.TestMemberIsDefined(data_channel.state);
verifier.TestMemberIsNonNegative<uint32_t>(data_channel.messages_sent);
verifier.TestMemberIsNonNegative<uint64_t>(data_channel.bytes_sent);
@@ -819,13 +818,62 @@ class RTCStatsReportVerifier {
// this test. See RFC 3550.
verifier.TestMemberIsNonNegative<int32_t>(inbound_stream.packets_lost);
verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp);
+ if (inbound_stream.frames_received.ValueOrDefault(0) > 0) {
+ verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frame_width);
+ verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frame_height);
+ } else {
+ verifier.TestMemberIsUndefined(inbound_stream.frame_width);
+ verifier.TestMemberIsUndefined(inbound_stream.frame_height);
+ }
+ if (inbound_stream.frames_per_second.is_defined()) {
+ verifier.TestMemberIsNonNegative<double>(
+ inbound_stream.frames_per_second);
+ } else {
+ verifier.TestMemberIsUndefined(inbound_stream.frames_per_second);
+ }
+ verifier.TestMemberIsUndefined(inbound_stream.frame_bit_depth);
if (inbound_stream.media_type.is_defined() &&
*inbound_stream.media_type == "video") {
verifier.TestMemberIsUndefined(inbound_stream.jitter);
+ verifier.TestMemberIsUndefined(inbound_stream.jitter_buffer_delay);
+ verifier.TestMemberIsUndefined(
+ inbound_stream.jitter_buffer_emitted_count);
+ verifier.TestMemberIsUndefined(inbound_stream.total_samples_received);
+ verifier.TestMemberIsUndefined(inbound_stream.concealed_samples);
+ verifier.TestMemberIsUndefined(inbound_stream.silent_concealed_samples);
+ verifier.TestMemberIsUndefined(inbound_stream.concealment_events);
+ verifier.TestMemberIsUndefined(
+ inbound_stream.inserted_samples_for_deceleration);
+ verifier.TestMemberIsUndefined(
+ inbound_stream.removed_samples_for_acceleration);
+ verifier.TestMemberIsUndefined(inbound_stream.audio_level);
+ verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy);
+ verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration);
+ verifier.TestMemberIsNonNegative<int32_t>(inbound_stream.frames_received);
} else {
verifier.TestMemberIsNonNegative<double>(inbound_stream.jitter);
+ verifier.TestMemberIsNonNegative<double>(
+ inbound_stream.jitter_buffer_delay);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.jitter_buffer_emitted_count);
+ verifier.TestMemberIsPositive<uint64_t>(
+ inbound_stream.total_samples_received);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.concealed_samples);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.silent_concealed_samples);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.concealment_events);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.inserted_samples_for_deceleration);
+ verifier.TestMemberIsNonNegative<uint64_t>(
+ inbound_stream.removed_samples_for_acceleration);
+ verifier.TestMemberIsPositive<double>(inbound_stream.audio_level);
+ verifier.TestMemberIsPositive<double>(inbound_stream.total_audio_energy);
+ verifier.TestMemberIsPositive<double>(
+ inbound_stream.total_samples_duration);
+ verifier.TestMemberIsUndefined(inbound_stream.frames_received);
}
-
verifier.TestMemberIsUndefined(inbound_stream.round_trip_time);
verifier.TestMemberIsUndefined(inbound_stream.packets_discarded);
verifier.TestMemberIsUndefined(inbound_stream.packets_repaired);
@@ -844,6 +892,7 @@ class RTCStatsReportVerifier {
*inbound_stream.media_type == "video") {
verifier.TestMemberIsDefined(inbound_stream.frames_decoded);
verifier.TestMemberIsDefined(inbound_stream.key_frames_decoded);
+ verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frames_dropped);
verifier.TestMemberIsNonNegative<double>(
inbound_stream.total_decode_time);
verifier.TestMemberIsNonNegative<double>(
@@ -856,6 +905,7 @@ class RTCStatsReportVerifier {
} else {
verifier.TestMemberIsUndefined(inbound_stream.frames_decoded);
verifier.TestMemberIsUndefined(inbound_stream.key_frames_decoded);
+ verifier.TestMemberIsUndefined(inbound_stream.frames_dropped);
verifier.TestMemberIsUndefined(inbound_stream.total_decode_time);
verifier.TestMemberIsUndefined(inbound_stream.total_inter_frame_delay);
verifier.TestMemberIsUndefined(
@@ -866,8 +916,7 @@ class RTCStatsReportVerifier {
}
bool VerifyRTCOutboundRTPStreamStats(
- const RTCOutboundRTPStreamStats& outbound_stream,
- bool enable_simulcast_stats) {
+ const RTCOutboundRTPStreamStats& outbound_stream) {
RTCStatsVerifier verifier(report_, &outbound_stream);
VerifyRTCRTPStreamStats(outbound_stream, &verifier);
if (outbound_stream.media_type.is_defined() &&
@@ -912,24 +961,23 @@ class RTCStatsReportVerifier {
// this to be present.
verifier.MarkMemberTested(outbound_stream.content_type, true);
verifier.TestMemberIsDefined(outbound_stream.encoder_implementation);
- if (enable_simulcast_stats) {
+ // Unless an implementation-specific amount of time has passed and at
+ // least one frame has been encoded, undefined is reported. Because it
+ // is hard to tell what is the case here, we treat FPS as optional.
+ // TODO(hbos): Update the tests to run until all implemented metrics
+ // should be populated.
+ if (outbound_stream.frames_per_second.is_defined()) {
verifier.TestMemberIsNonNegative<double>(
outbound_stream.frames_per_second);
- verifier.TestMemberIsNonNegative<uint32_t>(
- outbound_stream.frame_height);
- verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_width);
- verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frames_sent);
- verifier.TestMemberIsNonNegative<uint32_t>(
- outbound_stream.huge_frames_sent);
- verifier.MarkMemberTested(outbound_stream.rid, true);
} else {
verifier.TestMemberIsUndefined(outbound_stream.frames_per_second);
- verifier.TestMemberIsUndefined(outbound_stream.frame_height);
- verifier.TestMemberIsUndefined(outbound_stream.frame_width);
- verifier.TestMemberIsUndefined(outbound_stream.frames_sent);
- verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent);
- verifier.TestMemberIsUndefined(outbound_stream.rid);
}
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_height);
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_width);
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frames_sent);
+ verifier.TestMemberIsNonNegative<uint32_t>(
+ outbound_stream.huge_frames_sent);
+ verifier.MarkMemberTested(outbound_stream.rid, true);
} else {
verifier.TestMemberIsUndefined(outbound_stream.frames_encoded);
verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded);
@@ -1017,7 +1065,9 @@ class RTCStatsReportVerifier {
bool VerifyRTCTransportStats(const RTCTransportStats& transport) {
RTCStatsVerifier verifier(report_, &transport);
verifier.TestMemberIsNonNegative<uint64_t>(transport.bytes_sent);
+ verifier.TestMemberIsNonNegative<uint64_t>(transport.packets_sent);
verifier.TestMemberIsNonNegative<uint64_t>(transport.bytes_received);
+ verifier.TestMemberIsNonNegative<uint64_t>(transport.packets_received);
verifier.TestMemberIsOptionalIDReference(transport.rtcp_transport_stats_id,
RTCTransportStats::kType);
verifier.TestMemberIsDefined(transport.dtls_state);
@@ -1044,9 +1094,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) {
StartCall();
rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCaller();
- RTCStatsReportVerifier(report.get())
- .VerifyReport({},
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport({});
#if RTC_TRACE_EVENTS_ENABLED
EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
@@ -1057,9 +1105,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) {
StartCall();
rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCallee();
- RTCStatsReportVerifier(report.get())
- .VerifyReport({},
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport({});
#if RTC_TRACE_EVENTS_ENABLED
EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
@@ -1083,9 +1129,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) {
RTCMediaStreamStats::kType,
RTCDataChannelStats::kType,
};
- RTCStatsReportVerifier(report.get())
- .VerifyReport(allowed_missing_stats,
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
EXPECT_TRUE(report->size());
}
@@ -1104,9 +1148,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) {
RTCMediaStreamStats::kType,
RTCDataChannelStats::kType,
};
- RTCStatsReportVerifier(report.get())
- .VerifyReport(allowed_missing_stats,
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
EXPECT_TRUE(report->size());
}
diff --git a/pc/rtp_data_channel.cc b/pc/rtp_data_channel.cc
new file mode 100644
index 0000000000..b08b2b2ffb
--- /dev/null
+++ b/pc/rtp_data_channel.cc
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_data_channel.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/proxy.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/location.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+namespace {
+
+static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
+
+static std::atomic<int> g_unique_id{0};
+
+int GenerateUniqueId() {
+ return ++g_unique_id;
+}
+
+// Define proxy for DataChannelInterface.
+BEGIN_SIGNALING_PROXY_MAP(DataChannel)
+PROXY_SIGNALING_THREAD_DESTRUCTOR()
+PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+PROXY_METHOD0(void, UnregisterObserver)
+BYPASS_PROXY_CONSTMETHOD0(std::string, label)
+BYPASS_PROXY_CONSTMETHOD0(bool, reliable)
+BYPASS_PROXY_CONSTMETHOD0(bool, ordered)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
+BYPASS_PROXY_CONSTMETHOD0(std::string, protocol)
+BYPASS_PROXY_CONSTMETHOD0(bool, negotiated)
+// Can't bypass the proxy since the id may change.
+PROXY_CONSTMETHOD0(int, id)
+BYPASS_PROXY_CONSTMETHOD0(Priority, priority)
+PROXY_CONSTMETHOD0(DataState, state)
+PROXY_CONSTMETHOD0(RTCError, error)
+PROXY_CONSTMETHOD0(uint32_t, messages_sent)
+PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
+PROXY_CONSTMETHOD0(uint32_t, messages_received)
+PROXY_CONSTMETHOD0(uint64_t, bytes_received)
+PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
+PROXY_METHOD0(void, Close)
+// TODO(bugs.webrtc.org/11547): Change to run on the network thread.
+PROXY_METHOD1(bool, Send, const DataBuffer&)
+END_PROXY_MAP()
+
+} // namespace
+
+rtc::scoped_refptr<RtpDataChannel> RtpDataChannel::Create(
+ RtpDataChannelProviderInterface* provider,
+ const std::string& label,
+ const DataChannelInit& config,
+ rtc::Thread* signaling_thread) {
+ rtc::scoped_refptr<RtpDataChannel> channel(
+ new rtc::RefCountedObject<RtpDataChannel>(config, provider, label,
+ signaling_thread));
+ if (!channel->Init()) {
+ return nullptr;
+ }
+ return channel;
+}
+
+// static
+rtc::scoped_refptr<DataChannelInterface> RtpDataChannel::CreateProxy(
+ rtc::scoped_refptr<RtpDataChannel> channel) {
+ return DataChannelProxy::Create(channel->signaling_thread_, channel.get());
+}
+
+RtpDataChannel::RtpDataChannel(const DataChannelInit& config,
+ RtpDataChannelProviderInterface* provider,
+ const std::string& label,
+ rtc::Thread* signaling_thread)
+ : signaling_thread_(signaling_thread),
+ internal_id_(GenerateUniqueId()),
+ label_(label),
+ config_(config),
+ provider_(provider) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
+
+bool RtpDataChannel::Init() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (config_.reliable || config_.id != -1 || config_.maxRetransmits ||
+ config_.maxRetransmitTime) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
+ "invalid DataChannelInit.";
+ return false;
+ }
+
+ return true;
+}
+
+RtpDataChannel::~RtpDataChannel() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
+
+void RtpDataChannel::RegisterObserver(DataChannelObserver* observer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = observer;
+ DeliverQueuedReceivedData();
+}
+
+void RtpDataChannel::UnregisterObserver() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = nullptr;
+}
+
+void RtpDataChannel::Close() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (state_ == kClosed)
+ return;
+ send_ssrc_ = 0;
+ send_ssrc_set_ = false;
+ SetState(kClosing);
+ UpdateState();
+}
+
+RtpDataChannel::DataState RtpDataChannel::state() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return state_;
+}
+
+RTCError RtpDataChannel::error() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return error_;
+}
+
+uint32_t RtpDataChannel::messages_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_sent_;
+}
+
+uint64_t RtpDataChannel::bytes_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_sent_;
+}
+
+uint32_t RtpDataChannel::messages_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_received_;
+}
+
+uint64_t RtpDataChannel::bytes_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_received_;
+}
+
+bool RtpDataChannel::Send(const DataBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
+ if (state_ != kOpen) {
+ return false;
+ }
+
+ // TODO(jiayl): the spec is unclear about if the remote side should get the
+ // onmessage event. We need to figure out the expected behavior and change the
+ // code accordingly.
+ if (buffer.size() == 0) {
+ return true;
+ }
+
+ return SendDataMessage(buffer);
+}
+
+void RtpDataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
+ if (receive_ssrc_set_) {
+ return;
+ }
+ receive_ssrc_ = receive_ssrc;
+ receive_ssrc_set_ = true;
+ UpdateState();
+}
+
+void RtpDataChannel::OnTransportChannelClosed() {
+ RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA,
+ "Transport channel closed");
+ CloseAbruptlyWithError(std::move(error));
+}
+
+DataChannelStats RtpDataChannel::GetStats() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ DataChannelStats stats{internal_id_, id(), label(),
+ protocol(), state(), messages_sent(),
+ messages_received(), bytes_sent(), bytes_received()};
+ return stats;
+}
+
+// The remote peer request that this channel shall be closed.
+void RtpDataChannel::RemotePeerRequestClose() {
+ // Close with error code explicitly set to OK.
+ CloseAbruptlyWithError(RTCError());
+}
+
+void RtpDataChannel::SetSendSsrc(uint32_t send_ssrc) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (send_ssrc_set_) {
+ return;
+ }
+ send_ssrc_ = send_ssrc;
+ send_ssrc_set_ = true;
+ UpdateState();
+}
+
+void RtpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (params.ssrc != receive_ssrc_) {
+ return;
+ }
+
+ RTC_DCHECK(params.type == cricket::DMT_BINARY ||
+ params.type == cricket::DMT_TEXT);
+
+ RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = "
+ << params.sid;
+
+ bool binary = (params.type == cricket::DMT_BINARY);
+ auto buffer = std::make_unique<DataBuffer>(payload, binary);
+ if (state_ == kOpen && observer_) {
+ ++messages_received_;
+ bytes_received_ += buffer->size();
+ observer_->OnMessage(*buffer.get());
+ } else {
+ if (queued_received_data_.byte_count() + payload.size() >
+ kMaxQueuedReceivedDataBytes) {
+ RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
+
+ queued_received_data_.Clear();
+ CloseAbruptlyWithError(
+ RTCError(RTCErrorType::RESOURCE_EXHAUSTED,
+ "Queued received data exceeds the max buffer size."));
+
+ return;
+ }
+ queued_received_data_.PushBack(std::move(buffer));
+ }
+}
+
+void RtpDataChannel::OnChannelReady(bool writable) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
+ writable_ = writable;
+ if (!writable) {
+ return;
+ }
+
+ UpdateState();
+}
+
+void RtpDataChannel::CloseAbruptlyWithError(RTCError error) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
+ if (state_ == kClosed) {
+ return;
+ }
+
+ if (connected_to_provider_) {
+ DisconnectFromProvider();
+ }
+
+ // Still go to "kClosing" before "kClosed", since observers may be expecting
+ // that.
+ SetState(kClosing);
+ error_ = std::move(error);
+ SetState(kClosed);
+}
+
+void RtpDataChannel::UpdateState() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ // UpdateState determines what to do from a few state variables. Include
+ // all conditions required for each state transition here for
+ // clarity.
+ switch (state_) {
+ case kConnecting: {
+ if (send_ssrc_set_ == receive_ssrc_set_) {
+ if (!connected_to_provider_) {
+ connected_to_provider_ = provider_->ConnectDataChannel(this);
+ }
+ if (connected_to_provider_ && writable_) {
+ SetState(kOpen);
+ // If we have received buffers before the channel got writable.
+ // Deliver them now.
+ DeliverQueuedReceivedData();
+ }
+ }
+ break;
+ }
+ case kOpen: {
+ break;
+ }
+ case kClosing: {
+ // For RTP data channels, we can go to "closed" after we finish
+ // sending data and the send/recv SSRCs are unset.
+ if (connected_to_provider_) {
+ DisconnectFromProvider();
+ }
+ if (!send_ssrc_set_ && !receive_ssrc_set_) {
+ SetState(kClosed);
+ }
+ break;
+ }
+ case kClosed:
+ break;
+ }
+}
+
+void RtpDataChannel::SetState(DataState state) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (state_ == state) {
+ return;
+ }
+
+ state_ = state;
+ if (observer_) {
+ observer_->OnStateChange();
+ }
+ if (state_ == kOpen) {
+ SignalOpened(this);
+ } else if (state_ == kClosed) {
+ SignalClosed(this);
+ }
+}
+
+void RtpDataChannel::DisconnectFromProvider() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (!connected_to_provider_)
+ return;
+
+ provider_->DisconnectDataChannel(this);
+ connected_to_provider_ = false;
+}
+
+void RtpDataChannel::DeliverQueuedReceivedData() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (!observer_) {
+ return;
+ }
+
+ while (!queued_received_data_.Empty()) {
+ std::unique_ptr<DataBuffer> buffer = queued_received_data_.PopFront();
+ ++messages_received_;
+ bytes_received_ += buffer->size();
+ observer_->OnMessage(*buffer);
+ }
+}
+
+bool RtpDataChannel::SendDataMessage(const DataBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ cricket::SendDataParams send_params;
+
+ send_params.ssrc = send_ssrc_;
+ send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
+
+ cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
+ bool success = provider_->SendData(send_params, buffer.data, &send_result);
+
+ if (success) {
+ ++messages_sent_;
+ bytes_sent_ += buffer.size();
+ if (observer_ && buffer.size() > 0) {
+ observer_->OnBufferedAmountChange(buffer.size());
+ }
+ return true;
+ }
+
+ return false;
+}
+
+// static
+void RtpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) {
+ g_unique_id = new_value;
+}
+
+} // namespace webrtc
diff --git a/pc/rtp_data_channel.h b/pc/rtp_data_channel.h
new file mode 100644
index 0000000000..adc724d64b
--- /dev/null
+++ b/pc/rtp_data_channel.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_DATA_CHANNEL_H_
+#define PC_RTP_DATA_CHANNEL_H_
+
+#include <memory>
+#include <string>
+
+#include "api/data_channel_interface.h"
+#include "api/priority.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/base/media_channel.h"
+#include "pc/channel.h"
+#include "pc/data_channel_utils.h"
+#include "rtc_base/async_invoker.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+
+namespace webrtc {
+
+class RtpDataChannel;
+
+// TODO(deadbeef): Once RTP data channels go away, get rid of this and have
+// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport
+// interface) instead.
+class RtpDataChannelProviderInterface {
+ public:
+ // Sends the data to the transport.
+ virtual bool SendData(const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) = 0;
+ // Connects to the transport signals.
+ virtual bool ConnectDataChannel(RtpDataChannel* data_channel) = 0;
+ // Disconnects from the transport signals.
+ virtual void DisconnectDataChannel(RtpDataChannel* data_channel) = 0;
+ // Returns true if the transport channel is ready to send data.
+ virtual bool ReadyToSendData() const = 0;
+
+ protected:
+ virtual ~RtpDataChannelProviderInterface() {}
+};
+
+// RtpDataChannel is an implementation of the DataChannelInterface based on
+// libjingle's data engine. It provides an implementation of unreliable data
+// channels.
+
+// DataChannel states:
+// kConnecting: The channel has been created the transport might not yet be
+// ready.
+// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc
+// and a remote SSRC set by call to UpdateReceiveSsrc and the transport
+// has been writable once.
+// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc
+// has been called with SSRC==0
+// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with
+// SSRC==0.
+class RtpDataChannel : public DataChannelInterface,
+ public sigslot::has_slots<> {
+ public:
+ static rtc::scoped_refptr<RtpDataChannel> Create(
+ RtpDataChannelProviderInterface* provider,
+ const std::string& label,
+ const DataChannelInit& config,
+ rtc::Thread* signaling_thread);
+
+ // Instantiates an API proxy for a DataChannel instance that will be handed
+ // out to external callers.
+ static rtc::scoped_refptr<DataChannelInterface> CreateProxy(
+ rtc::scoped_refptr<RtpDataChannel> channel);
+
+ void RegisterObserver(DataChannelObserver* observer) override;
+ void UnregisterObserver() override;
+
+ std::string label() const override { return label_; }
+ bool reliable() const override { return false; }
+ bool ordered() const override { return config_.ordered; }
+ // Backwards compatible accessors
+ uint16_t maxRetransmitTime() const override {
+ return config_.maxRetransmitTime ? *config_.maxRetransmitTime
+ : static_cast<uint16_t>(-1);
+ }
+ uint16_t maxRetransmits() const override {
+ return config_.maxRetransmits ? *config_.maxRetransmits
+ : static_cast<uint16_t>(-1);
+ }
+ absl::optional<int> maxPacketLifeTime() const override {
+ return config_.maxRetransmitTime;
+ }
+ absl::optional<int> maxRetransmitsOpt() const override {
+ return config_.maxRetransmits;
+ }
+ std::string protocol() const override { return config_.protocol; }
+ bool negotiated() const override { return config_.negotiated; }
+ int id() const override { return config_.id; }
+ Priority priority() const override {
+ return config_.priority ? *config_.priority : Priority::kLow;
+ }
+
+ virtual int internal_id() const { return internal_id_; }
+
+ uint64_t buffered_amount() const override { return 0; }
+ void Close() override;
+ DataState state() const override;
+ RTCError error() const override;
+ uint32_t messages_sent() const override;
+ uint64_t bytes_sent() const override;
+ uint32_t messages_received() const override;
+ uint64_t bytes_received() const override;
+ bool Send(const DataBuffer& buffer) override;
+
+ // Close immediately, ignoring any queued data or closing procedure.
+ // This is called when SDP indicates a channel should be removed.
+ void CloseAbruptlyWithError(RTCError error);
+
+ // Called when the channel's ready to use. That can happen when the
+ // underlying DataMediaChannel becomes ready, or when this channel is a new
+ // stream on an existing DataMediaChannel, and we've finished negotiation.
+ void OnChannelReady(bool writable);
+
+ // Slots for provider to connect signals to.
+ void OnDataReceived(const cricket::ReceiveDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload);
+
+ // Called when the transport channel is unusable.
+ // This method makes sure the DataChannel is disconnected and changes state
+ // to kClosed.
+ void OnTransportChannelClosed();
+
+ DataChannelStats GetStats() const;
+
+ // The remote peer requested that this channel should be closed.
+ void RemotePeerRequestClose();
+ // Set the SSRC this channel should use to send data on the
+ // underlying data engine. |send_ssrc| == 0 means that the channel is no
+ // longer part of the session negotiation.
+ void SetSendSsrc(uint32_t send_ssrc);
+ // Set the SSRC this channel should use to receive data from the
+ // underlying data engine.
+ void SetReceiveSsrc(uint32_t receive_ssrc);
+
+ // Emitted when state transitions to kOpen.
+ sigslot::signal1<DataChannelInterface*> SignalOpened;
+ // Emitted when state transitions to kClosed.
+ sigslot::signal1<DataChannelInterface*> SignalClosed;
+
+ // Reset the allocator for internal ID values for testing, so that
+ // the internal IDs generated are predictable. Test only.
+ static void ResetInternalIdAllocatorForTesting(int new_value);
+
+ protected:
+ RtpDataChannel(const DataChannelInit& config,
+ RtpDataChannelProviderInterface* client,
+ const std::string& label,
+ rtc::Thread* signaling_thread);
+ ~RtpDataChannel() override;
+
+ private:
+ bool Init();
+ void UpdateState();
+ void SetState(DataState state);
+ void DisconnectFromProvider();
+
+ void DeliverQueuedReceivedData();
+
+ bool SendDataMessage(const DataBuffer& buffer);
+
+ rtc::Thread* const signaling_thread_;
+ const int internal_id_;
+ const std::string label_;
+ const DataChannelInit config_;
+ DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr;
+ DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting;
+ RTCError error_ RTC_GUARDED_BY(signaling_thread_);
+ uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ RtpDataChannelProviderInterface* const provider_;
+ bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false;
+ bool send_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false;
+ bool receive_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false;
+ bool writable_ RTC_GUARDED_BY(signaling_thread_) = false;
+ uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_);
+ rtc::AsyncInvoker invoker_ RTC_GUARDED_BY(signaling_thread_);
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_DATA_CHANNEL_H_
diff --git a/pc/rtp_sender.cc b/pc/rtp_sender.cc
index c56f4a94d9..3c56bf0724 100644
--- a/pc/rtp_sender.cc
+++ b/pc/rtp_sender.cc
@@ -381,7 +381,7 @@ void RtpSenderBase::SetEncoderToPacketizerFrameTransformer(
LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (sink_)
sink_->OnClose();
}
@@ -393,7 +393,7 @@ void LocalAudioSinkAdapter::OnData(
size_t number_of_channels,
size_t number_of_frames,
absl::optional<int64_t> absolute_capture_timestamp_ms) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (sink_) {
sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
number_of_frames, absolute_capture_timestamp_ms);
@@ -401,7 +401,7 @@ void LocalAudioSinkAdapter::OnData(
}
void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!sink || !sink_);
sink_ = sink;
}
diff --git a/pc/rtp_sender.h b/pc/rtp_sender.h
index 1e0de22c5c..15d47fd90d 100644
--- a/pc/rtp_sender.h
+++ b/pc/rtp_sender.h
@@ -24,7 +24,7 @@
#include "media/base/audio_source.h"
#include "media/base/media_channel.h"
#include "pc/dtmf_sender.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -237,7 +237,7 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
cricket::AudioSource::Sink* sink_;
// Critical section protecting |sink_|.
- rtc::CriticalSection lock_;
+ Mutex lock_;
};
class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase {
diff --git a/pc/rtp_sender_receiver_unittest.cc b/pc/rtp_sender_receiver_unittest.cc
index 9736f183b5..364e87a89f 100644
--- a/pc/rtp_sender_receiver_unittest.cc
+++ b/pc/rtp_sender_receiver_unittest.cc
@@ -88,7 +88,7 @@ static const int kDefaultTimeout = 10000; // 10 seconds.
class MockSetStreamsObserver
: public webrtc::RtpSenderBase::SetStreamsObserver {
public:
- MOCK_METHOD0(OnSetStreams, void());
+ MOCK_METHOD(void, OnSetStreams, (), (override));
};
} // namespace
@@ -123,14 +123,13 @@ class RtpSenderReceiverTest
voice_channel_ = channel_manager_.CreateVoiceChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport_.get(),
- MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_AUDIO,
- srtp_required, webrtc::CryptoOptions(), &ssrc_generator_,
- cricket::AudioOptions());
+ rtc::Thread::Current(), cricket::CN_AUDIO, srtp_required,
+ webrtc::CryptoOptions(), &ssrc_generator_, cricket::AudioOptions());
video_channel_ = channel_manager_.CreateVideoChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport_.get(),
- MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_VIDEO,
- srtp_required, webrtc::CryptoOptions(), &ssrc_generator_,
- cricket::VideoOptions(), video_bitrate_allocator_factory_.get());
+ rtc::Thread::Current(), cricket::CN_VIDEO, srtp_required,
+ webrtc::CryptoOptions(), &ssrc_generator_, cricket::VideoOptions(),
+ video_bitrate_allocator_factory_.get());
voice_channel_->Enable(true);
video_channel_->Enable(true);
voice_media_channel_ = media_engine_->GetVoiceChannel(0);
diff --git a/pc/rtp_transceiver.cc b/pc/rtp_transceiver.cc
index d6e5ff46a1..b4e500bbc8 100644
--- a/pc/rtp_transceiver.cc
+++ b/pc/rtp_transceiver.cc
@@ -114,7 +114,7 @@ RtpTransceiver::RtpTransceiver(
: unified_plan_(true),
media_type_(sender->media_type()),
channel_manager_(channel_manager),
- HeaderExtensionsToOffer_(std::move(header_extensions_offered)) {
+ header_extensions_to_offer_(std::move(header_extensions_offered)) {
RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO ||
media_type_ == cricket::MEDIA_TYPE_VIDEO);
RTC_DCHECK_EQ(sender->media_type(), receiver->media_type());
@@ -356,7 +356,51 @@ RTCError RtpTransceiver::SetCodecPreferences(
std::vector<RtpHeaderExtensionCapability>
RtpTransceiver::HeaderExtensionsToOffer() const {
- return HeaderExtensionsToOffer_;
+ return header_extensions_to_offer_;
+}
+
+RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) {
+ for (const auto& entry : header_extensions_to_offer) {
+ // Handle unsupported requests for mandatory extensions as per
+ // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface.
+ // Note:
+ // - We do not handle setOfferedRtpHeaderExtensions algorithm step 2.1,
+ // this has to be checked on a higher level. We naturally error out
+ // in the handling of Step 2.2 if an unset URI is encountered.
+
+ // Step 2.2.
+ // Handle unknown extensions.
+ auto it = std::find_if(
+ header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(),
+ [&entry](const auto& offered) { return entry.uri == offered.uri; });
+ if (it == header_extensions_to_offer_.end()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Attempted to modify an unoffered extension.");
+ }
+
+ // Step 2.4-2.5.
+ // - Use of the transceiver interface indicates unified plan is in effect,
+ // hence the MID extension needs to be enabled.
+ // - Also handle the mandatory video orientation extensions.
+ if ((entry.uri == RtpExtension::kMidUri ||
+ entry.uri == RtpExtension::kVideoRotationUri) &&
+ entry.direction != RtpTransceiverDirection::kSendRecv) {
+ return RTCError(RTCErrorType::INVALID_MODIFICATION,
+ "Attempted to stop a mandatory extension.");
+ }
+ }
+
+ // Apply mutation after error checking.
+ for (const auto& entry : header_extensions_to_offer) {
+ auto it = std::find_if(
+ header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(),
+ [&entry](const auto& offered) { return entry.uri == offered.uri; });
+ it->direction = entry.direction;
+ }
+
+ return RTCError::OK();
}
} // namespace webrtc
diff --git a/pc/rtp_transceiver.h b/pc/rtp_transceiver.h
index 0668447b9f..be46ccfd5c 100644
--- a/pc/rtp_transceiver.h
+++ b/pc/rtp_transceiver.h
@@ -195,6 +195,9 @@ class RtpTransceiver final
}
std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const override;
+ RTCError SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) override;
private:
void OnFirstPacketReceived(cricket::ChannelInterface* channel);
@@ -220,7 +223,7 @@ class RtpTransceiver final
cricket::ChannelInterface* channel_ = nullptr;
cricket::ChannelManager* channel_manager_ = nullptr;
std::vector<RtpCodecCapability> codec_preferences_;
- std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer_;
+ std::vector<RtpHeaderExtensionCapability> header_extensions_to_offer_;
};
BEGIN_SIGNALING_PROXY_MAP(RtpTransceiver)
@@ -241,6 +244,9 @@ PROXY_METHOD1(webrtc::RTCError,
PROXY_CONSTMETHOD0(std::vector<RtpCodecCapability>, codec_preferences)
PROXY_CONSTMETHOD0(std::vector<RtpHeaderExtensionCapability>,
HeaderExtensionsToOffer)
+PROXY_METHOD1(webrtc::RTCError,
+ SetOfferedRtpHeaderExtensions,
+ rtc::ArrayView<const RtpHeaderExtensionCapability>)
END_PROXY_MAP()
} // namespace webrtc
diff --git a/pc/rtp_transceiver_unittest.cc b/pc/rtp_transceiver_unittest.cc
index 5e345739f1..e3f05c4dd9 100644
--- a/pc/rtp_transceiver_unittest.cc
+++ b/pc/rtp_transceiver_unittest.cc
@@ -25,6 +25,7 @@ using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Field;
using ::testing::Not;
+using ::testing::Property;
using ::testing::Return;
using ::testing::ReturnRef;
@@ -78,27 +79,95 @@ TEST(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) {
EXPECT_EQ(nullptr, transceiver.channel());
}
-TEST(RtpTransceiverTest,
- InitsWithChannelManagerRtpHeaderExtensionCapabilities) {
- cricket::ChannelManager channel_manager(
- std::make_unique<cricket::FakeMediaEngine>(),
- std::make_unique<cricket::FakeDataEngine>(), rtc::Thread::Current(),
- rtc::Thread::Current());
- std::vector<RtpHeaderExtensionCapability> extensions({
- RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kSendRecv),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kRecvOnly),
- });
- RtpTransceiver transceiver(
- RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
- rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpSenderInternal>()),
- RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpReceiverInternal>()),
- &channel_manager, extensions);
- EXPECT_EQ(transceiver.HeaderExtensionsToOffer(), extensions);
+class RtpTransceiverTestForHeaderExtensions : public ::testing::Test {
+ public:
+ RtpTransceiverTestForHeaderExtensions()
+ : channel_manager_(std::make_unique<cricket::FakeMediaEngine>(),
+ std::make_unique<cricket::FakeDataEngine>(),
+ rtc::Thread::Current(),
+ rtc::Thread::Current()),
+ extensions_(
+ {RtpHeaderExtensionCapability("uri1",
+ 1,
+ RtpTransceiverDirection::kSendOnly),
+ RtpHeaderExtensionCapability("uri2",
+ 2,
+ RtpTransceiverDirection::kRecvOnly),
+ RtpHeaderExtensionCapability(RtpExtension::kMidUri,
+ 3,
+ RtpTransceiverDirection::kSendRecv),
+ RtpHeaderExtensionCapability(RtpExtension::kVideoRotationUri,
+ 4,
+ RtpTransceiverDirection::kSendRecv)}),
+ transceiver_(RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+ rtc::Thread::Current(),
+ new rtc::RefCountedObject<MockRtpSenderInternal>()),
+ RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ rtc::Thread::Current(),
+ new rtc::RefCountedObject<MockRtpReceiverInternal>()),
+ &channel_manager_,
+ extensions_) {}
+
+ cricket::ChannelManager channel_manager_;
+ std::vector<RtpHeaderExtensionCapability> extensions_;
+ RtpTransceiver transceiver_;
+};
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) {
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) {
+ auto modified_extensions = extensions_;
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendOnly;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kRecvOnly;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kInactive;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) {
+ auto modified_extensions = extensions_;
+ modified_extensions[0].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsUnsupportedExtension) {
+ std::vector<RtpHeaderExtensionCapability> modified_extensions(
+ {RtpHeaderExtensionCapability("uri3", 1,
+ RtpTransceiverDirection::kSendRecv)});
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_PARAMETER));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+ RejectsStoppedMandatoryExtensions) {
+ std::vector<RtpHeaderExtensionCapability> modified_extensions = extensions_;
+ // Attempting to stop the mandatory MID extension.
+ modified_extensions[2].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+ modified_extensions = extensions_;
+ // Attempting to stop the mandatory video orientation extension.
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
}
} // namespace webrtc
diff --git a/pc/data_channel.cc b/pc/sctp_data_channel.cc
index 4f871b4d50..e603dddd0f 100644
--- a/pc/data_channel.cc
+++ b/pc/sctp_data_channel.cc
@@ -1,5 +1,5 @@
/*
- * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,12 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "pc/data_channel.h"
+#include "pc/sctp_data_channel.h"
#include <memory>
#include <string>
#include <utility>
+#include "api/proxy.h"
#include "media/sctp/sctp_transport_internal.h"
#include "pc/sctp_utils.h"
#include "rtc_base/checks.h"
@@ -24,17 +25,46 @@
namespace webrtc {
+namespace {
+
static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
static size_t kMaxQueuedSendDataBytes = 16 * 1024 * 1024;
-namespace {
-
static std::atomic<int> g_unique_id{0};
int GenerateUniqueId() {
return ++g_unique_id;
}
+// Define proxy for DataChannelInterface.
+BEGIN_SIGNALING_PROXY_MAP(DataChannel)
+PROXY_SIGNALING_THREAD_DESTRUCTOR()
+PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+PROXY_METHOD0(void, UnregisterObserver)
+BYPASS_PROXY_CONSTMETHOD0(std::string, label)
+BYPASS_PROXY_CONSTMETHOD0(bool, reliable)
+BYPASS_PROXY_CONSTMETHOD0(bool, ordered)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
+BYPASS_PROXY_CONSTMETHOD0(std::string, protocol)
+BYPASS_PROXY_CONSTMETHOD0(bool, negotiated)
+// Can't bypass the proxy since the id may change.
+PROXY_CONSTMETHOD0(int, id)
+BYPASS_PROXY_CONSTMETHOD0(Priority, priority)
+PROXY_CONSTMETHOD0(DataState, state)
+PROXY_CONSTMETHOD0(RTCError, error)
+PROXY_CONSTMETHOD0(uint32_t, messages_sent)
+PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
+PROXY_CONSTMETHOD0(uint32_t, messages_received)
+PROXY_CONSTMETHOD0(uint64_t, bytes_received)
+PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
+PROXY_METHOD0(void, Close)
+// TODO(bugs.webrtc.org/11547): Change to run on the network thread.
+PROXY_METHOD1(bool, Send, const DataBuffer&)
+END_PROXY_MAP()
+
} // namespace
InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base)
@@ -98,175 +128,161 @@ bool SctpSidAllocator::IsSidAvailable(int sid) const {
return used_sids_.find(sid) == used_sids_.end();
}
-bool DataChannel::PacketQueue::Empty() const {
- return packets_.empty();
-}
-
-std::unique_ptr<DataBuffer> DataChannel::PacketQueue::PopFront() {
- RTC_DCHECK(!packets_.empty());
- byte_count_ -= packets_.front()->size();
- std::unique_ptr<DataBuffer> packet = std::move(packets_.front());
- packets_.pop_front();
- return packet;
-}
-
-void DataChannel::PacketQueue::PushFront(std::unique_ptr<DataBuffer> packet) {
- byte_count_ += packet->size();
- packets_.push_front(std::move(packet));
-}
-
-void DataChannel::PacketQueue::PushBack(std::unique_ptr<DataBuffer> packet) {
- byte_count_ += packet->size();
- packets_.push_back(std::move(packet));
-}
-
-void DataChannel::PacketQueue::Clear() {
- packets_.clear();
- byte_count_ = 0;
-}
-
-void DataChannel::PacketQueue::Swap(PacketQueue* other) {
- size_t other_byte_count = other->byte_count_;
- other->byte_count_ = byte_count_;
- byte_count_ = other_byte_count;
-
- other->packets_.swap(packets_);
-}
-
-rtc::scoped_refptr<DataChannel> DataChannel::Create(
- DataChannelProviderInterface* provider,
- cricket::DataChannelType dct,
+rtc::scoped_refptr<SctpDataChannel> SctpDataChannel::Create(
+ SctpDataChannelProviderInterface* provider,
const std::string& label,
- const InternalDataChannelInit& config) {
- rtc::scoped_refptr<DataChannel> channel(
- new rtc::RefCountedObject<DataChannel>(provider, dct, label));
- if (!channel->Init(config)) {
- return NULL;
+ const InternalDataChannelInit& config,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread) {
+ rtc::scoped_refptr<SctpDataChannel> channel(
+ new rtc::RefCountedObject<SctpDataChannel>(
+ config, provider, label, signaling_thread, network_thread));
+ if (!channel->Init()) {
+ return nullptr;
}
return channel;
}
-bool DataChannel::IsSctpLike(cricket::DataChannelType type) {
- return type == cricket::DCT_SCTP || type == cricket::DCT_MEDIA_TRANSPORT ||
- type == cricket::DCT_DATA_CHANNEL_TRANSPORT ||
- type == cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP;
-}
-
-DataChannel::DataChannel(DataChannelProviderInterface* provider,
- cricket::DataChannelType dct,
- const std::string& label)
- : internal_id_(GenerateUniqueId()),
+// static
+rtc::scoped_refptr<DataChannelInterface> SctpDataChannel::CreateProxy(
+ rtc::scoped_refptr<SctpDataChannel> channel) {
+ // TODO(bugs.webrtc.org/11547): incorporate the network thread in the proxy.
+ // Also, consider allowing the proxy object to own the reference (std::move).
+ // As is, the proxy has a raw pointer and no reference to the channel object
+ // and trusting that the lifetime management aligns with the
+ // sctp_data_channels_ array in SctpDataChannelController.
+ return DataChannelProxy::Create(channel->signaling_thread_, channel.get());
+}
+
+SctpDataChannel::SctpDataChannel(const InternalDataChannelInit& config,
+ SctpDataChannelProviderInterface* provider,
+ const std::string& label,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread)
+ : signaling_thread_(signaling_thread),
+ network_thread_(network_thread),
+ internal_id_(GenerateUniqueId()),
label_(label),
+ config_(config),
observer_(nullptr),
- state_(kConnecting),
- messages_sent_(0),
- bytes_sent_(0),
- messages_received_(0),
- bytes_received_(0),
- buffered_amount_(0),
- data_channel_type_(dct),
- provider_(provider),
- handshake_state_(kHandshakeInit),
- connected_to_provider_(false),
- send_ssrc_set_(false),
- receive_ssrc_set_(false),
- writable_(false),
- send_ssrc_(0),
- receive_ssrc_(0) {}
-
-bool DataChannel::Init(const InternalDataChannelInit& config) {
- if (data_channel_type_ == cricket::DCT_RTP) {
- if (config.reliable || config.id != -1 || config.maxRetransmits ||
- config.maxRetransmitTime) {
- RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
- "invalid DataChannelInit.";
- return false;
- }
- handshake_state_ = kHandshakeReady;
- } else if (IsSctpLike(data_channel_type_)) {
- if (config.id < -1 ||
- (config.maxRetransmits && *config.maxRetransmits < 0) ||
- (config.maxRetransmitTime && *config.maxRetransmitTime < 0)) {
- RTC_LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
- "invalid DataChannelInit.";
- return false;
- }
- if (config.maxRetransmits && config.maxRetransmitTime) {
- RTC_LOG(LS_ERROR)
- << "maxRetransmits and maxRetransmitTime should not be both set.";
- return false;
- }
- config_ = config;
-
- switch (config_.open_handshake_role) {
- case webrtc::InternalDataChannelInit::kNone: // pre-negotiated
- handshake_state_ = kHandshakeReady;
- break;
- case webrtc::InternalDataChannelInit::kOpener:
- handshake_state_ = kHandshakeShouldSendOpen;
- break;
- case webrtc::InternalDataChannelInit::kAcker:
- handshake_state_ = kHandshakeShouldSendAck;
- break;
- }
+ provider_(provider) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
- // Try to connect to the transport in case the transport channel already
- // exists.
- OnTransportChannelCreated();
-
- // Checks if the transport is ready to send because the initial channel
- // ready signal may have been sent before the DataChannel creation.
- // This has to be done async because the upper layer objects (e.g.
- // Chrome glue and WebKit) are not wired up properly until after this
- // function returns.
- if (provider_->ReadyToSendData()) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, rtc::Thread::Current(),
- [this] { OnChannelReady(true); });
- }
+bool SctpDataChannel::Init() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (config_.id < -1 ||
+ (config_.maxRetransmits && *config_.maxRetransmits < 0) ||
+ (config_.maxRetransmitTime && *config_.maxRetransmitTime < 0)) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
+ "invalid DataChannelInit.";
+ return false;
+ }
+ if (config_.maxRetransmits && config_.maxRetransmitTime) {
+ RTC_LOG(LS_ERROR)
+ << "maxRetransmits and maxRetransmitTime should not be both set.";
+ return false;
+ }
+
+ switch (config_.open_handshake_role) {
+ case webrtc::InternalDataChannelInit::kNone: // pre-negotiated
+ handshake_state_ = kHandshakeReady;
+ break;
+ case webrtc::InternalDataChannelInit::kOpener:
+ handshake_state_ = kHandshakeShouldSendOpen;
+ break;
+ case webrtc::InternalDataChannelInit::kAcker:
+ handshake_state_ = kHandshakeShouldSendAck;
+ break;
+ }
+
+ // Try to connect to the transport in case the transport channel already
+ // exists.
+ OnTransportChannelCreated();
+
+ // Checks if the transport is ready to send because the initial channel
+ // ready signal may have been sent before the DataChannel creation.
+ // This has to be done async because the upper layer objects (e.g.
+ // Chrome glue and WebKit) are not wired up properly until after this
+ // function returns.
+ if (provider_->ReadyToSendData()) {
+ invoker_.AsyncInvoke<void>(RTC_FROM_HERE, rtc::Thread::Current(),
+ [this] { OnTransportReady(true); });
}
return true;
}
-DataChannel::~DataChannel() {}
+SctpDataChannel::~SctpDataChannel() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
-void DataChannel::RegisterObserver(DataChannelObserver* observer) {
+void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
observer_ = observer;
DeliverQueuedReceivedData();
}
-void DataChannel::UnregisterObserver() {
- observer_ = NULL;
+void SctpDataChannel::UnregisterObserver() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = nullptr;
}
-bool DataChannel::reliable() const {
- if (data_channel_type_ == cricket::DCT_RTP) {
- return false;
- } else {
- return !config_.maxRetransmits && !config_.maxRetransmitTime;
- }
+bool SctpDataChannel::reliable() const {
+ // May be called on any thread.
+ return !config_.maxRetransmits && !config_.maxRetransmitTime;
}
-uint64_t DataChannel::buffered_amount() const {
+uint64_t SctpDataChannel::buffered_amount() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
return buffered_amount_;
}
-void DataChannel::Close() {
+void SctpDataChannel::Close() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == kClosed)
return;
- send_ssrc_ = 0;
- send_ssrc_set_ = false;
SetState(kClosing);
// Will send queued data before beginning the underlying closing procedure.
UpdateState();
}
-RTCError DataChannel::error() const {
+SctpDataChannel::DataState SctpDataChannel::state() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return state_;
+}
+
+RTCError SctpDataChannel::error() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
return error_;
}
-bool DataChannel::Send(const DataBuffer& buffer) {
- buffered_amount_ += buffer.size();
+uint32_t SctpDataChannel::messages_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_sent_;
+}
+
+uint64_t SctpDataChannel::bytes_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_sent_;
+}
+
+uint32_t SctpDataChannel::messages_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_received_;
+}
+
+uint64_t SctpDataChannel::bytes_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_received_;
+}
+
+bool SctpDataChannel::Send(const DataBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ // TODO(bugs.webrtc.org/11547): Expect this method to be called on the network
+ // thread. Bring buffer management etc to the network thread and keep the
+ // operational state management on the signaling thread.
+
if (state_ != kOpen) {
return false;
}
@@ -278,12 +294,11 @@ bool DataChannel::Send(const DataBuffer& buffer) {
return true;
}
+ buffered_amount_ += buffer.size();
+
// If the queue is non-empty, we're waiting for SignalReadyToSend,
// so just add to the end of the queue and keep waiting.
if (!queued_send_data_.Empty()) {
- // Only SCTP DataChannel queues the outgoing data when the transport is
- // blocked.
- RTC_DCHECK(IsSctpLike(data_channel_type_));
if (!QueueSendDataMessage(buffer)) {
RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to queue "
"additional data.";
@@ -295,41 +310,30 @@ bool DataChannel::Send(const DataBuffer& buffer) {
return true;
}
- bool success = SendDataMessage(buffer, true);
- if (data_channel_type_ == cricket::DCT_RTP) {
- return success;
- }
+ SendDataMessage(buffer, true);
// Always return true for SCTP DataChannel per the spec.
return true;
}
-void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
- RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP);
-
- if (receive_ssrc_set_) {
- return;
- }
- receive_ssrc_ = receive_ssrc;
- receive_ssrc_set_ = true;
- UpdateState();
-}
-
-void DataChannel::SetSctpSid(int sid) {
+void SctpDataChannel::SetSctpSid(int sid) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK_LT(config_.id, 0);
RTC_DCHECK_GE(sid, 0);
- RTC_DCHECK(IsSctpLike(data_channel_type_));
+ RTC_DCHECK_NE(handshake_state_, kHandshakeWaitingForAck);
+ RTC_DCHECK_EQ(state_, kConnecting);
+
if (config_.id == sid) {
return;
}
- config_.id = sid;
+ const_cast<InternalDataChannelInit&>(config_).id = sid;
provider_->AddSctpDataStream(sid);
}
-void DataChannel::OnClosingProcedureStartedRemotely(int sid) {
- if (IsSctpLike(data_channel_type_) && sid == config_.id &&
- state_ != kClosing && state_ != kClosed) {
+void SctpDataChannel::OnClosingProcedureStartedRemotely(int sid) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (sid == config_.id && state_ != kClosing && state_ != kClosed) {
// Don't bother sending queued data since the side that initiated the
// closure wouldn't receive it anyway. See crbug.com/559394 for a lengthy
// discussion about this.
@@ -343,8 +347,9 @@ void DataChannel::OnClosingProcedureStartedRemotely(int sid) {
}
}
-void DataChannel::OnClosingProcedureComplete(int sid) {
- if (IsSctpLike(data_channel_type_) && sid == config_.id) {
+void SctpDataChannel::OnClosingProcedureComplete(int sid) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (sid == config_.id) {
// If the closing procedure is complete, we should have finished sending
// all pending data and transitioned to kClosing already.
RTC_DCHECK_EQ(state_, kClosing);
@@ -354,8 +359,8 @@ void DataChannel::OnClosingProcedureComplete(int sid) {
}
}
-void DataChannel::OnTransportChannelCreated() {
- RTC_DCHECK(IsSctpLike(data_channel_type_));
+void SctpDataChannel::OnTransportChannelCreated() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (!connected_to_provider_) {
connected_to_provider_ = provider_->ConnectDataChannel(this);
}
@@ -366,44 +371,32 @@ void DataChannel::OnTransportChannelCreated() {
}
}
-void DataChannel::OnTransportChannelClosed() {
+void SctpDataChannel::OnTransportChannelClosed() {
// The SctpTransport is unusable (for example, because the SCTP m= section
// was rejected, or because the DTLS transport closed), so we need to close
// abruptly.
- // Note: this needs to differentiate between normal close and error close.
- // https://w3c.github.io/webrtc-pc/#announcing-a-data-channel-as-closed
- CloseAbruptlyWithError(
- RTCError(RTCErrorType::NETWORK_ERROR, "Transport channel closed"));
+ RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA,
+ "Transport channel closed");
+ error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE);
+ CloseAbruptlyWithError(std::move(error));
}
-// The remote peer request that this channel shall be closed.
-void DataChannel::RemotePeerRequestClose() {
- RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP);
- // Close with error code explicitly set to OK.
- CloseAbruptlyWithError(RTCError());
+DataChannelStats SctpDataChannel::GetStats() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ DataChannelStats stats{internal_id_, id(), label(),
+ protocol(), state(), messages_sent(),
+ messages_received(), bytes_sent(), bytes_received()};
+ return stats;
}
-void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
- RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP);
- if (send_ssrc_set_) {
- return;
- }
- send_ssrc_ = send_ssrc;
- send_ssrc_set_ = true;
- UpdateState();
-}
-
-void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
- const rtc::CopyOnWriteBuffer& payload) {
- if (data_channel_type_ == cricket::DCT_RTP && params.ssrc != receive_ssrc_) {
- return;
- }
- if (IsSctpLike(data_channel_type_) && params.sid != config_.id) {
+void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (params.sid != config_.id) {
return;
}
if (params.type == cricket::DMT_CONTROL) {
- RTC_DCHECK(IsSctpLike(data_channel_type_));
if (handshake_state_ != kHandshakeWaitingForAck) {
// Ignore it if we are not expecting an ACK message.
RTC_LOG(LS_WARNING)
@@ -448,11 +441,9 @@ void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
queued_received_data_.Clear();
- if (data_channel_type_ != cricket::DCT_RTP) {
- CloseAbruptlyWithError(
- RTCError(RTCErrorType::RESOURCE_EXHAUSTED,
- "Queued received data exceeds the max buffer size."));
- }
+ CloseAbruptlyWithError(
+ RTCError(RTCErrorType::RESOURCE_EXHAUSTED,
+ "Queued received data exceeds the max buffer size."));
return;
}
@@ -460,7 +451,9 @@ void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
}
}
-void DataChannel::OnChannelReady(bool writable) {
+void SctpDataChannel::OnTransportReady(bool writable) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
writable_ = writable;
if (!writable) {
return;
@@ -468,10 +461,13 @@ void DataChannel::OnChannelReady(bool writable) {
SendQueuedControlMessages();
SendQueuedDataMessages();
+
UpdateState();
}
-void DataChannel::CloseAbruptlyWithError(RTCError error) {
+void SctpDataChannel::CloseAbruptlyWithError(RTCError error) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
if (state_ == kClosed) {
return;
}
@@ -481,8 +477,9 @@ void DataChannel::CloseAbruptlyWithError(RTCError error) {
}
// Closing abruptly means any queued data gets thrown away.
- queued_send_data_.Clear();
buffered_amount_ = 0;
+
+ queued_send_data_.Clear();
queued_control_data_.Clear();
// Still go to "kClosing" before "kClosed", since observers may be expecting
@@ -492,41 +489,38 @@ void DataChannel::CloseAbruptlyWithError(RTCError error) {
SetState(kClosed);
}
-void DataChannel::CloseAbruptlyWithDataChannelFailure(
+void SctpDataChannel::CloseAbruptlyWithDataChannelFailure(
const std::string& message) {
RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, message);
error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE);
CloseAbruptlyWithError(std::move(error));
}
-void DataChannel::UpdateState() {
- // UpdateState determines what to do from a few state variables. Include
+void SctpDataChannel::UpdateState() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ // UpdateState determines what to do from a few state variables. Include
// all conditions required for each state transition here for
- // clarity. OnChannelReady(true) will send any queued data and then invoke
+ // clarity. OnTransportReady(true) will send any queued data and then invoke
// UpdateState().
+
switch (state_) {
case kConnecting: {
- if (send_ssrc_set_ == receive_ssrc_set_) {
- if (data_channel_type_ == cricket::DCT_RTP && !connected_to_provider_) {
- connected_to_provider_ = provider_->ConnectDataChannel(this);
+ if (connected_to_provider_) {
+ if (handshake_state_ == kHandshakeShouldSendOpen) {
+ rtc::CopyOnWriteBuffer payload;
+ WriteDataChannelOpenMessage(label_, config_, &payload);
+ SendControlMessage(payload);
+ } else if (handshake_state_ == kHandshakeShouldSendAck) {
+ rtc::CopyOnWriteBuffer payload;
+ WriteDataChannelOpenAckMessage(&payload);
+ SendControlMessage(payload);
}
- if (connected_to_provider_) {
- if (handshake_state_ == kHandshakeShouldSendOpen) {
- rtc::CopyOnWriteBuffer payload;
- WriteDataChannelOpenMessage(label_, config_, &payload);
- SendControlMessage(payload);
- } else if (handshake_state_ == kHandshakeShouldSendAck) {
- rtc::CopyOnWriteBuffer payload;
- WriteDataChannelOpenAckMessage(&payload);
- SendControlMessage(payload);
- }
- if (writable_ && (handshake_state_ == kHandshakeReady ||
- handshake_state_ == kHandshakeWaitingForAck)) {
- SetState(kOpen);
- // If we have received buffers before the channel got writable.
- // Deliver them now.
- DeliverQueuedReceivedData();
- }
+ if (writable_ && (handshake_state_ == kHandshakeReady ||
+ handshake_state_ == kHandshakeWaitingForAck)) {
+ SetState(kOpen);
+ // If we have received buffers before the channel got writable.
+ // Deliver them now.
+ DeliverQueuedReceivedData();
}
}
break;
@@ -538,25 +532,14 @@ void DataChannel::UpdateState() {
// Wait for all queued data to be sent before beginning the closing
// procedure.
if (queued_send_data_.Empty() && queued_control_data_.Empty()) {
- if (data_channel_type_ == cricket::DCT_RTP) {
- // For RTP data channels, we can go to "closed" after we finish
- // sending data and the send/recv SSRCs are unset.
- if (connected_to_provider_) {
- DisconnectFromProvider();
- }
- if (!send_ssrc_set_ && !receive_ssrc_set_) {
- SetState(kClosed);
- }
- } else {
- // For SCTP data channels, we need to wait for the closing procedure
- // to complete; after calling RemoveSctpDataStream,
- // OnClosingProcedureComplete will end up called asynchronously
- // afterwards.
- if (connected_to_provider_ && !started_closing_procedure_ &&
- config_.id >= 0) {
- started_closing_procedure_ = true;
- provider_->RemoveSctpDataStream(config_.id);
- }
+ // For SCTP data channels, we need to wait for the closing procedure
+ // to complete; after calling RemoveSctpDataStream,
+ // OnClosingProcedureComplete will end up called asynchronously
+ // afterwards.
+ if (connected_to_provider_ && !started_closing_procedure_ &&
+ config_.id >= 0) {
+ started_closing_procedure_ = true;
+ provider_->RemoveSctpDataStream(config_.id);
}
}
break;
@@ -566,7 +549,8 @@ void DataChannel::UpdateState() {
}
}
-void DataChannel::SetState(DataState state) {
+void SctpDataChannel::SetState(DataState state) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == state) {
return;
}
@@ -582,7 +566,8 @@ void DataChannel::SetState(DataState state) {
}
}
-void DataChannel::DisconnectFromProvider() {
+void SctpDataChannel::DisconnectFromProvider() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (!connected_to_provider_)
return;
@@ -590,7 +575,8 @@ void DataChannel::DisconnectFromProvider() {
connected_to_provider_ = false;
}
-void DataChannel::DeliverQueuedReceivedData() {
+void SctpDataChannel::DeliverQueuedReceivedData() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (!observer_) {
return;
}
@@ -603,7 +589,8 @@ void DataChannel::DeliverQueuedReceivedData() {
}
}
-void DataChannel::SendQueuedDataMessages() {
+void SctpDataChannel::SendQueuedDataMessages() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (queued_send_data_.Empty()) {
return;
}
@@ -620,28 +607,25 @@ void DataChannel::SendQueuedDataMessages() {
}
}
-bool DataChannel::SendDataMessage(const DataBuffer& buffer,
- bool queue_if_blocked) {
+bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer,
+ bool queue_if_blocked) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
cricket::SendDataParams send_params;
- if (IsSctpLike(data_channel_type_)) {
- send_params.ordered = config_.ordered;
- // Send as ordered if it is still going through OPEN/ACK signaling.
- if (handshake_state_ != kHandshakeReady && !config_.ordered) {
- send_params.ordered = true;
- RTC_LOG(LS_VERBOSE)
- << "Sending data as ordered for unordered DataChannel "
- "because the OPEN_ACK message has not been received.";
- }
-
- send_params.max_rtx_count =
- config_.maxRetransmits ? *config_.maxRetransmits : -1;
- send_params.max_rtx_ms =
- config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1;
- send_params.sid = config_.id;
- } else {
- send_params.ssrc = send_ssrc_;
+ send_params.ordered = config_.ordered;
+ // Send as ordered if it is still going through OPEN/ACK signaling.
+ if (handshake_state_ != kHandshakeReady && !config_.ordered) {
+ send_params.ordered = true;
+ RTC_LOG(LS_VERBOSE)
+ << "Sending data as ordered for unordered DataChannel "
+ "because the OPEN_ACK message has not been received.";
}
+
+ send_params.max_rtx_count =
+ config_.maxRetransmits ? *config_.maxRetransmits : -1;
+ send_params.max_rtx_ms =
+ config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1;
+ send_params.sid = config_.id;
send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
@@ -659,10 +643,6 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer,
return true;
}
- if (!IsSctpLike(data_channel_type_)) {
- return false;
- }
-
if (send_result == cricket::SDR_BLOCK) {
if (!queue_if_blocked || QueueSendDataMessage(buffer)) {
return false;
@@ -679,7 +659,8 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer,
return false;
}
-bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+bool SctpDataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
size_t start_buffered_amount = queued_send_data_.byte_count();
if (start_buffered_amount + buffer.size() > kMaxQueuedSendDataBytes) {
RTC_LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
@@ -689,7 +670,8 @@ bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
return true;
}
-void DataChannel::SendQueuedControlMessages() {
+void SctpDataChannel::SendQueuedControlMessages() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
PacketQueue control_packets;
control_packets.Swap(&queued_control_data_);
@@ -699,16 +681,18 @@ void DataChannel::SendQueuedControlMessages() {
}
}
-void DataChannel::QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
+void SctpDataChannel::QueueControlMessage(
+ const rtc::CopyOnWriteBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
queued_control_data_.PushBack(std::make_unique<DataBuffer>(buffer, true));
}
-bool DataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
- bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
-
- RTC_DCHECK(IsSctpLike(data_channel_type_));
+bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK(writable_);
RTC_DCHECK_GE(config_.id, 0);
+
+ bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
RTC_DCHECK(!is_open_message || !config_.negotiated);
cricket::SendDataParams send_params;
@@ -742,7 +726,7 @@ bool DataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
}
// static
-void DataChannel::ResetInternalIdAllocatorForTesting(int new_value) {
+void SctpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) {
g_unique_id = new_value;
}
diff --git a/pc/sctp_data_channel.h b/pc/sctp_data_channel.h
new file mode 100644
index 0000000000..871f18af5c
--- /dev/null
+++ b/pc/sctp_data_channel.h
@@ -0,0 +1,285 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SCTP_DATA_CHANNEL_H_
+#define PC_SCTP_DATA_CHANNEL_H_
+
+#include <memory>
+#include <set>
+#include <string>
+
+#include "api/data_channel_interface.h"
+#include "api/priority.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/base/media_channel.h"
+#include "pc/data_channel_utils.h"
+#include "rtc_base/async_invoker.h"
+#include "rtc_base/ssl_stream_adapter.h" // For SSLRole
+#include "rtc_base/third_party/sigslot/sigslot.h"
+
+namespace webrtc {
+
+class SctpDataChannel;
+
+// TODO(deadbeef): Get rid of this and have SctpDataChannel depend on
+// SctpTransportInternal (pure virtual SctpTransport interface) instead.
+class SctpDataChannelProviderInterface {
+ public:
+ // Sends the data to the transport.
+ virtual bool SendData(const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) = 0;
+ // Connects to the transport signals.
+ virtual bool ConnectDataChannel(SctpDataChannel* data_channel) = 0;
+ // Disconnects from the transport signals.
+ virtual void DisconnectDataChannel(SctpDataChannel* data_channel) = 0;
+ // Adds the data channel SID to the transport for SCTP.
+ virtual void AddSctpDataStream(int sid) = 0;
+ // Begins the closing procedure by sending an outgoing stream reset. Still
+ // need to wait for callbacks to tell when this completes.
+ virtual void RemoveSctpDataStream(int sid) = 0;
+ // Returns true if the transport channel is ready to send data.
+ virtual bool ReadyToSendData() const = 0;
+
+ protected:
+ virtual ~SctpDataChannelProviderInterface() {}
+};
+
+// TODO(tommi): Change to not inherit from DataChannelInit but to have it as
+// a const member. Block access to the 'id' member since it cannot be const.
+struct InternalDataChannelInit : public DataChannelInit {
+ enum OpenHandshakeRole { kOpener, kAcker, kNone };
+ // The default role is kOpener because the default |negotiated| is false.
+ InternalDataChannelInit() : open_handshake_role(kOpener) {}
+ explicit InternalDataChannelInit(const DataChannelInit& base);
+ OpenHandshakeRole open_handshake_role;
+};
+
+// Helper class to allocate unique IDs for SCTP DataChannels.
+class SctpSidAllocator {
+ public:
+ // Gets the first unused odd/even id based on the DTLS role. If |role| is
+ // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
+ // otherwise, the id starts from 1 and takes odd numbers.
+ // Returns false if no ID can be allocated.
+ bool AllocateSid(rtc::SSLRole role, int* sid);
+
+ // Attempts to reserve a specific sid. Returns false if it's unavailable.
+ bool ReserveSid(int sid);
+
+ // Indicates that |sid| isn't in use any more, and is thus available again.
+ void ReleaseSid(int sid);
+
+ private:
+ // Checks if |sid| is available to be assigned to a new SCTP data channel.
+ bool IsSidAvailable(int sid) const;
+
+ std::set<int> used_sids_;
+};
+
+// SctpDataChannel is an implementation of the DataChannelInterface based on
+// SctpTransport. It provides an implementation of unreliable or
+// reliabledata channels.
+
+// DataChannel states:
+// kConnecting: The channel has been created the transport might not yet be
+// ready.
+// kOpen: The open handshake has been performed (if relevant) and the data
+// channel is able to send messages.
+// kClosing: DataChannelInterface::Close has been called, or the remote side
+// initiated the closing procedure, but the closing procedure has not
+// yet finished.
+// kClosed: The closing handshake is finished (possibly initiated from this,
+// side, possibly from the peer).
+//
+// How the closing procedure works for SCTP:
+// 1. Alice calls Close(), state changes to kClosing.
+// 2. Alice finishes sending any queued data.
+// 3. Alice calls RemoveSctpDataStream, sends outgoing stream reset.
+// 4. Bob receives incoming stream reset; OnClosingProcedureStartedRemotely
+// called.
+// 5. Bob sends outgoing stream reset.
+// 6. Alice receives incoming reset, Bob receives acknowledgement. Both receive
+// OnClosingProcedureComplete callback and transition to kClosed.
+class SctpDataChannel : public DataChannelInterface,
+ public sigslot::has_slots<> {
+ public:
+ static rtc::scoped_refptr<SctpDataChannel> Create(
+ SctpDataChannelProviderInterface* provider,
+ const std::string& label,
+ const InternalDataChannelInit& config,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread);
+
+ // Instantiates an API proxy for a SctpDataChannel instance that will be
+ // handed out to external callers.
+ static rtc::scoped_refptr<DataChannelInterface> CreateProxy(
+ rtc::scoped_refptr<SctpDataChannel> channel);
+
+ void RegisterObserver(DataChannelObserver* observer) override;
+ void UnregisterObserver() override;
+
+ std::string label() const override { return label_; }
+ bool reliable() const override;
+ bool ordered() const override { return config_.ordered; }
+ // Backwards compatible accessors
+ uint16_t maxRetransmitTime() const override {
+ return config_.maxRetransmitTime ? *config_.maxRetransmitTime
+ : static_cast<uint16_t>(-1);
+ }
+ uint16_t maxRetransmits() const override {
+ return config_.maxRetransmits ? *config_.maxRetransmits
+ : static_cast<uint16_t>(-1);
+ }
+ absl::optional<int> maxPacketLifeTime() const override {
+ return config_.maxRetransmitTime;
+ }
+ absl::optional<int> maxRetransmitsOpt() const override {
+ return config_.maxRetransmits;
+ }
+ std::string protocol() const override { return config_.protocol; }
+ bool negotiated() const override { return config_.negotiated; }
+ int id() const override { return config_.id; }
+ Priority priority() const override {
+ return config_.priority ? *config_.priority : Priority::kLow;
+ }
+
+ virtual int internal_id() const { return internal_id_; }
+
+ uint64_t buffered_amount() const override;
+ void Close() override;
+ DataState state() const override;
+ RTCError error() const override;
+ uint32_t messages_sent() const override;
+ uint64_t bytes_sent() const override;
+ uint32_t messages_received() const override;
+ uint64_t bytes_received() const override;
+ bool Send(const DataBuffer& buffer) override;
+
+ // Close immediately, ignoring any queued data or closing procedure.
+ // This is called when the underlying SctpTransport is being destroyed.
+ // It is also called by the PeerConnection if SCTP ID assignment fails.
+ void CloseAbruptlyWithError(RTCError error);
+ // Specializations of CloseAbruptlyWithError
+ void CloseAbruptlyWithDataChannelFailure(const std::string& message);
+ void CloseAbruptlyWithSctpCauseCode(const std::string& message,
+ uint16_t cause_code);
+
+ // Slots for provider to connect signals to.
+ //
+ // TODO(deadbeef): Make these private once we're hooking up signals ourselves,
+ // instead of relying on SctpDataChannelProviderInterface.
+
+ // Called when the SctpTransport's ready to use. That can happen when we've
+ // finished negotiation, or if the channel was created after negotiation has
+ // already finished.
+ void OnTransportReady(bool writable);
+
+ void OnDataReceived(const cricket::ReceiveDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload);
+
+ // Sets the SCTP sid and adds to transport layer if not set yet. Should only
+ // be called once.
+ void SetSctpSid(int sid);
+ // The remote side started the closing procedure by resetting its outgoing
+ // stream (our incoming stream). Sets state to kClosing.
+ void OnClosingProcedureStartedRemotely(int sid);
+ // The closing procedure is complete; both incoming and outgoing stream
+ // resets are done and the channel can transition to kClosed. Called
+ // asynchronously after RemoveSctpDataStream.
+ void OnClosingProcedureComplete(int sid);
+ // Called when the transport channel is created.
+ // Only needs to be called for SCTP data channels.
+ void OnTransportChannelCreated();
+ // Called when the transport channel is unusable.
+ // This method makes sure the DataChannel is disconnected and changes state
+ // to kClosed.
+ void OnTransportChannelClosed();
+
+ DataChannelStats GetStats() const;
+
+ // Emitted when state transitions to kOpen.
+ sigslot::signal1<DataChannelInterface*> SignalOpened;
+ // Emitted when state transitions to kClosed.
+ // This signal can be used to tell when the channel's sid is free.
+ sigslot::signal1<DataChannelInterface*> SignalClosed;
+
+ // Reset the allocator for internal ID values for testing, so that
+ // the internal IDs generated are predictable. Test only.
+ static void ResetInternalIdAllocatorForTesting(int new_value);
+
+ protected:
+ SctpDataChannel(const InternalDataChannelInit& config,
+ SctpDataChannelProviderInterface* client,
+ const std::string& label,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread);
+ ~SctpDataChannel() override;
+
+ private:
+ // The OPEN(_ACK) signaling state.
+ enum HandshakeState {
+ kHandshakeInit,
+ kHandshakeShouldSendOpen,
+ kHandshakeShouldSendAck,
+ kHandshakeWaitingForAck,
+ kHandshakeReady
+ };
+
+ bool Init();
+ void UpdateState();
+ void SetState(DataState state);
+ void DisconnectFromProvider();
+
+ void DeliverQueuedReceivedData();
+
+ void SendQueuedDataMessages();
+ bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked);
+ bool QueueSendDataMessage(const DataBuffer& buffer);
+
+ void SendQueuedControlMessages();
+ void QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer);
+ bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer);
+
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const network_thread_;
+ const int internal_id_;
+ const std::string label_;
+ const InternalDataChannelInit config_;
+ DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr;
+ DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting;
+ RTCError error_ RTC_GUARDED_BY(signaling_thread_);
+ uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ // Number of bytes of data that have been queued using Send(). Increased
+ // before each transport send and decreased after each successful send.
+ uint64_t buffered_amount_ RTC_GUARDED_BY(signaling_thread_) = 0;
+ SctpDataChannelProviderInterface* const provider_
+ RTC_GUARDED_BY(signaling_thread_);
+ HandshakeState handshake_state_ RTC_GUARDED_BY(signaling_thread_) =
+ kHandshakeInit;
+ bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false;
+ bool writable_ RTC_GUARDED_BY(signaling_thread_) = false;
+ // Did we already start the graceful SCTP closing procedure?
+ bool started_closing_procedure_ RTC_GUARDED_BY(signaling_thread_) = false;
+ // Control messages that always have to get sent out before any queued
+ // data.
+ PacketQueue queued_control_data_ RTC_GUARDED_BY(signaling_thread_);
+ PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_);
+ PacketQueue queued_send_data_ RTC_GUARDED_BY(signaling_thread_);
+ rtc::AsyncInvoker invoker_ RTC_GUARDED_BY(signaling_thread_);
+};
+
+} // namespace webrtc
+
+#endif // PC_SCTP_DATA_CHANNEL_H_
diff --git a/pc/sctp_transport.cc b/pc/sctp_transport.cc
index 532e91c67d..ea1165f94a 100644
--- a/pc/sctp_transport.cc
+++ b/pc/sctp_transport.cc
@@ -39,7 +39,7 @@ SctpTransport::~SctpTransport() {
}
SctpTransportInformation SctpTransport::Information() const {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return info_;
}
@@ -66,7 +66,7 @@ void SctpTransport::Clear() {
RTC_DCHECK_RUN_ON(owner_thread_);
RTC_DCHECK(internal());
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
// Note that we delete internal_sctp_transport_, but
// only drop the reference to dtls_transport_.
dtls_transport_ = nullptr;
@@ -80,7 +80,7 @@ void SctpTransport::SetDtlsTransport(
RTC_DCHECK_RUN_ON(owner_thread_);
SctpTransportState next_state;
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
next_state = info_.state();
dtls_transport_ = transport;
if (internal_sctp_transport_) {
@@ -103,7 +103,7 @@ void SctpTransport::Start(int local_port,
int remote_port,
int max_message_size) {
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
// Record max message size on calling thread.
info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(),
max_message_size, info_.MaxChannels());
@@ -125,7 +125,7 @@ void SctpTransport::UpdateInformation(SctpTransportState state) {
bool must_send_update;
SctpTransportInformation info_copy(SctpTransportState::kNew);
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
must_send_update = (state != info_.state());
// TODO(https://bugs.webrtc.org/10358): Update max channels from internal
// SCTP transport when available.
@@ -149,7 +149,7 @@ void SctpTransport::UpdateInformation(SctpTransportState state) {
void SctpTransport::OnAssociationChangeCommunicationUp() {
RTC_DCHECK_RUN_ON(owner_thread_);
{
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK(internal_sctp_transport_);
if (internal_sctp_transport_->max_outbound_streams() &&
internal_sctp_transport_->max_inbound_streams()) {
diff --git a/pc/sctp_transport.h b/pc/sctp_transport.h
index a13a58c68e..a902ff02e8 100644
--- a/pc/sctp_transport.h
+++ b/pc/sctp_transport.h
@@ -17,6 +17,7 @@
#include "api/sctp_transport_interface.h"
#include "media/sctp/sctp_transport.h"
#include "pc/dtls_transport.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -47,12 +48,12 @@ class SctpTransport : public SctpTransportInterface,
// internal() to be functions on the webrtc::SctpTransport interface,
// and make the internal() function private.
cricket::SctpTransportInternal* internal() {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return internal_sctp_transport_.get();
}
const cricket::SctpTransportInternal* internal() const {
- rtc::CritScope scope(&lock_);
+ MutexLock lock(&lock_);
return internal_sctp_transport_.get();
}
@@ -71,7 +72,7 @@ class SctpTransport : public SctpTransportInterface,
// Note - owner_thread never changes, but can't be const if we do
// Invoke() on it.
rtc::Thread* owner_thread_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
// Variables accessible off-thread, guarded by lock_
SctpTransportInformation info_ RTC_GUARDED_BY(lock_);
std::unique_ptr<cricket::SctpTransportInternal> internal_sctp_transport_
diff --git a/pc/sctp_utils.cc b/pc/sctp_utils.cc
index 9cdff0e4da..1882a1525f 100644
--- a/pc/sctp_utils.cc
+++ b/pc/sctp_utils.cc
@@ -13,6 +13,7 @@
#include <stddef.h>
#include <stdint.h>
+#include "api/priority.h"
#include "rtc_base/byte_buffer.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/logging.h"
@@ -34,6 +35,15 @@ enum DataChannelOpenMessageChannelType {
DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
};
+// Values of priority in the DC open protocol message.
+// These are compared against an integer, so are enum, not enum class.
+enum DataChannelPriority {
+ DCO_PRIORITY_VERY_LOW = 128,
+ DCO_PRIORITY_LOW = 256,
+ DCO_PRIORITY_MEDIUM = 512,
+ DCO_PRIORITY_HIGH = 1024,
+};
+
bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) {
// Format defined at
// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
@@ -76,6 +86,18 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload,
<< "Could not read OPEN message reliabilility prioirty.";
return false;
}
+ // Parse priority as defined in
+ // https://w3c.github.io/webrtc-priority/#rtcdatachannel-processing-steps
+ if (priority <= DCO_PRIORITY_VERY_LOW) {
+ config->priority = Priority::kVeryLow;
+ } else if (priority <= DCO_PRIORITY_LOW) {
+ config->priority = Priority::kLow;
+ } else if (priority <= DCO_PRIORITY_MEDIUM) {
+ config->priority = Priority::kMedium;
+ } else {
+ config->priority = Priority::kHigh;
+ }
+
uint32_t reliability_param;
if (!buffer.ReadUInt32(&reliability_param)) {
RTC_LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
@@ -146,6 +168,24 @@ bool WriteDataChannelOpenMessage(const std::string& label,
uint8_t channel_type = 0;
uint32_t reliability_param = 0;
uint16_t priority = 0;
+ // Set priority according to
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-12#section-6.4
+ if (config.priority) {
+ switch (*config.priority) {
+ case Priority::kVeryLow:
+ priority = DCO_PRIORITY_VERY_LOW;
+ break;
+ case Priority::kLow:
+ priority = DCO_PRIORITY_LOW;
+ break;
+ case Priority::kMedium:
+ priority = DCO_PRIORITY_MEDIUM;
+ break;
+ case Priority::kHigh:
+ priority = DCO_PRIORITY_HIGH;
+ break;
+ }
+ }
if (config.ordered) {
if (config.maxRetransmits) {
channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
diff --git a/pc/sctp_utils_unittest.cc b/pc/sctp_utils_unittest.cc
index 70c627714d..690a9dc523 100644
--- a/pc/sctp_utils_unittest.cc
+++ b/pc/sctp_utils_unittest.cc
@@ -45,6 +45,13 @@ class SctpUtilsTest : public ::testing::Test {
}
ASSERT_TRUE(buffer.ReadUInt16(&priority));
+ if (config.priority) {
+ // Exact values are checked by round-trip conversion, but
+ // all values defined are greater than zero.
+ EXPECT_GT(priority, 0);
+ } else {
+ EXPECT_EQ(priority, 0);
+ }
ASSERT_TRUE(buffer.ReadUInt32(&reliability));
if (config.maxRetransmits || config.maxRetransmitTime) {
@@ -136,6 +143,27 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
EXPECT_FALSE(output_config.maxRetransmitTime);
}
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.protocol = "y";
+ config.priority = webrtc::Priority::kVeryLow;
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ ASSERT_TRUE(output_config.priority);
+ EXPECT_EQ(*config.priority, *output_config.priority);
+}
+
TEST_F(SctpUtilsTest, WriteParseAckMessage) {
rtc::CopyOnWriteBuffer packet;
webrtc::WriteDataChannelOpenAckMessage(&packet);
diff --git a/pc/session_description.h b/pc/session_description.h
index bfd19b8c7a..3405accbf3 100644
--- a/pc/session_description.h
+++ b/pc/session_description.h
@@ -147,13 +147,6 @@ class MediaContentDescription {
rtp_header_extensions_.push_back(ext);
rtp_header_extensions_set_ = true;
}
- virtual void AddRtpHeaderExtension(const cricket::RtpHeaderExtension& ext) {
- webrtc::RtpExtension webrtc_extension;
- webrtc_extension.uri = ext.uri;
- webrtc_extension.id = ext.id;
- rtp_header_extensions_.push_back(webrtc_extension);
- rtp_header_extensions_set_ = true;
- }
virtual void ClearRtpHeaderExtensions() {
rtp_header_extensions_.clear();
rtp_header_extensions_set_ = true;
@@ -253,13 +246,6 @@ class MediaContentDescription {
receive_rids_ = rids;
}
- virtual const absl::optional<std::string>& alt_protocol() const {
- return alt_protocol_;
- }
- virtual void set_alt_protocol(const absl::optional<std::string>& protocol) {
- alt_protocol_ = protocol;
- }
-
protected:
bool rtcp_mux_ = false;
bool rtcp_reduced_size_ = false;
@@ -282,8 +268,6 @@ class MediaContentDescription {
SimulcastDescription simulcast_;
std::vector<RidDescription> receive_rids_;
- absl::optional<std::string> alt_protocol_;
-
private:
// Copy function that returns a raw pointer. Caller will assert ownership.
// Should only be called by the Clone() function. Must be implemented
diff --git a/pc/srtp_filter.h b/pc/srtp_filter.h
index 5b6c99dcb5..fc60a356fe 100644
--- a/pc/srtp_filter.h
+++ b/pc/srtp_filter.h
@@ -24,7 +24,6 @@
#include "pc/session_description.h"
#include "rtc_base/buffer.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/thread_checker.h"
diff --git a/pc/srtp_session.cc b/pc/srtp_session.cc
index 5ded455ee5..3aa488003f 100644
--- a/pc/srtp_session.cc
+++ b/pc/srtp_session.cc
@@ -13,7 +13,6 @@
#include "absl/base/attributes.h"
#include "media/base/rtp_utils.h"
#include "pc/external_hmac.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "system_wrappers/include/metrics.h"
@@ -364,16 +363,16 @@ bool SrtpSession::UpdateKey(int type,
}
ABSL_CONST_INIT int g_libsrtp_usage_count = 0;
-ABSL_CONST_INIT rtc::GlobalLock g_libsrtp_lock;
+ABSL_CONST_INIT webrtc::GlobalMutex g_libsrtp_lock(absl::kConstInit);
void ProhibitLibsrtpInitialization() {
- rtc::GlobalLockScope ls(&g_libsrtp_lock);
+ webrtc::GlobalMutexLock ls(&g_libsrtp_lock);
++g_libsrtp_usage_count;
}
// static
bool SrtpSession::IncrementLibsrtpUsageCountAndMaybeInit() {
- rtc::GlobalLockScope ls(&g_libsrtp_lock);
+ webrtc::GlobalMutexLock ls(&g_libsrtp_lock);
RTC_DCHECK_GE(g_libsrtp_usage_count, 0);
if (g_libsrtp_usage_count == 0) {
@@ -402,7 +401,7 @@ bool SrtpSession::IncrementLibsrtpUsageCountAndMaybeInit() {
// static
void SrtpSession::DecrementLibsrtpUsageCountAndMaybeDeinit() {
- rtc::GlobalLockScope ls(&g_libsrtp_lock);
+ webrtc::GlobalMutexLock ls(&g_libsrtp_lock);
RTC_DCHECK_GE(g_libsrtp_usage_count, 1);
if (--g_libsrtp_usage_count == 0) {
diff --git a/pc/srtp_session.h b/pc/srtp_session.h
index 0a26c02c9f..84445965b2 100644
--- a/pc/srtp_session.h
+++ b/pc/srtp_session.h
@@ -14,6 +14,8 @@
#include <vector>
#include "api/scoped_refptr.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
// Forward declaration to avoid pulling in libsrtp headers here
@@ -123,7 +125,7 @@ class SrtpSession {
int rtp_auth_tag_len_ = 0;
int rtcp_auth_tag_len_ = 0;
bool inited_ = false;
- static rtc::GlobalLock lock_;
+ static webrtc::GlobalMutex lock_;
int last_send_seq_num_ = -1;
bool external_auth_active_ = false;
bool external_auth_enabled_ = false;
diff --git a/pc/stats_collector.cc b/pc/stats_collector.cc
index 0509c6dc19..73d4510fa8 100644
--- a/pc/stats_collector.cc
+++ b/pc/stats_collector.cc
@@ -1146,19 +1146,20 @@ void StatsCollector::ExtractDataInfo() {
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
- for (const auto& dc : pc_->sctp_data_channels()) {
+ std::vector<DataChannelStats> data_stats = pc_->GetDataChannelStats();
+ for (const auto& stats : data_stats) {
StatsReport::Id id(StatsReport::NewTypedIntId(
- StatsReport::kStatsReportTypeDataChannel, dc->id()));
+ StatsReport::kStatsReportTypeDataChannel, stats.id));
StatsReport* report = reports_.ReplaceOrAddNew(id);
report->set_timestamp(stats_gathering_started_);
- report->AddString(StatsReport::kStatsValueNameLabel, dc->label());
+ report->AddString(StatsReport::kStatsValueNameLabel, stats.label);
// Filter out the initial id (-1).
- if (dc->id() >= 0) {
- report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+ if (stats.id >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id);
}
- report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol());
+ report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol);
report->AddString(StatsReport::kStatsValueNameState,
- DataChannelInterface::DataStateString(dc->state()));
+ DataChannelInterface::DataStateString(stats.state));
}
}
diff --git a/pc/stats_collector_unittest.cc b/pc/stats_collector_unittest.cc
index ab469729ae..a5666ff6b6 100644
--- a/pc/stats_collector_unittest.cc
+++ b/pc/stats_collector_unittest.cc
@@ -23,9 +23,9 @@
#include "call/call.h"
#include "media/base/media_channel.h"
#include "modules/audio_processing/include/audio_processing_statistics.h"
-#include "pc/data_channel.h"
#include "pc/media_stream.h"
#include "pc/media_stream_track.h"
+#include "pc/sctp_data_channel.h"
#include "pc/test/fake_peer_connection_for_stats.h"
#include "pc/test/fake_video_track_source.h"
#include "pc/test/mock_rtp_receiver_internal.h"
diff --git a/pc/test/fake_audio_capture_module.cc b/pc/test/fake_audio_capture_module.cc
index db0886ddad..a395df0409 100644
--- a/pc/test/fake_audio_capture_module.cc
+++ b/pc/test/fake_audio_capture_module.cc
@@ -47,7 +47,9 @@ FakeAudioCaptureModule::FakeAudioCaptureModule()
current_mic_level_(kMaxVolume),
started_(false),
next_frame_time_(0),
- frames_received_(0) {}
+ frames_received_(0) {
+ process_thread_checker_.Detach();
+}
FakeAudioCaptureModule::~FakeAudioCaptureModule() {
if (process_thread_) {
@@ -65,7 +67,7 @@ rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
}
int FakeAudioCaptureModule::frames_received() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return frames_received_;
}
@@ -77,7 +79,7 @@ int32_t FakeAudioCaptureModule::ActiveAudioLayer(
int32_t FakeAudioCaptureModule::RegisterAudioCallback(
webrtc::AudioTransport* audio_callback) {
- rtc::CritScope cs(&crit_callback_);
+ webrtc::MutexLock lock(&mutex_);
audio_callback_ = audio_callback;
return 0;
}
@@ -181,7 +183,7 @@ int32_t FakeAudioCaptureModule::StartPlayout() {
return -1;
}
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
playing_ = true;
}
bool start = true;
@@ -192,7 +194,7 @@ int32_t FakeAudioCaptureModule::StartPlayout() {
int32_t FakeAudioCaptureModule::StopPlayout() {
bool start = false;
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
playing_ = false;
start = ShouldStartProcessing();
}
@@ -201,7 +203,7 @@ int32_t FakeAudioCaptureModule::StopPlayout() {
}
bool FakeAudioCaptureModule::Playing() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return playing_;
}
@@ -210,7 +212,7 @@ int32_t FakeAudioCaptureModule::StartRecording() {
return -1;
}
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
recording_ = true;
}
bool start = true;
@@ -221,7 +223,7 @@ int32_t FakeAudioCaptureModule::StartRecording() {
int32_t FakeAudioCaptureModule::StopRecording() {
bool start = false;
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
recording_ = false;
start = ShouldStartProcessing();
}
@@ -230,7 +232,7 @@ int32_t FakeAudioCaptureModule::StopRecording() {
}
bool FakeAudioCaptureModule::Recording() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return recording_;
}
@@ -288,13 +290,13 @@ int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable(
}
int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
current_mic_level_ = volume;
return 0;
}
int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
*volume = current_mic_level_;
return 0;
}
@@ -448,29 +450,34 @@ void FakeAudioCaptureModule::UpdateProcessing(bool start) {
if (process_thread_) {
process_thread_->Stop();
process_thread_.reset(nullptr);
+ process_thread_checker_.Detach();
}
+ webrtc::MutexLock lock(&mutex_);
started_ = false;
}
}
void FakeAudioCaptureModule::StartProcessP() {
- RTC_CHECK(process_thread_->IsCurrent());
- if (started_) {
- // Already started.
- return;
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (started_) {
+ // Already started.
+ return;
+ }
}
ProcessFrameP();
}
void FakeAudioCaptureModule::ProcessFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- if (!started_) {
- next_frame_time_ = rtc::TimeMillis();
- started_ = true;
- }
-
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
+ if (!started_) {
+ next_frame_time_ = rtc::TimeMillis();
+ started_ = true;
+ }
+
// Receive and send frames every kTimePerFrameMs.
if (playing_) {
ReceiveFrameP();
@@ -488,24 +495,22 @@ void FakeAudioCaptureModule::ProcessFrameP() {
}
void FakeAudioCaptureModule::ReceiveFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- {
- rtc::CritScope cs(&crit_callback_);
- if (!audio_callback_) {
- return;
- }
- ResetRecBuffer();
- size_t nSamplesOut = 0;
- int64_t elapsed_time_ms = 0;
- int64_t ntp_time_ms = 0;
- if (audio_callback_->NeedMorePlayData(
- kNumberSamples, kNumberBytesPerSample, kNumberOfChannels,
- kSamplesPerSecond, rec_buffer_, nSamplesOut, &elapsed_time_ms,
- &ntp_time_ms) != 0) {
- RTC_NOTREACHED();
- }
- RTC_CHECK(nSamplesOut == kNumberSamples);
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ ResetRecBuffer();
+ size_t nSamplesOut = 0;
+ int64_t elapsed_time_ms = 0;
+ int64_t ntp_time_ms = 0;
+ if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond,
+ rec_buffer_, nSamplesOut,
+ &elapsed_time_ms, &ntp_time_ms) != 0) {
+ RTC_NOTREACHED();
}
+ RTC_CHECK(nSamplesOut == kNumberSamples);
+
// The SetBuffer() function ensures that after decoding, the audio buffer
// should contain samples of similar magnitude (there is likely to be some
// distortion due to the audio pipeline). If one sample is detected to
@@ -513,25 +518,22 @@ void FakeAudioCaptureModule::ReceiveFrameP() {
// has been received from the remote side (i.e. faked frames are not being
// pulled).
if (CheckRecBuffer(kHighSampleValue)) {
- rtc::CritScope cs(&crit_);
++frames_received_;
}
}
void FakeAudioCaptureModule::SendFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- rtc::CritScope cs(&crit_callback_);
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
if (!audio_callback_) {
return;
}
bool key_pressed = false;
- uint32_t current_mic_level = 0;
- MicrophoneVolume(&current_mic_level);
+ uint32_t current_mic_level = current_mic_level_;
if (audio_callback_->RecordedDataIsAvailable(
send_buffer_, kNumberSamples, kNumberBytesPerSample,
kNumberOfChannels, kSamplesPerSecond, kTotalDelayMs, kClockDriftMs,
current_mic_level, key_pressed, current_mic_level) != 0) {
RTC_NOTREACHED();
}
- SetMicrophoneVolume(current_mic_level);
+ current_mic_level_ = current_mic_level;
}
diff --git a/pc/test/fake_audio_capture_module.h b/pc/test/fake_audio_capture_module.h
index 0af3810290..cd57a4f127 100644
--- a/pc/test/fake_audio_capture_module.h
+++ b/pc/test/fake_audio_capture_module.h
@@ -24,8 +24,9 @@
#include "api/scoped_refptr.h"
#include "modules/audio_device/include/audio_device.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/message_handler.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
namespace rtc {
class Thread;
@@ -47,13 +48,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Returns the number of frames that have been successfully pulled by the
// instance. Note that correctly detecting success can only be done if the
// pulled frame was generated/pushed from a FakeAudioCaptureModule.
- int frames_received() const;
+ int frames_received() const RTC_LOCKS_EXCLUDED(mutex_);
int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
// Note: Calling this method from a callback may result in deadlock.
- int32_t RegisterAudioCallback(
- webrtc::AudioTransport* audio_callback) override;
+ int32_t RegisterAudioCallback(webrtc::AudioTransport* audio_callback) override
+ RTC_LOCKS_EXCLUDED(mutex_);
int32_t Init() override;
int32_t Terminate() override;
@@ -80,12 +81,12 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
int32_t InitRecording() override;
bool RecordingIsInitialized() const override;
- int32_t StartPlayout() override;
- int32_t StopPlayout() override;
- bool Playing() const override;
- int32_t StartRecording() override;
- int32_t StopRecording() override;
- bool Recording() const override;
+ int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Playing() const RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Recording() const RTC_LOCKS_EXCLUDED(mutex_) override;
int32_t InitSpeaker() override;
bool SpeakerIsInitialized() const override;
@@ -99,8 +100,10 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
int32_t MicrophoneVolumeIsAvailable(bool* available) override;
- int32_t SetMicrophoneVolume(uint32_t volume) override;
- int32_t MicrophoneVolume(uint32_t* volume) const override;
+ int32_t SetMicrophoneVolume(uint32_t volume)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
@@ -170,26 +173,28 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Returns true/false depending on if recording or playback has been
// enabled/started.
- bool ShouldStartProcessing();
+ bool ShouldStartProcessing() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Starts or stops the pushing and pulling of audio frames.
- void UpdateProcessing(bool start);
+ void UpdateProcessing(bool start) RTC_LOCKS_EXCLUDED(mutex_);
// Starts the periodic calling of ProcessFrame() in a thread safe way.
void StartProcessP();
// Periodcally called function that ensures that frames are pulled and pushed
// periodically if enabled/started.
- void ProcessFrameP();
+ void ProcessFrameP() RTC_LOCKS_EXCLUDED(mutex_);
// Pulls frames from the registered webrtc::AudioTransport.
- void ReceiveFrameP();
+ void ReceiveFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Pushes frames to the registered webrtc::AudioTransport.
- void SendFrameP();
+ void SendFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Callback for playout and recording.
- webrtc::AudioTransport* audio_callback_;
+ webrtc::AudioTransport* audio_callback_ RTC_GUARDED_BY(mutex_);
- bool recording_; // True when audio is being pushed from the instance.
- bool playing_; // True when audio is being pulled by the instance.
+ bool recording_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pushed from the instance.
+ bool playing_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pulled by the instance.
bool play_is_initialized_; // True when the instance is ready to pull audio.
bool rec_is_initialized_; // True when the instance is ready to push audio.
@@ -197,13 +202,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Input to and output from RecordedDataIsAvailable(..) makes it possible to
// modify the current mic level. The implementation does not care about the
// mic level so it just feeds back what it receives.
- uint32_t current_mic_level_;
+ uint32_t current_mic_level_ RTC_GUARDED_BY(mutex_);
// next_frame_time_ is updated in a non-drifting manner to indicate the next
// wall clock time the next frame should be generated and received. started_
// ensures that next_frame_time_ can be initialized properly on first call.
- bool started_;
- int64_t next_frame_time_;
+ bool started_ RTC_GUARDED_BY(mutex_);
+ int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_);
std::unique_ptr<rtc::Thread> process_thread_;
@@ -219,10 +224,8 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Protects variables that are accessed from process_thread_ and
// the main thread.
- rtc::CriticalSection crit_;
- // Protects |audio_callback_| that is accessed from process_thread_ and
- // the main thread.
- rtc::CriticalSection crit_callback_;
+ mutable webrtc::Mutex mutex_;
+ webrtc::SequenceChecker process_thread_checker_;
};
#endif // PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
diff --git a/pc/test/fake_audio_capture_module_unittest.cc b/pc/test/fake_audio_capture_module_unittest.cc
index 8dd252a733..63b41cdded 100644
--- a/pc/test/fake_audio_capture_module_unittest.cc
+++ b/pc/test/fake_audio_capture_module_unittest.cc
@@ -15,8 +15,8 @@
#include <algorithm>
#include "api/scoped_refptr.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/gtest.h"
class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
@@ -45,7 +45,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel) override {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
rec_buffer_bytes_ = nSamples * nBytesPerSample;
if ((rec_buffer_bytes_ == 0) ||
(rec_buffer_bytes_ >
@@ -77,7 +77,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
++pull_iterations_;
const size_t audio_buffer_size = nSamples * nBytesPerSample;
const size_t bytes_out =
@@ -91,11 +91,11 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
}
int push_iterations() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return push_iterations_;
}
int pull_iterations() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return pull_iterations_;
}
@@ -115,7 +115,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
return min_buffer_size;
}
- rtc::CriticalSection crit_;
+ mutable webrtc::Mutex mutex_;
int push_iterations_;
int pull_iterations_;
diff --git a/pc/test/fake_data_channel_provider.h b/pc/test/fake_data_channel_provider.h
index 2ada4a992d..7145225ca6 100644
--- a/pc/test/fake_data_channel_provider.h
+++ b/pc/test/fake_data_channel_provider.h
@@ -13,10 +13,11 @@
#include <set>
-#include "pc/data_channel.h"
+#include "pc/sctp_data_channel.h"
#include "rtc_base/checks.h"
-class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
+class FakeDataChannelProvider
+ : public webrtc::SctpDataChannelProviderInterface {
public:
FakeDataChannelProvider()
: send_blocked_(false),
@@ -44,7 +45,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
return true;
}
- bool ConnectDataChannel(webrtc::DataChannel* data_channel) override {
+ bool ConnectDataChannel(webrtc::SctpDataChannel* data_channel) override {
RTC_CHECK(connected_channels_.find(data_channel) ==
connected_channels_.end());
if (!transport_available_) {
@@ -55,7 +56,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
return true;
}
- void DisconnectDataChannel(webrtc::DataChannel* data_channel) override {
+ void DisconnectDataChannel(webrtc::SctpDataChannel* data_channel) override {
RTC_CHECK(connected_channels_.find(data_channel) !=
connected_channels_.end());
RTC_LOG(LS_INFO) << "DataChannel disconnected " << data_channel;
@@ -77,7 +78,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
recv_ssrcs_.erase(sid);
// Unlike the real SCTP transport, act like the closing procedure finished
// instantly, doing the same snapshot thing as below.
- for (webrtc::DataChannel* ch : std::set<webrtc::DataChannel*>(
+ for (webrtc::SctpDataChannel* ch : std::set<webrtc::SctpDataChannel*>(
connected_channels_.begin(), connected_channels_.end())) {
if (connected_channels_.count(ch)) {
ch->OnClosingProcedureComplete(sid);
@@ -93,12 +94,12 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
if (!blocked) {
// Take a snapshot of the connected channels and check to see whether
// each value is still in connected_channels_ before calling
- // OnChannelReady(). This avoids problems where the set gets modified
- // in response to OnChannelReady().
- for (webrtc::DataChannel* ch : std::set<webrtc::DataChannel*>(
+ // OnTransportReady(). This avoids problems where the set gets modified
+ // in response to OnTransportReady().
+ for (webrtc::SctpDataChannel* ch : std::set<webrtc::SctpDataChannel*>(
connected_channels_.begin(), connected_channels_.end())) {
if (connected_channels_.count(ch)) {
- ch->OnChannelReady(true);
+ ch->OnTransportReady(true);
}
}
}
@@ -116,10 +117,10 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
RTC_CHECK(transport_available_);
ready_to_send_ = ready;
if (ready) {
- std::set<webrtc::DataChannel*>::iterator it;
+ std::set<webrtc::SctpDataChannel*>::iterator it;
for (it = connected_channels_.begin(); it != connected_channels_.end();
++it) {
- (*it)->OnChannelReady(true);
+ (*it)->OnTransportReady(true);
}
}
}
@@ -130,7 +131,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
return last_send_data_params_;
}
- bool IsConnected(webrtc::DataChannel* data_channel) const {
+ bool IsConnected(webrtc::SctpDataChannel* data_channel) const {
return connected_channels_.find(data_channel) != connected_channels_.end();
}
@@ -148,7 +149,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
bool transport_available_;
bool ready_to_send_;
bool transport_error_;
- std::set<webrtc::DataChannel*> connected_channels_;
+ std::set<webrtc::SctpDataChannel*> connected_channels_;
std::set<uint32_t> send_ssrcs_;
std::set<uint32_t> recv_ssrcs_;
};
diff --git a/pc/test/fake_peer_connection_base.h b/pc/test/fake_peer_connection_base.h
index f4b27f03e1..9531c6de5b 100644
--- a/pc/test/fake_peer_connection_base.h
+++ b/pc/test/fake_peer_connection_base.h
@@ -248,17 +248,16 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
return {};
}
- sigslot::signal1<DataChannel*>& SignalDataChannelCreated() override {
- return SignalDataChannelCreated_;
+ sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() override {
+ return SignalRtpDataChannelCreated_;
}
- cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; }
-
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- return {};
+ sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() override {
+ return SignalSctpDataChannelCreated_;
}
+ cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; }
+
absl::optional<std::string> sctp_transport_name() const override {
return absl::nullopt;
}
@@ -299,7 +298,8 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
}
protected:
- sigslot::signal1<DataChannel*> SignalDataChannelCreated_;
+ sigslot::signal1<RtpDataChannel*> SignalRtpDataChannelCreated_;
+ sigslot::signal1<SctpDataChannel*> SignalSctpDataChannelCreated_;
};
} // namespace webrtc
diff --git a/pc/test/fake_peer_connection_for_stats.h b/pc/test/fake_peer_connection_for_stats.h
index c6391583f5..1a4c1a05d0 100644
--- a/pc/test/fake_peer_connection_for_stats.h
+++ b/pc/test/fake_peer_connection_for_stats.h
@@ -174,11 +174,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
void AddSctpDataChannel(const std::string& label,
const InternalDataChannelInit& init) {
- AddSctpDataChannel(DataChannel::Create(&data_channel_provider_,
- cricket::DCT_SCTP, label, init));
+ // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
+ AddSctpDataChannel(SctpDataChannel::Create(&data_channel_provider_, label,
+ init, rtc::Thread::Current(),
+ rtc::Thread::Current()));
}
- void AddSctpDataChannel(rtc::scoped_refptr<DataChannel> data_channel) {
+ void AddSctpDataChannel(rtc::scoped_refptr<SctpDataChannel> data_channel) {
sctp_data_channels_.push_back(data_channel);
}
@@ -257,9 +259,12 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
return transceivers_;
}
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- return sctp_data_channels_;
+ std::vector<DataChannelStats> GetDataChannelStats() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<DataChannelStats> stats;
+ for (const auto& channel : sctp_data_channels_)
+ stats.push_back(channel->GetStats());
+ return stats;
}
cricket::CandidateStatsList GetPooledCandidateStats() const override {
@@ -359,7 +364,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
std::unique_ptr<cricket::VoiceChannel> voice_channel_;
std::unique_ptr<cricket::VideoChannel> video_channel_;
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_;
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_;
std::map<std::string, cricket::TransportStats> transport_stats_by_name_;
diff --git a/pc/test/fake_periodic_video_source.h b/pc/test/fake_periodic_video_source.h
index 1684ca4adb..ac6e5a43e7 100644
--- a/pc/test/fake_periodic_video_source.h
+++ b/pc/test/fake_periodic_video_source.h
@@ -16,6 +16,7 @@
#include "api/video/video_source_interface.h"
#include "media/base/fake_frame_source.h"
#include "media/base/video_broadcaster.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/repeating_task.h"
@@ -59,6 +60,11 @@ class FakePeriodicVideoSource final
});
}
+ rtc::VideoSinkWants wants() const {
+ MutexLock lock(&mutex_);
+ return wants_;
+ }
+
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
RTC_DCHECK(thread_checker_.IsCurrent());
broadcaster_.RemoveSink(sink);
@@ -67,6 +73,10 @@ class FakePeriodicVideoSource final
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
RTC_DCHECK(thread_checker_.IsCurrent());
+ {
+ MutexLock lock(&mutex_);
+ wants_ = wants;
+ }
broadcaster_.AddOrUpdateSink(sink, wants);
}
@@ -80,6 +90,8 @@ class FakePeriodicVideoSource final
rtc::VideoBroadcaster broadcaster_;
cricket::FakeFrameSource frame_source_;
+ mutable Mutex mutex_;
+ rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_);
std::unique_ptr<TaskQueueForTest> task_queue_;
};
diff --git a/pc/test/fake_periodic_video_track_source.h b/pc/test/fake_periodic_video_track_source.h
index cc406d6d3f..98a456f232 100644
--- a/pc/test/fake_periodic_video_track_source.h
+++ b/pc/test/fake_periodic_video_track_source.h
@@ -29,6 +29,10 @@ class FakePeriodicVideoTrackSource : public VideoTrackSource {
~FakePeriodicVideoTrackSource() = default;
+ const FakePeriodicVideoSource& fake_periodic_source() const {
+ return source_;
+ }
+
protected:
rtc::VideoSourceInterface<VideoFrame>* source() override { return &source_; }
diff --git a/pc/test/mock_channel_interface.h b/pc/test/mock_channel_interface.h
index 255bd2fcee..2df3baee47 100644
--- a/pc/test/mock_channel_interface.h
+++ b/pc/test/mock_channel_interface.h
@@ -24,25 +24,40 @@ namespace cricket {
// implementation of BaseChannel.
class MockChannelInterface : public cricket::ChannelInterface {
public:
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(media_channel, MediaChannel*());
- MOCK_CONST_METHOD0(transport_name, const std::string&());
- MOCK_CONST_METHOD0(content_name, const std::string&());
- MOCK_CONST_METHOD0(enabled, bool());
- MOCK_METHOD1(Enable, bool(bool));
- MOCK_METHOD0(SignalFirstPacketReceived,
- sigslot::signal1<ChannelInterface*>&());
- MOCK_METHOD3(SetLocalContent,
- bool(const cricket::MediaContentDescription*,
- webrtc::SdpType,
- std::string*));
- MOCK_METHOD3(SetRemoteContent,
- bool(const cricket::MediaContentDescription*,
- webrtc::SdpType,
- std::string*));
- MOCK_CONST_METHOD0(local_streams, const std::vector<StreamParams>&());
- MOCK_CONST_METHOD0(remote_streams, const std::vector<StreamParams>&());
- MOCK_METHOD1(SetRtpTransport, bool(webrtc::RtpTransportInternal*));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(MediaChannel*, media_channel, (), (const, override));
+ MOCK_METHOD(const std::string&, transport_name, (), (const, override));
+ MOCK_METHOD(const std::string&, content_name, (), (const, override));
+ MOCK_METHOD(bool, enabled, (), (const, override));
+ MOCK_METHOD(bool, Enable, (bool), (override));
+ MOCK_METHOD(sigslot::signal1<ChannelInterface*>&,
+ SignalFirstPacketReceived,
+ (),
+ (override));
+ MOCK_METHOD(bool,
+ SetLocalContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string*),
+ (override));
+ MOCK_METHOD(bool,
+ SetRemoteContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string*),
+ (override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ local_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ remote_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ SetRtpTransport,
+ (webrtc::RtpTransportInternal*),
+ (override));
};
} // namespace cricket
diff --git a/pc/test/mock_data_channel.h b/pc/test/mock_data_channel.h
index 3385ec2f75..ab4b0073da 100644
--- a/pc/test/mock_data_channel.h
+++ b/pc/test/mock_data_channel.h
@@ -13,24 +13,39 @@
#include <string>
-#include "pc/data_channel.h"
+#include "pc/sctp_data_channel.h"
#include "test/gmock.h"
namespace webrtc {
-class MockDataChannel : public rtc::RefCountedObject<DataChannel> {
+class MockSctpDataChannel : public rtc::RefCountedObject<SctpDataChannel> {
public:
- MockDataChannel(int id, DataState state)
- : MockDataChannel(id, "MockDataChannel", state, "udp", 0, 0, 0, 0) {}
- MockDataChannel(int id,
- const std::string& label,
- DataState state,
- const std::string& protocol,
- uint32_t messages_sent,
- uint64_t bytes_sent,
- uint32_t messages_received,
- uint64_t bytes_received)
- : rtc::RefCountedObject<DataChannel>(nullptr, cricket::DCT_NONE, label) {
+ MockSctpDataChannel(int id, DataState state)
+ : MockSctpDataChannel(id,
+ "MockSctpDataChannel",
+ state,
+ "udp",
+ 0,
+ 0,
+ 0,
+ 0) {}
+ MockSctpDataChannel(
+ int id,
+ const std::string& label,
+ DataState state,
+ const std::string& protocol,
+ uint32_t messages_sent,
+ uint64_t bytes_sent,
+ uint32_t messages_received,
+ uint64_t bytes_received,
+ const InternalDataChannelInit& config = InternalDataChannelInit(),
+ rtc::Thread* signaling_thread = rtc::Thread::Current(),
+ rtc::Thread* network_thread = rtc::Thread::Current())
+ : rtc::RefCountedObject<SctpDataChannel>(config,
+ nullptr,
+ label,
+ signaling_thread,
+ network_thread) {
EXPECT_CALL(*this, id()).WillRepeatedly(::testing::Return(id));
EXPECT_CALL(*this, state()).WillRepeatedly(::testing::Return(state));
EXPECT_CALL(*this, protocol()).WillRepeatedly(::testing::Return(protocol));
@@ -43,13 +58,13 @@ class MockDataChannel : public rtc::RefCountedObject<DataChannel> {
EXPECT_CALL(*this, bytes_received())
.WillRepeatedly(::testing::Return(bytes_received));
}
- MOCK_CONST_METHOD0(id, int());
- MOCK_CONST_METHOD0(state, DataState());
- MOCK_CONST_METHOD0(protocol, std::string());
- MOCK_CONST_METHOD0(messages_sent, uint32_t());
- MOCK_CONST_METHOD0(bytes_sent, uint64_t());
- MOCK_CONST_METHOD0(messages_received, uint32_t());
- MOCK_CONST_METHOD0(bytes_received, uint64_t());
+ MOCK_METHOD(int, id, (), (const, override));
+ MOCK_METHOD(DataState, state, (), (const, override));
+ MOCK_METHOD(std::string, protocol, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_sent, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_sent, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_received, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_received, (), (const, override));
};
} // namespace webrtc
diff --git a/pc/test/mock_delayable.h b/pc/test/mock_delayable.h
index 548f9f8c0a..bef07c1970 100644
--- a/pc/test/mock_delayable.h
+++ b/pc/test/mock_delayable.h
@@ -21,9 +21,14 @@ namespace webrtc {
class MockDelayable : public cricket::Delayable {
public:
- MOCK_METHOD2(SetBaseMinimumPlayoutDelayMs, bool(uint32_t ssrc, int delay_ms));
- MOCK_CONST_METHOD1(GetBaseMinimumPlayoutDelayMs,
- absl::optional<int>(uint32_t ssrc));
+ MOCK_METHOD(bool,
+ SetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc, int delay_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int>,
+ GetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc),
+ (const, override));
};
} // namespace webrtc
diff --git a/pc/test/mock_rtp_receiver_internal.h b/pc/test/mock_rtp_receiver_internal.h
index ffe78b5230..779dcdcf08 100644
--- a/pc/test/mock_rtp_receiver_internal.h
+++ b/pc/test/mock_rtp_receiver_internal.h
@@ -24,37 +24,54 @@ namespace webrtc {
class MockRtpReceiverInternal : public RtpReceiverInternal {
public:
// RtpReceiverInterface methods.
- MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*));
- MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
- MOCK_CONST_METHOD0(dtls_transport,
- rtc::scoped_refptr<DtlsTransportInterface>());
- MOCK_CONST_METHOD0(stream_ids, std::vector<std::string>());
- MOCK_CONST_METHOD0(streams,
- std::vector<rtc::scoped_refptr<MediaStreamInterface>>());
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(id, std::string());
- MOCK_CONST_METHOD0(GetParameters, RtpParameters());
- MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*));
- MOCK_METHOD1(SetJitterBufferMinimumDelay, void(absl::optional<double>));
- MOCK_CONST_METHOD0(GetSources, std::vector<RtpSource>());
- MOCK_METHOD1(SetFrameDecryptor,
- void(rtc::scoped_refptr<FrameDecryptorInterface>));
- MOCK_CONST_METHOD0(GetFrameDecryptor,
- rtc::scoped_refptr<FrameDecryptorInterface>());
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override));
+ MOCK_METHOD(void,
+ SetJitterBufferMinimumDelay,
+ (absl::optional<double>),
+ (override));
+ MOCK_METHOD(std::vector<RtpSource>, GetSources, (), (const, override));
+ MOCK_METHOD(void,
+ SetFrameDecryptor,
+ (rtc::scoped_refptr<FrameDecryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor,
+ (),
+ (const, override));
// RtpReceiverInternal methods.
- MOCK_METHOD0(Stop, void());
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*));
- MOCK_METHOD1(SetupMediaChannel, void(uint32_t));
- MOCK_METHOD0(SetupUnsignaledMediaChannel, void());
- MOCK_CONST_METHOD0(ssrc, uint32_t());
- MOCK_METHOD0(NotifyFirstPacketReceived, void());
- MOCK_METHOD1(set_stream_ids, void(std::vector<std::string>));
- MOCK_METHOD1(set_transport, void(rtc::scoped_refptr<DtlsTransportInterface>));
- MOCK_METHOD1(
- SetStreams,
- void(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&));
- MOCK_CONST_METHOD0(AttachmentId, int());
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override));
+ MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override));
+ MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override));
+ MOCK_METHOD(void, set_stream_ids, (std::vector<std::string>), (override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetStreams,
+ (const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&),
+ (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
};
} // namespace webrtc
diff --git a/pc/test/mock_rtp_sender_internal.h b/pc/test/mock_rtp_sender_internal.h
index 2cf0173bd1..1a31c5dac6 100644
--- a/pc/test/mock_rtp_sender_internal.h
+++ b/pc/test/mock_rtp_sender_internal.h
@@ -23,37 +23,65 @@ namespace webrtc {
class MockRtpSenderInternal : public RtpSenderInternal {
public:
// RtpSenderInterface methods.
- MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*));
- MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
- MOCK_CONST_METHOD0(ssrc, uint32_t());
- MOCK_CONST_METHOD0(dtls_transport,
- rtc::scoped_refptr<DtlsTransportInterface>());
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(id, std::string());
- MOCK_CONST_METHOD0(stream_ids, std::vector<std::string>());
- MOCK_CONST_METHOD0(init_send_encodings, std::vector<RtpEncodingParameters>());
- MOCK_METHOD1(set_transport, void(rtc::scoped_refptr<DtlsTransportInterface>));
- MOCK_CONST_METHOD0(GetParameters, RtpParameters());
- MOCK_CONST_METHOD0(GetParametersInternal, RtpParameters());
- MOCK_METHOD1(SetParameters, RTCError(const RtpParameters&));
- MOCK_METHOD1(SetParametersInternal, RTCError(const RtpParameters&));
- MOCK_CONST_METHOD0(GetDtmfSender, rtc::scoped_refptr<DtmfSenderInterface>());
- MOCK_METHOD1(SetFrameEncryptor,
- void(rtc::scoped_refptr<FrameEncryptorInterface>));
- MOCK_CONST_METHOD0(GetFrameEncryptor,
- rtc::scoped_refptr<FrameEncryptorInterface>());
+ MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<RtpEncodingParameters>,
+ init_send_encodings,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override));
+ MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override));
+ MOCK_METHOD(RTCError,
+ SetParametersInternal,
+ (const RtpParameters&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtmfSenderInterface>,
+ GetDtmfSender,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor,
+ (),
+ (const, override));
// RtpSenderInternal methods.
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*));
- MOCK_METHOD1(SetSsrc, void(uint32_t));
- MOCK_METHOD1(set_stream_ids, void(const std::vector<std::string>&));
- MOCK_METHOD1(SetStreams, void(const std::vector<std::string>&));
- MOCK_METHOD1(set_init_send_encodings,
- void(const std::vector<RtpEncodingParameters>&));
- MOCK_METHOD0(Stop, void());
- MOCK_CONST_METHOD0(AttachmentId, int());
- MOCK_METHOD1(DisableEncodingLayers,
- RTCError(const std::vector<std::string>&));
+ MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override));
+ MOCK_METHOD(void, SetSsrc, (uint32_t), (override));
+ MOCK_METHOD(void,
+ set_stream_ids,
+ (const std::vector<std::string>&),
+ (override));
+ MOCK_METHOD(void, SetStreams, (const std::vector<std::string>&), (override));
+ MOCK_METHOD(void,
+ set_init_send_encodings,
+ (const std::vector<RtpEncodingParameters>&),
+ (override));
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
+ MOCK_METHOD(RTCError,
+ DisableEncodingLayers,
+ (const std::vector<std::string>&),
+ (override));
};
} // namespace webrtc
diff --git a/pc/test/peer_connection_test_wrapper.cc b/pc/test/peer_connection_test_wrapper.cc
index 4f0d72e667..946f459f3b 100644
--- a/pc/test/peer_connection_test_wrapper.cc
+++ b/pc/test/peer_connection_test_wrapper.cc
@@ -80,7 +80,8 @@ PeerConnectionTestWrapper::PeerConnectionTestWrapper(
rtc::Thread* worker_thread)
: name_(name),
network_thread_(network_thread),
- worker_thread_(worker_thread) {
+ worker_thread_(worker_thread),
+ pending_negotiation_(false) {
pc_thread_checker_.Detach();
}
@@ -135,6 +136,17 @@ PeerConnectionTestWrapper::CreateDataChannel(
return peer_connection_->CreateDataChannel(label, &init);
}
+void PeerConnectionTestWrapper::WaitForNegotiation() {
+ EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait);
+}
+
+void PeerConnectionTestWrapper::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) {
+ pending_negotiation_ = false;
+ }
+}
+
void PeerConnectionTestWrapper::OnAddTrack(
rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
@@ -182,6 +194,7 @@ void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
void PeerConnectionTestWrapper::CreateOffer(
const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateOffer.";
+ pending_negotiation_ = true;
peer_connection_->CreateOffer(this, options);
}
@@ -189,6 +202,7 @@ void PeerConnectionTestWrapper::CreateAnswer(
const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
<< ": CreateAnswer.";
+ pending_negotiation_ = true;
peer_connection_->CreateAnswer(this, options);
}
diff --git a/pc/test/peer_connection_test_wrapper.h b/pc/test/peer_connection_test_wrapper.h
index 2dc88e9309..92599b78ab 100644
--- a/pc/test/peer_connection_test_wrapper.h
+++ b/pc/test/peer_connection_test_wrapper.h
@@ -49,15 +49,21 @@ class PeerConnectionTestWrapper
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory);
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory()
+ const {
+ return peer_connection_factory_;
+ }
webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
const std::string& label,
const webrtc::DataChannelInit& init);
+ void WaitForNegotiation();
+
// Implements PeerConnectionObserver.
void OnSignalingChange(
- webrtc::PeerConnectionInterface::SignalingState new_state) override {}
+ webrtc::PeerConnectionInterface::SignalingState new_state) override;
void OnAddTrack(
rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
@@ -121,6 +127,7 @@ class PeerConnectionTestWrapper
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
std::unique_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
int num_get_user_media_calls_ = 0;
+ bool pending_negotiation_;
};
#endif // PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
diff --git a/pc/track_media_info_map.cc b/pc/track_media_info_map.cc
index ca923a030d..b3ec68bb27 100644
--- a/pc/track_media_info_map.cc
+++ b/pc/track_media_info_map.cc
@@ -14,6 +14,8 @@
#include <string>
#include <utility>
+#include "rtc_base/thread.h"
+
namespace webrtc {
namespace {
@@ -43,20 +45,12 @@ void GetAudioAndVideoTrackBySsrc(
RTC_DCHECK(local_video_track_by_ssrc->empty());
RTC_DCHECK(remote_audio_track_by_ssrc->empty());
RTC_DCHECK(remote_video_track_by_ssrc->empty());
- // TODO(hbos): RTP senders/receivers uses a proxy to the signaling thread, and
- // our sender/receiver implementations invokes on the worker thread. (This
- // means one thread jump if on signaling thread and two thread jumps if on any
- // other threads). Is there a way to avoid thread jump(s) on a per
- // sender/receiver, per method basis?
for (const auto& rtp_sender : rtp_senders) {
cricket::MediaType media_type = rtp_sender->media_type();
MediaStreamTrackInterface* track = rtp_sender->track();
if (!track) {
continue;
}
- RTC_DCHECK_EQ(track->kind(), media_type == cricket::MEDIA_TYPE_AUDIO
- ? MediaStreamTrackInterface::kAudioKind
- : MediaStreamTrackInterface::kVideoKind);
// TODO(deadbeef): |ssrc| should be removed in favor of |GetParameters|.
uint32_t ssrc = rtp_sender->ssrc();
if (ssrc != 0) {
@@ -77,9 +71,6 @@ void GetAudioAndVideoTrackBySsrc(
cricket::MediaType media_type = rtp_receiver->media_type();
MediaStreamTrackInterface* track = rtp_receiver->track();
RTC_DCHECK(track);
- RTC_DCHECK_EQ(track->kind(), media_type == cricket::MEDIA_TYPE_AUDIO
- ? MediaStreamTrackInterface::kAudioKind
- : MediaStreamTrackInterface::kVideoKind);
RtpParameters params = rtp_receiver->GetParameters();
for (const RtpEncodingParameters& encoding : params.encodings) {
if (!encoding.ssrc) {
@@ -115,6 +106,8 @@ TrackMediaInfoMap::TrackMediaInfoMap(
const std::vector<rtc::scoped_refptr<RtpReceiverInternal>>& rtp_receivers)
: voice_media_info_(std::move(voice_media_info)),
video_media_info_(std::move(video_media_info)) {
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
std::map<uint32_t, AudioTrackInterface*> local_audio_track_by_ssrc;
std::map<uint32_t, VideoTrackInterface*> local_video_track_by_ssrc;
std::map<uint32_t, AudioTrackInterface*> remote_audio_track_by_ssrc;
diff --git a/pc/track_media_info_map_unittest.cc b/pc/track_media_info_map_unittest.cc
index c487ab8f81..0cb1e0e277 100644
--- a/pc/track_media_info_map_unittest.cc
+++ b/pc/track_media_info_map_unittest.cc
@@ -83,19 +83,21 @@ rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockRtpReceiver(
class TrackMediaInfoMapTest : public ::testing::Test {
public:
- TrackMediaInfoMapTest()
+ TrackMediaInfoMapTest() : TrackMediaInfoMapTest(true) {}
+
+ explicit TrackMediaInfoMapTest(bool use_current_thread)
: voice_media_info_(new cricket::VoiceMediaInfo()),
video_media_info_(new cricket::VideoMediaInfo()),
local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)),
remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)),
- local_video_track_(
- VideoTrack::Create("LocalVideoTrack",
- FakeVideoTrackSource::Create(false),
- rtc::Thread::Current())),
- remote_video_track_(
- VideoTrack::Create("RemoteVideoTrack",
- FakeVideoTrackSource::Create(false),
- rtc::Thread::Current())) {}
+ local_video_track_(VideoTrack::Create(
+ "LocalVideoTrack",
+ FakeVideoTrackSource::Create(false),
+ use_current_thread ? rtc::Thread::Current() : nullptr)),
+ remote_video_track_(VideoTrack::Create(
+ "RemoteVideoTrack",
+ FakeVideoTrackSource::Create(false),
+ use_current_thread ? rtc::Thread::Current() : nullptr)) {}
~TrackMediaInfoMapTest() {
// If we have a map the ownership has been passed to the map, only delete if
@@ -417,7 +419,10 @@ TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) {
// base/test/gtest_util.h.
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {};
+class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {
+ public:
+ TrackMediaInfoMapDeathTest() : TrackMediaInfoMapTest(false) {}
+};
TEST_F(TrackMediaInfoMapDeathTest, MultipleOneSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({1}, remote_audio_track_);
diff --git a/pc/video_rtp_receiver_unittest.cc b/pc/video_rtp_receiver_unittest.cc
index c4b7b8205d..b3eb6e6e35 100644
--- a/pc/video_rtp_receiver_unittest.cc
+++ b/pc/video_rtp_receiver_unittest.cc
@@ -32,16 +32,20 @@ class VideoRtpReceiverTest : public testing::Test {
MockVideoMediaChannel(cricket::FakeVideoEngine* engine,
const cricket::VideoOptions& options)
: FakeVideoMediaChannel(engine, options) {}
- MOCK_METHOD2(SetRecordableEncodedFrameCallback,
- void(uint32_t,
- std::function<void(const RecordableEncodedFrame&)>));
- MOCK_METHOD1(ClearRecordableEncodedFrameCallback, void(uint32_t));
- MOCK_METHOD1(GenerateKeyFrame, void(uint32_t));
+ MOCK_METHOD(void,
+ SetRecordableEncodedFrameCallback,
+ (uint32_t, std::function<void(const RecordableEncodedFrame&)>),
+ (override));
+ MOCK_METHOD(void,
+ ClearRecordableEncodedFrameCallback,
+ (uint32_t),
+ (override));
+ MOCK_METHOD(void, GenerateKeyFrame, (uint32_t), (override));
};
class MockVideoSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&));
+ MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
};
VideoRtpReceiverTest()
diff --git a/pc/video_rtp_track_source.cc b/pc/video_rtp_track_source.cc
index 2f15c42b4d..f96db962b1 100644
--- a/pc/video_rtp_track_source.cc
+++ b/pc/video_rtp_track_source.cc
@@ -31,7 +31,7 @@ rtc::VideoSinkInterface<VideoFrame>* VideoRtpTrackSource::sink() {
void VideoRtpTrackSource::BroadcastRecordableEncodedFrame(
const RecordableEncodedFrame& frame) const {
- rtc::CritScope cs(&mu_);
+ MutexLock lock(&mu_);
for (rtc::VideoSinkInterface<RecordableEncodedFrame>* sink : encoded_sinks_) {
sink->OnFrame(frame);
}
@@ -54,7 +54,7 @@ void VideoRtpTrackSource::AddEncodedSink(
RTC_DCHECK(sink);
size_t size = 0;
{
- rtc::CritScope cs(&mu_);
+ MutexLock lock(&mu_);
RTC_DCHECK(std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink) ==
encoded_sinks_.end());
encoded_sinks_.push_back(sink);
@@ -70,7 +70,7 @@ void VideoRtpTrackSource::RemoveEncodedSink(
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
size_t size = 0;
{
- rtc::CritScope cs(&mu_);
+ MutexLock lock(&mu_);
auto it = std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink);
if (it != encoded_sinks_.end()) {
encoded_sinks_.erase(it);
diff --git a/pc/video_rtp_track_source.h b/pc/video_rtp_track_source.h
index e62cda70c3..b887849312 100644
--- a/pc/video_rtp_track_source.h
+++ b/pc/video_rtp_track_source.h
@@ -16,7 +16,7 @@
#include "media/base/video_broadcaster.h"
#include "pc/video_track_source.h"
#include "rtc_base/callback.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -72,7 +72,7 @@ class VideoRtpTrackSource : public VideoTrackSource {
// It might be better if the decoder can handle multiple sinks and consider
// the VideoSinkWants.
rtc::VideoBroadcaster broadcaster_;
- rtc::CriticalSection mu_;
+ mutable Mutex mu_;
std::vector<rtc::VideoSinkInterface<RecordableEncodedFrame>*> encoded_sinks_
RTC_GUARDED_BY(mu_);
Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_);
diff --git a/pc/video_rtp_track_source_unittest.cc b/pc/video_rtp_track_source_unittest.cc
index ed26db3654..ea1b4cacf8 100644
--- a/pc/video_rtp_track_source_unittest.cc
+++ b/pc/video_rtp_track_source_unittest.cc
@@ -19,13 +19,13 @@ namespace {
class MockCallback : public VideoRtpTrackSource::Callback {
public:
- MOCK_METHOD0(OnGenerateKeyFrame, void());
- MOCK_METHOD1(OnEncodedSinkEnabled, void(bool));
+ MOCK_METHOD(void, OnGenerateKeyFrame, (), (override));
+ MOCK_METHOD(void, OnEncodedSinkEnabled, (bool), (override));
};
class MockSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&));
+ MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
};
rtc::scoped_refptr<VideoRtpTrackSource> MakeSource(
diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc
index f77327faf1..af584791be 100644
--- a/pc/webrtc_sdp.cc
+++ b/pc/webrtc_sdp.cc
@@ -229,12 +229,6 @@ static const char kApplicationSpecificMaximum[] = "AS";
static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel";
-// This is a non-standardized setting for plugin transports.
-static const char kOpaqueTransportParametersLine[] = "x-opaque";
-
-// This is a non-standardized setting for plugin transports.
-static const char kAltProtocolLine[] = "x-alt-protocol";
-
// RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload
// types.
const int kWildcardPayloadType = -1;
@@ -523,25 +517,6 @@ static void InitAttrLine(const std::string& attribute, rtc::StringBuilder* os) {
InitLine(kLineTypeAttributes, attribute, os);
}
-// Adds an x-otp SDP attribute line based on opaque transport parameters.
-static void AddOpaqueTransportLine(
- const cricket::OpaqueTransportParameters params,
- std::string* message) {
- rtc::StringBuilder os;
- InitAttrLine(kOpaqueTransportParametersLine, &os);
- os << kSdpDelimiterColon << params.protocol << kSdpDelimiterColon
- << rtc::Base64::Encode(params.parameters);
- AddLine(os.str(), message);
-}
-
-static void AddAltProtocolLine(const std::string& protocol,
- std::string* message) {
- rtc::StringBuilder os;
- InitAttrLine(kAltProtocolLine, &os);
- os << kSdpDelimiterColon << protocol;
- AddLine(os.str(), message);
-}
-
// Writes a SDP attribute line based on |attribute| and |value| to |message|.
static void AddAttributeLine(const std::string& attribute,
int value,
@@ -1532,15 +1507,6 @@ void BuildMediaDescription(const ContentInfo* content_info,
AddLine(os.str(), message);
}
}
-
- if (transport_info->description.opaque_parameters) {
- AddOpaqueTransportLine(*transport_info->description.opaque_parameters,
- message);
- }
- }
-
- if (media_desc->alt_protocol()) {
- AddAltProtocolLine(*media_desc->alt_protocol(), message);
}
// RFC 3388
@@ -1796,8 +1762,13 @@ void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) {
void WriteFmtpParameter(const std::string& parameter_name,
const std::string& parameter_value,
rtc::StringBuilder* os) {
- // fmtp parameters: |parameter_name|=|parameter_value|
- *os << parameter_name << kSdpDelimiterEqual << parameter_value;
+ if (parameter_name == "") {
+ // RFC 2198 and RFC 4733 don't use key-value pairs.
+ *os << parameter_value;
+ } else {
+ // fmtp parameters: |parameter_name|=|parameter_value|
+ *os << parameter_name << kSdpDelimiterEqual << parameter_value;
+ }
}
bool IsFmtpParam(const std::string& name) {
@@ -2105,32 +2076,6 @@ bool ParseConnectionData(const std::string& line,
return true;
}
-bool ParseOpaqueTransportLine(const std::string& line,
- std::string* protocol,
- std::string* transport_parameters,
- SdpParseError* error) {
- std::string value;
- if (!GetValue(line, kOpaqueTransportParametersLine, &value, error)) {
- return false;
- }
- std::string tmp_parameters;
- if (!rtc::tokenize_first(value, kSdpDelimiterColonChar, protocol,
- &tmp_parameters)) {
- return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error);
- }
- if (!rtc::Base64::Decode(tmp_parameters, rtc::Base64::DO_STRICT,
- transport_parameters, nullptr)) {
- return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error);
- }
- return true;
-}
-
-bool ParseAltProtocolLine(const std::string& line,
- std::string* protocol,
- SdpParseError* error) {
- return GetValue(line, kAltProtocolLine, protocol, error);
-}
-
bool ParseSessionDescription(const std::string& message,
size_t* pos,
std::string* session_id,
@@ -2685,18 +2630,12 @@ bool ParseMediaDescription(
if (!rtc::FromString<int>(fields[1], &port) || !IsValidPort(port)) {
return ParseFailed(line, "The port number is invalid", error);
}
- std::string protocol = fields[2];
+ const std::string& protocol = fields[2];
// <fmt>
std::vector<int> payload_types;
if (cricket::IsRtpProtocol(protocol)) {
for (size_t j = 3; j < fields.size(); ++j) {
- // TODO(wu): Remove when below bug is fixed.
- // https://bugzilla.mozilla.org/show_bug.cgi?id=996329
- if (fields[j].empty() && j == fields.size() - 1) {
- continue;
- }
-
int pl = 0;
if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) {
return false;
@@ -2716,17 +2655,18 @@ bool ParseMediaDescription(
std::string content_name;
bool bundle_only = false;
int section_msid_signaling = 0;
- if (HasAttribute(line, kMediaTypeVideo)) {
+ const std::string& media_type = fields[0];
+ if (media_type == kMediaTypeVideo) {
content = ParseContentDescription<VideoContentDescription>(
message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
payload_types, pos, &content_name, &bundle_only,
&section_msid_signaling, &transport, candidates, error);
- } else if (HasAttribute(line, kMediaTypeAudio)) {
+ } else if (media_type == kMediaTypeAudio) {
content = ParseContentDescription<AudioContentDescription>(
message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
payload_types, pos, &content_name, &bundle_only,
&section_msid_signaling, &transport, candidates, error);
- } else if (HasAttribute(line, kMediaTypeData)) {
+ } else if (media_type == kMediaTypeData) {
if (cricket::IsDtlsSctp(protocol)) {
// The draft-03 format is:
// m=application <port> DTLS/SCTP <sctp-port>...
@@ -3137,19 +3077,6 @@ bool ParseContent(const std::string& message,
if (!ParseIceOptions(line, &transport->transport_options, error)) {
return false;
}
- } else if (HasAttribute(line, kOpaqueTransportParametersLine)) {
- transport->opaque_parameters = cricket::OpaqueTransportParameters();
- if (!ParseOpaqueTransportLine(
- line, &transport->opaque_parameters->protocol,
- &transport->opaque_parameters->parameters, error)) {
- return false;
- }
- } else if (HasAttribute(line, kAltProtocolLine)) {
- std::string alt_protocol;
- if (!ParseAltProtocolLine(line, &alt_protocol, error)) {
- return false;
- }
- media_desc->set_alt_protocol(alt_protocol);
} else if (HasAttribute(line, kAttributeFmtp)) {
if (!ParseFmtpAttributes(line, media_type, media_desc, error)) {
return false;
@@ -3681,8 +3608,10 @@ bool ParseFmtpParam(const std::string& line,
std::string* value,
SdpParseError* error) {
if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) {
- ParseFailed(line, "Unable to parse fmtp parameter. \'=\' missing.", error);
- return false;
+ // Support for non-key-value lines like RFC 2198 or RFC 4733.
+ *parameter = "";
+ *value = line;
+ return true;
}
// a=fmtp:<payload_type> <param1>=<value1>; <param2>=<value2>; ...
return true;
@@ -3700,7 +3629,7 @@ bool ParseFmtpAttributes(const std::string& line,
std::string line_payload;
std::string line_params;
- // RFC 5576
+ // https://tools.ietf.org/html/rfc4566#section-6
// a=fmtp:<format> <format specific parameters>
// At least two fields, whereas the second one is any of the optional
// parameters.
@@ -3729,17 +3658,15 @@ bool ParseFmtpAttributes(const std::string& line,
cricket::CodecParameterMap codec_params;
for (auto& iter : fields) {
- if (iter.find(kSdpDelimiterEqual) == std::string::npos) {
- // Only fmtps with equals are currently supported. Other fmtp types
- // should be ignored. Unknown fmtps do not constitute an error.
- continue;
- }
-
std::string name;
std::string value;
if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) {
return false;
}
+ if (codec_params.find(name) != codec_params.end()) {
+ RTC_LOG(LS_INFO) << "Overwriting duplicate fmtp parameter with key \""
+ << name << "\".";
+ }
codec_params[name] = value;
}
diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc
index a2ad4b8bdc..7b83c86ab1 100644
--- a/pc/webrtc_sdp_unittest.cc
+++ b/pc/webrtc_sdp_unittest.cc
@@ -1293,8 +1293,7 @@ class WebRtcSdpTest : public ::testing::Test {
"inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32",
"dummy_session_params"));
audio->set_protocol(cricket::kMediaProtocolSavpf);
- AudioCodec opus(111, "opus", 48000, 0, 2);
- audio->AddCodec(opus);
+ audio->AddCodec(AudioCodec(111, "opus", 48000, 0, 2));
audio->AddCodec(AudioCodec(103, "ISAC", 16000, 0, 1));
audio->AddCodec(AudioCodec(104, "ISAC", 32000, 0, 1));
return audio;
@@ -1528,8 +1527,6 @@ class WebRtcSdpTest : public ::testing::Test {
CompareSimulcastDescription(
c1.media_description()->simulcast_description(),
c2.media_description()->simulcast_description());
- EXPECT_EQ(c1.media_description()->alt_protocol(),
- c2.media_description()->alt_protocol());
}
// group
@@ -1584,8 +1581,6 @@ class WebRtcSdpTest : public ::testing::Test {
}
EXPECT_EQ(transport1.description.transport_options,
transport2.description.transport_options);
- EXPECT_EQ(transport1.description.opaque_parameters,
- transport2.description.opaque_parameters);
}
// global attributes
@@ -1679,23 +1674,6 @@ class WebRtcSdpTest : public ::testing::Test {
desc_.AddTransportInfo(transport_info);
}
- void AddOpaqueTransportParameters(const std::string& content_name,
- cricket::OpaqueTransportParameters params) {
- ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
- cricket::TransportInfo info = *(desc_.GetTransportInfoByName(content_name));
- desc_.RemoveTransportInfoByName(content_name);
- info.description.opaque_parameters = params;
- desc_.AddTransportInfo(info);
- }
-
- void AddAltProtocol(const std::string& content_name,
- const std::string& alt_protocol) {
- ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
- cricket::MediaContentDescription* description =
- desc_.GetContentDescriptionByName(content_name);
- description->set_alt_protocol(alt_protocol);
- }
-
void AddFingerprint() {
desc_.RemoveTransportInfoByName(kAudioContentName);
desc_.RemoveTransportInfoByName(kVideoContentName);
@@ -1955,13 +1933,14 @@ class WebRtcSdpTest : public ::testing::Test {
// description.
"a=msid-semantic: WMS\r\n"
// Pl type 111 preferred.
- "m=audio 9 RTP/SAVPF 111 104 103\r\n"
+ "m=audio 9 RTP/SAVPF 111 104 103 105\r\n"
// Pltype 111 listed before 103 and 104 in the map.
"a=rtpmap:111 opus/48000/2\r\n"
// Pltype 103 listed before 104.
"a=rtpmap:103 ISAC/16000\r\n"
"a=rtpmap:104 ISAC/32000\r\n"
- "a=fmtp:111 0-15,66,70\r\n"
+ "a=rtpmap:105 telephone-event/8000\r\n"
+ "a=fmtp:105 0-15,66,70\r\n"
"a=fmtp:111 ";
std::ostringstream os;
os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
@@ -2008,6 +1987,14 @@ class WebRtcSdpTest : public ::testing::Test {
VerifyCodecParameter(codec.params, "maxptime", params.max_ptime);
}
+ cricket::AudioCodec dtmf = acd->codecs()[3];
+ EXPECT_EQ("telephone-event", dtmf.name);
+ EXPECT_EQ(105, dtmf.id);
+ EXPECT_EQ(3u,
+ dtmf.params.size()); // ptime and max_ptime count as parameters.
+ EXPECT_EQ(dtmf.params.begin()->first, "");
+ EXPECT_EQ(dtmf.params.begin()->second, "0-15,66,70");
+
const VideoContentDescription* vcd =
GetFirstVideoContentDescription(jdesc_output->description());
ASSERT_TRUE(vcd);
@@ -2236,41 +2223,6 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) {
EXPECT_EQ(sdp_with_ice_options, message);
}
-TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithOpaqueTransportParams) {
- cricket::OpaqueTransportParameters params;
- params.protocol = "foo";
- params.parameters = "test64";
- AddOpaqueTransportParameters(kAudioContentName, params);
- AddOpaqueTransportParameters(kVideoContentName, params);
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- std::string message = webrtc::SdpSerialize(jdesc_);
-
- std::string sdp_with_transport_parameters = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- EXPECT_EQ(message, sdp_with_transport_parameters);
-}
-
-TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAltProtocol) {
- AddAltProtocol(kAudioContentName, "foo");
- AddAltProtocol(kVideoContentName, "bar");
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- std::string message = webrtc::SdpSerialize(jdesc_);
-
- std::string sdp_with_alt_protocol = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n",
- &sdp_with_alt_protocol);
- InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n",
- &sdp_with_alt_protocol);
- EXPECT_EQ(message, sdp_with_alt_protocol);
-}
-
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) {
EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kRecvOnly));
}
@@ -2445,8 +2397,6 @@ TEST_F(WebRtcSdpTest, SerializeHostnameCandidate) {
EXPECT_EQ(std::string(kRawHostnameCandidate), message);
}
-// TODO(mallinath) : Enable this test once WebRTCSdp capable of parsing
-// RFC 6544.
TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
@@ -2685,48 +2635,6 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) {
EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options));
}
-TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithOpaqueTransportParams) {
- std::string sdp_with_transport_parameters = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
-
- JsepSessionDescription jdesc_with_transport_parameters(kDummyType);
- EXPECT_TRUE(SdpDeserialize(sdp_with_transport_parameters,
- &jdesc_with_transport_parameters));
-
- cricket::OpaqueTransportParameters params;
- params.protocol = "foo";
- params.parameters = "test64";
-
- AddOpaqueTransportParameters(kAudioContentName, params);
- AddOpaqueTransportParameters(kVideoContentName, params);
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- EXPECT_TRUE(
- CompareSessionDescription(jdesc_, jdesc_with_transport_parameters));
-}
-
-TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithAltProtocol) {
- std::string sdp_with_alt_protocol = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n",
- &sdp_with_alt_protocol);
- InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n",
- &sdp_with_alt_protocol);
-
- JsepSessionDescription jdesc_with_alt_protocol(kDummyType);
- EXPECT_TRUE(SdpDeserialize(sdp_with_alt_protocol, &jdesc_with_alt_protocol));
-
- AddAltProtocol(kAudioContentName, "foo");
- AddAltProtocol(kVideoContentName, "bar");
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_alt_protocol));
-}
-
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) {
// Remove the original ice-ufrag and ice-pwd
JsepSessionDescription jdesc_with_ufrag_pwd(kDummyType);
@@ -3017,6 +2925,24 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpColonPort) {
EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
}
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsButWrongMediaType) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp = kSdpSessionString;
+ sdp += kSdpSctpDataChannelString;
+
+ const char needle[] = "m=application ";
+ sdp.replace(sdp.find(needle), strlen(needle), "m=application:bogus ");
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+
+ EXPECT_EQ(0u, jdesc_output.description()->contents().size());
+}
+
// Helper function to set the max-message-size parameter in the
// SCTP data codec.
void MutateJsepSctpMaxMessageSize(const SessionDescription& desc,
@@ -3365,6 +3291,7 @@ TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
// Broken media description
ExpectParseFailure("m=audio", "c=IN IP4 74.125.224.39");
ExpectParseFailure("m=video", kSdpDestroyer);
+ ExpectParseFailure("m=", "c=IN IP4 74.125.224.39");
// Invalid lines
ExpectParseFailure("a=candidate", kSdpEmptyType);
@@ -3673,6 +3600,28 @@ TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithPTimeAndMaxPTime) {
EXPECT_EQ(sdp_with_fmtp, message);
}
+TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithTelephoneEvent) {
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+
+ cricket::AudioCodecs codecs = acd->codecs();
+ cricket::AudioCodec dtmf(105, "telephone-event", 8000, 0, 1);
+ dtmf.params[""] = "0-15";
+ codecs.push_back(dtmf);
+ acd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("m=audio 2345 RTP/SAVPF 111 103 104", " 105", &sdp_with_fmtp);
+ InjectAfter(
+ "a=rtpmap:104 ISAC/32000\r\n",
+ "a=rtpmap:105 telephone-event/8000\r\n" // No comma here. String merging!
+ "a=fmtp:105 0-15\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
TEST_F(WebRtcSdpTest, SerializeVideoFmtp) {
VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
@@ -4716,3 +4665,23 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCname) {
jdesc_.session_version()));
EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithUnsupportedMediaType) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp = kSdpSessionString;
+ sdp +=
+ "m=bogus 9 RTP/SAVPF 0 8\r\n"
+ "c=IN IP4 0.0.0.0\r\n";
+ sdp +=
+ "m=audio/something 9 RTP/SAVPF 0 8\r\n"
+ "c=IN IP4 0.0.0.0\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+
+ EXPECT_EQ(0u, jdesc_output.description()->contents().size());
+}
diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn
index a61ede4ac9..73bca85efa 100644
--- a/rtc_base/BUILD.gn
+++ b/rtc_base/BUILD.gn
@@ -55,12 +55,13 @@ rtc_library("rtc_base_approved") {
":type_traits",
"../api:array_view",
"../api:scoped_refptr",
+ "synchronization:mutex",
"system:arch",
"system:rtc_export",
"system:unused",
"third_party/base64",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [] # no-presubmit-check TODO(webrtc:8603)
sources = [
@@ -154,6 +155,7 @@ rtc_library("platform_thread_types") {
"platform_thread_types.cc",
"platform_thread_types.h",
]
+ deps = [ ":macromagic" ]
}
rtc_source_set("refcount") {
@@ -168,15 +170,15 @@ rtc_source_set("refcount") {
rtc_library("criticalsection") {
sources = [
- "critical_section.cc",
- "critical_section.h",
+ "deprecated/recursive_critical_section.cc",
+ "deprecated/recursive_critical_section.h",
]
deps = [
":atomicops",
":checks",
":macromagic",
":platform_thread_types",
- "system:rtc_export",
+ "synchronization:yield",
"system:unused",
]
}
@@ -187,6 +189,7 @@ rtc_library("platform_thread") {
":rtc_task_queue_libevent",
":rtc_task_queue_win",
":rtc_task_queue_stdlib",
+ "synchronization:mutex",
"synchronization:sequence_checker",
]
sources = [
@@ -201,8 +204,8 @@ rtc_library("platform_thread") {
":rtc_event",
":thread_checker",
":timeutils",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("rtc_event") {
@@ -225,8 +228,8 @@ rtc_library("rtc_event") {
":checks",
"synchronization:yield_policy",
"system:warn_current_thread_is_deadlocked",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -240,6 +243,9 @@ rtc_library("logging") {
":platform_thread_types",
":stringutils",
":timeutils",
+ "synchronization:mutex",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/meta:type_traits",
"//third_party/abseil-cpp/absl/strings",
@@ -264,7 +270,7 @@ rtc_library("logging") {
deps += [ "system:inline" ]
if (is_mac) {
- libs += [ "Foundation.framework" ]
+ frameworks = [ "Foundation.framework" ]
}
# logging.h needs the deprecation header while downstream projects are
@@ -301,6 +307,8 @@ rtc_library("checks") {
":safe_compare",
"system:inline",
"system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/meta:type_traits",
"//third_party/abseil-cpp/absl/strings",
]
@@ -317,13 +325,14 @@ rtc_library("rate_limiter") {
deps = [
":rtc_base_approved",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
+ "synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("sanitizer") {
sources = [ "sanitizer.h" ]
- deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ]
}
rtc_source_set("bounded_inline_vector") {
@@ -398,6 +407,8 @@ rtc_library("stringutils") {
":macromagic",
":safe_minmax",
"../api:array_view",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -433,8 +444,8 @@ rtc_library("rtc_task_queue") {
"../api/task_queue",
"system:rtc_export",
"task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_source_set("rtc_operations_chain") {
@@ -469,6 +480,9 @@ if (rtc_enable_libevent) {
":safe_conversions",
":timeutils",
"../api/task_queue",
+ "synchronization:mutex",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
]
@@ -489,9 +503,10 @@ if (is_mac || is_ios) {
":checks",
":logging",
"../api/task_queue",
+ "synchronization:mutex",
"system:gcd_helpers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
}
@@ -512,8 +527,9 @@ if (is_win) {
":safe_conversions",
":timeutils",
"../api/task_queue",
- "//third_party/abseil-cpp/absl/strings",
+ "synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
}
@@ -532,8 +548,9 @@ rtc_library("rtc_task_queue_stdlib") {
":safe_conversions",
":timeutils",
"../api/task_queue",
- "//third_party/abseil-cpp/absl/strings",
+ "synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("weak_ptr") {
@@ -576,6 +593,8 @@ rtc_library("rtc_numerics") {
"../api/units:data_rate",
"../api/units:time_delta",
"../api/units:timestamp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -760,6 +779,7 @@ rtc_library("rtc_base") {
deps = [
":checks",
":deprecation",
+ ":rtc_task_queue",
":stringutils",
"../api:array_view",
"../api:function_view",
@@ -767,12 +787,17 @@ rtc_library("rtc_base") {
"../api/task_queue",
"../system_wrappers:field_trial",
"network:sent_packet",
+ "synchronization:mutex",
+ "synchronization:sequence_checker",
"system:file_wrapper",
"system:inline",
"system:rtc_export",
+ "task_utils:pending_task_safety_flag",
"task_utils:to_queued_task",
"third_party/base64",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
@@ -801,6 +826,8 @@ rtc_library("rtc_base") {
"crypt_string.h",
"data_rate_limiter.cc",
"data_rate_limiter.h",
+ "deprecated/signal_thread.cc",
+ "deprecated/signal_thread.h",
"dscp.h",
"file_rotating_stream.cc",
"file_rotating_stream.h",
@@ -853,7 +880,6 @@ rtc_library("rtc_base") {
"rtc_certificate.h",
"rtc_certificate_generator.cc",
"rtc_certificate_generator.h",
- "signal_thread.cc",
"signal_thread.h",
"sigslot_repeater.h",
"socket.cc",
@@ -942,7 +968,7 @@ rtc_library("rtc_base") {
}
if (is_ios) {
- libs += [
+ frameworks = [
"CFNetwork.framework",
"Foundation.framework",
"Security.framework",
@@ -1000,8 +1026,8 @@ rtc_library("gunit_helpers") {
":rtc_base_tests_utils",
":stringutils",
"../test:test_support",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("testclient") {
@@ -1017,6 +1043,7 @@ rtc_library("testclient") {
":rtc_base",
":rtc_base_tests_utils",
":timeutils",
+ "synchronization:mutex",
]
}
@@ -1065,7 +1092,10 @@ rtc_library("rtc_base_tests_utils") {
"../api/units:time_delta",
"../api/units:timestamp",
"memory:fifo_buffer",
+ "synchronization:mutex",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
]
@@ -1087,8 +1117,8 @@ rtc_library("task_queue_for_test") {
"../api/task_queue",
"../api/task_queue:default_task_queue_factory",
"task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (rtc_include_tests) {
@@ -1100,6 +1130,7 @@ if (rtc_include_tests) {
":rtc_base",
":rtc_base_tests_utils",
"../test:test_support",
+ "synchronization:mutex",
"third_party/sigslot",
]
}
@@ -1128,8 +1159,8 @@ if (rtc_include_tests) {
"../test:test_support",
"third_party/sigslot",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (is_win) {
sources += [ "win32_socket_server_unittest.cc" ]
}
@@ -1149,7 +1180,7 @@ if (rtc_include_tests) {
"byte_order_unittest.cc",
"checks_unittest.cc",
"copy_on_write_buffer_unittest.cc",
- "critical_section_unittest.cc",
+ "deprecated/recursive_critical_section_unittest.cc",
"event_tracer_unittest.cc",
"event_unittest.cc",
"logging_unittest.cc",
@@ -1208,9 +1239,12 @@ if (rtc_include_tests) {
"../test:test_main",
"../test:test_support",
"memory:unittests",
+ "synchronization:mutex",
"task_utils:to_queued_task",
"third_party/base64",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
]
@@ -1228,8 +1262,8 @@ if (rtc_include_tests) {
":task_queue_for_test",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_operations_chain_unittests") {
@@ -1279,8 +1313,8 @@ if (rtc_include_tests) {
":rtc_numerics",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("rtc_json_unittests") {
@@ -1304,6 +1338,7 @@ if (rtc_include_tests) {
"callback_unittest.cc",
"crc32_unittest.cc",
"data_rate_limiter_unittest.cc",
+ "deprecated/signal_thread_unittest.cc",
"fake_clock_unittest.cc",
"helpers_unittest.cc",
"ip_address_unittest.cc",
@@ -1316,7 +1351,6 @@ if (rtc_include_tests) {
"rolling_accumulator_unittest.cc",
"rtc_certificate_generator_unittest.cc",
"rtc_certificate_unittest.cc",
- "signal_thread_unittest.cc",
"sigslot_tester_unittest.cc",
"test_client_unittest.cc",
"thread_unittest.cc",
@@ -1353,9 +1387,12 @@ if (rtc_include_tests) {
"../test:test_main",
"../test:test_support",
"memory:fifo_buffer",
+ "synchronization:mutex",
"synchronization:synchronization_unittests",
"task_utils:to_queued_task",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
diff --git a/rtc_base/DEPS b/rtc_base/DEPS
index 679d06dfc8..c9f7dc5898 100644
--- a/rtc_base/DEPS
+++ b/rtc_base/DEPS
@@ -1,8 +1,8 @@
include_rules = [
"+base/third_party/libevent",
"+json",
- "+third_party/jsoncpp",
"+system_wrappers",
+ "+third_party/jsoncpp",
]
specific_include_rules = {
diff --git a/rtc_base/async_invoker.cc b/rtc_base/async_invoker.cc
index 26f8c523ab..8b410a4561 100644
--- a/rtc_base/async_invoker.cc
+++ b/rtc_base/async_invoker.cc
@@ -101,28 +101,6 @@ void AsyncInvoker::DoInvokeDelayed(const Location& posted_from,
new ScopedMessageData<AsyncClosure>(std::move(closure)));
}
-GuardedAsyncInvoker::GuardedAsyncInvoker() : thread_(Thread::Current()) {
- thread_->SignalQueueDestroyed.connect(this,
- &GuardedAsyncInvoker::ThreadDestroyed);
-}
-
-GuardedAsyncInvoker::~GuardedAsyncInvoker() {}
-
-bool GuardedAsyncInvoker::Flush(uint32_t id) {
- CritScope cs(&crit_);
- if (thread_ == nullptr)
- return false;
- invoker_.Flush(thread_, id);
- return true;
-}
-
-void GuardedAsyncInvoker::ThreadDestroyed() {
- CritScope cs(&crit_);
- // We should never get more than one notification about the thread dying.
- RTC_DCHECK(thread_ != nullptr);
- thread_ = nullptr;
-}
-
AsyncClosure::AsyncClosure(AsyncInvoker* invoker)
: invoker_(invoker), invocation_complete_(invoker_->invocation_complete_) {
invoker_->pending_invocations_.fetch_add(1, std::memory_order_relaxed);
diff --git a/rtc_base/async_invoker.h b/rtc_base/async_invoker.h
index f15955d811..ed2df1cdcb 100644
--- a/rtc_base/async_invoker.h
+++ b/rtc_base/async_invoker.h
@@ -169,97 +169,6 @@ class AsyncInvoker : public MessageHandler {
RTC_DISALLOW_COPY_AND_ASSIGN(AsyncInvoker);
};
-// Similar to AsyncInvoker, but guards against the Thread being destroyed while
-// there are outstanding dangling pointers to it. It will connect to the current
-// thread in the constructor, and will get notified when that thread is
-// destroyed. After GuardedAsyncInvoker is constructed, it can be used from
-// other threads to post functors to the thread it was constructed on. If that
-// thread dies, any further calls to AsyncInvoke() will be safely ignored.
-class GuardedAsyncInvoker : public sigslot::has_slots<> {
- public:
- GuardedAsyncInvoker();
- ~GuardedAsyncInvoker() override;
-
- // Synchronously execute all outstanding calls we own, and wait for calls to
- // complete before returning. Optionally filter by message id. The destructor
- // will not wait for outstanding calls, so if that behavior is desired, call
- // Flush() first. Returns false if the thread has died.
- bool Flush(uint32_t id = MQID_ANY);
-
- // Call |functor| asynchronously with no callback upon completion. Returns
- // immediately. Returns false if the thread has died.
- template <class ReturnT, class FunctorT>
- bool AsyncInvoke(const Location& posted_from,
- FunctorT&& functor,
- uint32_t id = 0) {
- CritScope cs(&crit_);
- if (thread_ == nullptr)
- return false;
- invoker_.AsyncInvoke<ReturnT, FunctorT>(
- posted_from, thread_, std::forward<FunctorT>(functor), id);
- return true;
- }
-
- // Call |functor| asynchronously with |delay_ms|, with no callback upon
- // completion. Returns immediately. Returns false if the thread has died.
- template <class ReturnT, class FunctorT>
- bool AsyncInvokeDelayed(const Location& posted_from,
- FunctorT&& functor,
- uint32_t delay_ms,
- uint32_t id = 0) {
- CritScope cs(&crit_);
- if (thread_ == nullptr)
- return false;
- invoker_.AsyncInvokeDelayed<ReturnT, FunctorT>(
- posted_from, thread_, std::forward<FunctorT>(functor), delay_ms, id);
- return true;
- }
-
- // Call |functor| asynchronously, calling |callback| when done. Returns false
- // if the thread has died.
- template <class ReturnT, class FunctorT, class HostT>
- bool AsyncInvoke(const Location& posted_from,
- const Location& callback_posted_from,
- FunctorT&& functor,
- void (HostT::*callback)(ReturnT),
- HostT* callback_host,
- uint32_t id = 0) {
- CritScope cs(&crit_);
- if (thread_ == nullptr)
- return false;
- invoker_.AsyncInvoke<ReturnT, FunctorT, HostT>(
- posted_from, callback_posted_from, thread_,
- std::forward<FunctorT>(functor), callback, callback_host, id);
- return true;
- }
-
- // Call |functor| asynchronously calling |callback| when done. Overloaded for
- // void return. Returns false if the thread has died.
- template <class ReturnT, class FunctorT, class HostT>
- bool AsyncInvoke(const Location& posted_from,
- const Location& callback_posted_from,
- FunctorT&& functor,
- void (HostT::*callback)(),
- HostT* callback_host,
- uint32_t id = 0) {
- CritScope cs(&crit_);
- if (thread_ == nullptr)
- return false;
- invoker_.AsyncInvoke<ReturnT, FunctorT, HostT>(
- posted_from, callback_posted_from, thread_,
- std::forward<FunctorT>(functor), callback, callback_host, id);
- return true;
- }
-
- private:
- // Callback when |thread_| is destroyed.
- void ThreadDestroyed();
-
- CriticalSection crit_;
- Thread* thread_ RTC_GUARDED_BY(crit_);
- AsyncInvoker invoker_ RTC_GUARDED_BY(crit_);
-};
-
} // namespace rtc
#endif // RTC_BASE_ASYNC_INVOKER_H_
diff --git a/rtc_base/async_invoker_inl.h b/rtc_base/async_invoker_inl.h
index bd9b0d1aa1..6307afe220 100644
--- a/rtc_base/async_invoker_inl.h
+++ b/rtc_base/async_invoker_inl.h
@@ -13,7 +13,6 @@
#include "api/scoped_refptr.h"
#include "rtc_base/bind.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/ref_counted_object.h"
diff --git a/rtc_base/bit_buffer.cc b/rtc_base/bit_buffer.cc
index a6dc1c7ab8..540141fe52 100644
--- a/rtc_base/bit_buffer.cc
+++ b/rtc_base/bit_buffer.cc
@@ -162,6 +162,12 @@ bool BitBuffer::ConsumeBits(size_t bit_count) {
bool BitBuffer::ReadNonSymmetric(uint32_t* val, uint32_t num_values) {
RTC_DCHECK_GT(num_values, 0);
RTC_DCHECK_LE(num_values, uint32_t{1} << 31);
+ if (num_values == 1) {
+ // When there is only one possible value, it requires zero bits to store it.
+ // But ReadBits doesn't support reading zero bits.
+ *val = 0;
+ return true;
+ }
size_t count_bits = CountBits(num_values);
uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values;
@@ -308,6 +314,11 @@ bool BitBufferWriter::WriteBits(uint64_t val, size_t bit_count) {
bool BitBufferWriter::WriteNonSymmetric(uint32_t val, uint32_t num_values) {
RTC_DCHECK_LT(val, num_values);
RTC_DCHECK_LE(num_values, uint32_t{1} << 31);
+ if (num_values == 1) {
+ // When there is only one possible value, it requires zero bits to store it.
+ // But WriteBits doesn't support writing zero bits.
+ return true;
+ }
size_t count_bits = CountBits(num_values);
uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values;
diff --git a/rtc_base/bit_buffer_unittest.cc b/rtc_base/bit_buffer_unittest.cc
index b3521b4951..656682c2ef 100644
--- a/rtc_base/bit_buffer_unittest.cc
+++ b/rtc_base/bit_buffer_unittest.cc
@@ -142,7 +142,7 @@ TEST(BitBufferTest, ReadBits) {
EXPECT_FALSE(buffer.ReadBits(&val, 1));
}
-TEST(BitBufferTest, SetOffsetValues) {
+TEST(BitBufferDeathTest, SetOffsetValues) {
uint8_t bytes[4] = {0};
BitBufferWriter buffer(bytes, 4);
@@ -254,6 +254,28 @@ TEST(BitBufferWriterTest, NonSymmetricReadsMatchesWrites) {
EXPECT_THAT(values, ElementsAre(0, 1, 2, 3, 4, 5));
}
+TEST(BitBufferTest, ReadNonSymmetricOnlyValueConsumesNoBits) {
+ const uint8_t bytes[2] = {};
+ BitBuffer reader(bytes, 2);
+ uint32_t value = 0xFFFFFFFF;
+ ASSERT_EQ(reader.RemainingBitCount(), 16u);
+
+ EXPECT_TRUE(reader.ReadNonSymmetric(&value, /*num_values=*/1));
+
+ EXPECT_EQ(value, 0u);
+ EXPECT_EQ(reader.RemainingBitCount(), 16u);
+}
+
+TEST(BitBufferWriterTest, WriteNonSymmetricOnlyValueConsumesNoBits) {
+ uint8_t bytes[2] = {};
+ BitBufferWriter writer(bytes, 2);
+ ASSERT_EQ(writer.RemainingBitCount(), 16u);
+
+ EXPECT_TRUE(writer.WriteNonSymmetric(0, /*num_values=*/1));
+
+ EXPECT_EQ(writer.RemainingBitCount(), 16u);
+}
+
uint64_t GolombEncoded(uint32_t val) {
val++;
uint32_t bit_counter = val;
diff --git a/rtc_base/buffer.h b/rtc_base/buffer.h
index 3048b9179f..d1639e2f71 100644
--- a/rtc_base/buffer.h
+++ b/rtc_base/buffer.h
@@ -370,7 +370,9 @@ class BufferT {
: capacity;
std::unique_ptr<T[]> new_data(new T[new_capacity]);
- std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T));
+ if (data_ != nullptr) {
+ std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T));
+ }
MaybeZeroCompleteBuffer();
data_ = std::move(new_data);
capacity_ = new_capacity;
diff --git a/rtc_base/buffer_queue.cc b/rtc_base/buffer_queue.cc
index 445045ceea..adad9dda17 100644
--- a/rtc_base/buffer_queue.cc
+++ b/rtc_base/buffer_queue.cc
@@ -21,7 +21,7 @@ BufferQueue::BufferQueue(size_t capacity, size_t default_size)
: capacity_(capacity), default_size_(default_size) {}
BufferQueue::~BufferQueue() {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
for (Buffer* buffer : queue_) {
delete buffer;
@@ -32,12 +32,12 @@ BufferQueue::~BufferQueue() {
}
size_t BufferQueue::size() const {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return queue_.size();
}
void BufferQueue::Clear() {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
while (!queue_.empty()) {
free_list_.push_back(queue_.front());
queue_.pop_front();
@@ -45,7 +45,7 @@ void BufferQueue::Clear() {
}
bool BufferQueue::ReadFront(void* buffer, size_t bytes, size_t* bytes_read) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (queue_.empty()) {
return false;
}
@@ -69,7 +69,7 @@ bool BufferQueue::ReadFront(void* buffer, size_t bytes, size_t* bytes_read) {
bool BufferQueue::WriteBack(const void* buffer,
size_t bytes,
size_t* bytes_written) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (queue_.size() == capacity_) {
return false;
}
diff --git a/rtc_base/buffer_queue.h b/rtc_base/buffer_queue.h
index 5cb18d0220..29d1a5b136 100644
--- a/rtc_base/buffer_queue.h
+++ b/rtc_base/buffer_queue.h
@@ -18,7 +18,7 @@
#include "rtc_base/buffer.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace rtc {
@@ -52,9 +52,9 @@ class BufferQueue {
private:
size_t capacity_;
size_t default_size_;
- CriticalSection crit_;
- std::deque<Buffer*> queue_ RTC_GUARDED_BY(crit_);
- std::vector<Buffer*> free_list_ RTC_GUARDED_BY(crit_);
+ mutable webrtc::Mutex mutex_;
+ std::deque<Buffer*> queue_ RTC_GUARDED_BY(mutex_);
+ std::vector<Buffer*> free_list_ RTC_GUARDED_BY(mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue);
};
diff --git a/rtc_base/buffer_unittest.cc b/rtc_base/buffer_unittest.cc
index 3e7396dd2c..8beae43cf9 100644
--- a/rtc_base/buffer_unittest.cc
+++ b/rtc_base/buffer_unittest.cc
@@ -447,7 +447,7 @@ TEST(BufferTest, TestStruct) {
EXPECT_EQ(kObsidian, buf[2].stone);
}
-TEST(BufferTest, DieOnUseAfterMove) {
+TEST(BufferDeathTest, DieOnUseAfterMove) {
Buffer buf(17);
Buffer buf2 = std::move(buf);
EXPECT_EQ(buf2.size(), 17u);
diff --git a/rtc_base/checks.h b/rtc_base/checks.h
index 2fde3f6640..61c074ac82 100644
--- a/rtc_base/checks.h
+++ b/rtc_base/checks.h
@@ -69,7 +69,7 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg);
// the reason that it's better to terminate might simply be that the error
// handling code isn't in place yet; in production, the reason might be that
// the author of the code truly believes that x will always be true, but that
-// she recognizes that if she is wrong, abrupt and unpleasant process
+// they recognizes that if they are wrong, abrupt and unpleasant process
// termination is still better than carrying on with the assumption violated.
//
// RTC_CHECK always evaluates its argument, so it's OK for x to have side
diff --git a/rtc_base/checks_unittest.cc b/rtc_base/checks_unittest.cc
index e6e094e597..91e04cf6a1 100644
--- a/rtc_base/checks_unittest.cc
+++ b/rtc_base/checks_unittest.cc
@@ -19,7 +19,7 @@ TEST(ChecksTest, ExpressionNotEvaluatedWhenCheckPassing) {
}
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(ChecksTest, Checks) {
+TEST(ChecksDeathTest, Checks) {
#if RTC_CHECK_MSG_ENABLED
EXPECT_DEATH(FATAL() << "message",
"\n\n#\n"
diff --git a/rtc_base/critical_section.cc b/rtc_base/deprecated/recursive_critical_section.cc
index 1969edefa5..068b9aa808 100644
--- a/rtc_base/critical_section.cc
+++ b/rtc_base/deprecated/recursive_critical_section.cc
@@ -8,17 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include <time.h>
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/yield.h"
#include "rtc_base/system/unused.h"
-// TODO(tommi): Split this file up to per-platform implementation files.
-
#if RTC_DCHECK_IS_ON
#define RTC_CS_DEBUG_CODE(x) x
#else // !RTC_DCHECK_IS_ON
@@ -27,7 +26,7 @@
namespace rtc {
-CriticalSection::CriticalSection() {
+RecursiveCriticalSection::RecursiveCriticalSection() {
#if defined(WEBRTC_WIN)
InitializeCriticalSection(&crit_);
#elif defined(WEBRTC_POSIX)
@@ -42,7 +41,7 @@ CriticalSection::CriticalSection() {
pthread_mutexattr_settype(&mutex_attribute, PTHREAD_MUTEX_RECURSIVE);
#if defined(WEBRTC_MAC)
pthread_mutexattr_setpolicy_np(&mutex_attribute,
- _PTHREAD_MUTEX_POLICY_FAIRSHARE);
+ _PTHREAD_MUTEX_POLICY_FIRSTFIT);
#endif
pthread_mutex_init(&mutex_, &mutex_attribute);
pthread_mutexattr_destroy(&mutex_attribute);
@@ -56,7 +55,7 @@ CriticalSection::CriticalSection() {
#endif
}
-CriticalSection::~CriticalSection() {
+RecursiveCriticalSection::~RecursiveCriticalSection() {
#if defined(WEBRTC_WIN)
DeleteCriticalSection(&crit_);
#elif defined(WEBRTC_POSIX)
@@ -70,7 +69,7 @@ CriticalSection::~CriticalSection() {
#endif
}
-void CriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() {
+void RecursiveCriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() {
#if defined(WEBRTC_WIN)
EnterCriticalSection(&crit_);
#elif defined(WEBRTC_POSIX)
@@ -129,7 +128,8 @@ void CriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() {
#endif
}
-bool CriticalSection::TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+bool RecursiveCriticalSection::TryEnter() const
+ RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
#if defined(WEBRTC_WIN)
return TryEnterCriticalSection(&crit_) != FALSE;
#elif defined(WEBRTC_POSIX)
@@ -162,7 +162,7 @@ bool CriticalSection::TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
#endif
}
-void CriticalSection::Leave() const RTC_UNLOCK_FUNCTION() {
+void RecursiveCriticalSection::Leave() const RTC_UNLOCK_FUNCTION() {
RTC_DCHECK(CurrentThreadIsOwner());
#if defined(WEBRTC_WIN)
LeaveCriticalSection(&crit_);
@@ -190,7 +190,7 @@ void CriticalSection::Leave() const RTC_UNLOCK_FUNCTION() {
#endif
}
-bool CriticalSection::CurrentThreadIsOwner() const {
+bool RecursiveCriticalSection::CurrentThreadIsOwner() const {
#if defined(WEBRTC_WIN)
// OwningThread has type HANDLE but actually contains the Thread ID:
// http://stackoverflow.com/questions/12675301/why-is-the-owningthread-member-of-critical-section-of-type-handle-when-it-is-de
@@ -209,41 +209,11 @@ bool CriticalSection::CurrentThreadIsOwner() const {
#endif
}
-CritScope::CritScope(const CriticalSection* cs) : cs_(cs) {
+CritScope::CritScope(const RecursiveCriticalSection* cs) : cs_(cs) {
cs_->Enter();
}
CritScope::~CritScope() {
cs_->Leave();
}
-void GlobalLock::Lock() {
-#if !defined(WEBRTC_WIN) && \
- (!defined(WEBRTC_MAC) || RTC_USE_NATIVE_MUTEX_ON_MAC)
- const struct timespec ts_null = {0};
-#endif
-
- while (AtomicOps::CompareAndSwap(&lock_acquired_, 0, 1)) {
-#if defined(WEBRTC_WIN)
- ::Sleep(0);
-#elif defined(WEBRTC_MAC) && !RTC_USE_NATIVE_MUTEX_ON_MAC
- sched_yield();
-#else
- nanosleep(&ts_null, nullptr);
-#endif
- }
-}
-
-void GlobalLock::Unlock() {
- int old_value = AtomicOps::CompareAndSwap(&lock_acquired_, 1, 0);
- RTC_DCHECK_EQ(1, old_value) << "Unlock called without calling Lock first";
-}
-
-GlobalLockScope::GlobalLockScope(GlobalLock* lock) : lock_(lock) {
- lock_->Lock();
-}
-
-GlobalLockScope::~GlobalLockScope() {
- lock_->Unlock();
-}
-
} // namespace rtc
diff --git a/rtc_base/critical_section.h b/rtc_base/deprecated/recursive_critical_section.h
index cf10463bdf..c044c732b9 100644
--- a/rtc_base/critical_section.h
+++ b/rtc_base/deprecated/recursive_critical_section.h
@@ -8,13 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef RTC_BASE_CRITICAL_SECTION_H_
-#define RTC_BASE_CRITICAL_SECTION_H_
+#ifndef RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_
+#define RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_
-#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/platform_thread_types.h"
-#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
#if defined(WEBRTC_WIN)
@@ -43,13 +41,18 @@
namespace rtc {
+// NOTE: This class is deprecated. Please use webrtc::Mutex instead!
+// Search using https://www.google.com/?q=recursive+lock+considered+harmful
+// to find the reasons.
+//
// Locking methods (Enter, TryEnter, Leave)are const to permit protecting
-// members inside a const context without requiring mutable CriticalSections
-// everywhere. CriticalSection is reentrant lock.
-class RTC_LOCKABLE RTC_EXPORT CriticalSection {
+// members inside a const context without requiring mutable
+// RecursiveCriticalSections everywhere. RecursiveCriticalSection is
+// reentrant lock.
+class RTC_LOCKABLE RecursiveCriticalSection {
public:
- CriticalSection();
- ~CriticalSection();
+ RecursiveCriticalSection();
+ ~RecursiveCriticalSection();
void Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION();
bool TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true);
@@ -87,37 +90,15 @@ class RTC_LOCKABLE RTC_EXPORT CriticalSection {
// CritScope, for serializing execution through a scope.
class RTC_SCOPED_LOCKABLE CritScope {
public:
- explicit CritScope(const CriticalSection* cs) RTC_EXCLUSIVE_LOCK_FUNCTION(cs);
+ explicit CritScope(const RecursiveCriticalSection* cs)
+ RTC_EXCLUSIVE_LOCK_FUNCTION(cs);
~CritScope() RTC_UNLOCK_FUNCTION();
private:
- const CriticalSection* const cs_;
+ const RecursiveCriticalSection* const cs_;
RTC_DISALLOW_COPY_AND_ASSIGN(CritScope);
};
-// A lock used to protect global variables. Do NOT use for other purposes.
-class RTC_LOCKABLE GlobalLock {
- public:
- constexpr GlobalLock() : lock_acquired_(0) {}
-
- void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION();
- void Unlock() RTC_UNLOCK_FUNCTION();
-
- private:
- volatile int lock_acquired_;
-};
-
-// GlobalLockScope, for serializing execution through a scope.
-class RTC_SCOPED_LOCKABLE GlobalLockScope {
- public:
- explicit GlobalLockScope(GlobalLock* lock) RTC_EXCLUSIVE_LOCK_FUNCTION(lock);
- ~GlobalLockScope() RTC_UNLOCK_FUNCTION();
-
- private:
- GlobalLock* const lock_;
- RTC_DISALLOW_COPY_AND_ASSIGN(GlobalLockScope);
-};
-
} // namespace rtc
-#endif // RTC_BASE_CRITICAL_SECTION_H_
+#endif // RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_
diff --git a/rtc_base/critical_section_unittest.cc b/rtc_base/deprecated/recursive_critical_section_unittest.cc
index 16aefd2740..22c2655b3d 100644
--- a/rtc_base/critical_section_unittest.cc
+++ b/rtc_base/deprecated/recursive_critical_section_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include <stddef.h>
#include <stdint.h>
@@ -124,7 +124,7 @@ class RTC_LOCKABLE CriticalSectionLock {
void Unlock() RTC_UNLOCK_FUNCTION() { cs_.Leave(); }
private:
- CriticalSection cs_;
+ RecursiveCriticalSection cs_;
};
template <class Lock>
@@ -183,7 +183,7 @@ class AtomicOpRunner : public RunnerBase {
}
private:
- CriticalSection all_values_crit_;
+ RecursiveCriticalSection all_values_crit_;
Verifier verifier_;
};
@@ -282,26 +282,7 @@ TEST(AtomicOpsTest, CompareAndSwap) {
EXPECT_EQ(1, runner.shared_value());
}
-TEST(GlobalLockTest, CanHaveStaticStorageDuration) {
- static_assert(std::is_trivially_destructible<GlobalLock>::value, "");
- ABSL_CONST_INIT static GlobalLock global_lock;
- global_lock.Lock();
- global_lock.Unlock();
-}
-
-TEST(GlobalLockTest, Basic) {
- // Create and start lots of threads.
- LockRunner<GlobalLock> runner;
- std::vector<std::unique_ptr<Thread>> threads;
- StartThreads(&threads, &runner);
- runner.SetExpectedThreadCount(kNumThreads);
-
- // Release the hounds!
- EXPECT_TRUE(runner.Run());
- EXPECT_EQ(0, runner.shared_value());
-}
-
-TEST(CriticalSectionTest, Basic) {
+TEST(RecursiveCriticalSectionTest, Basic) {
// Create and start lots of threads.
LockRunner<CriticalSectionLock> runner;
std::vector<std::unique_ptr<Thread>> threads;
@@ -339,7 +320,7 @@ class PerfTestData {
private:
uint8_t cache_line_barrier_1_[64];
- CriticalSection lock_;
+ RecursiveCriticalSection lock_;
uint8_t cache_line_barrier_2_[64];
int64_t my_counter_ = 0;
const int expected_count_;
@@ -391,7 +372,7 @@ class PerfTestThread {
// user 1m20.575s
// sys 3m48.872s
// Unit test output:
-// [ OK ] CriticalSectionTest.Performance (294375 ms)
+// [ OK ] RecursiveCriticalSectionTest.Performance (294375 ms)
//
// Native mutex implementation using first fit policy (current macOS default):
// Approximate CPU usage:
@@ -399,7 +380,7 @@ class PerfTestThread {
// user 0m12.738s
// sys 0m31.207s
// Unit test output:
-// [ OK ] CriticalSectionTest.Performance (11444 ms)
+// [ OK ] RecursiveCriticalSectionTest.Performance (11444 ms)
//
// Special partially spin lock based implementation:
// Approximate CPU usage:
@@ -407,10 +388,10 @@ class PerfTestThread {
// user 0m3.014s
// sys 0m4.495s
// Unit test output:
-// [ OK ] CriticalSectionTest.Performance (1885 ms)
+// [ OK ] RecursiveCriticalSectionTest.Performance (1885 ms)
//
// The test is disabled by default to avoid unecessarily loading the bots.
-TEST(CriticalSectionTest, DISABLED_Performance) {
+TEST(RecursiveCriticalSectionTest, DISABLED_Performance) {
PerfTestThread threads[8];
Event event;
diff --git a/rtc_base/signal_thread.cc b/rtc_base/deprecated/signal_thread.cc
index e100fbe179..96bdd65155 100644
--- a/rtc_base/signal_thread.cc
+++ b/rtc_base/deprecated/signal_thread.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "rtc_base/signal_thread.h"
+#include "rtc_base/deprecated/signal_thread.h"
#include <memory>
@@ -23,26 +23,30 @@ namespace rtc {
// SignalThread
///////////////////////////////////////////////////////////////////////////////
-SignalThread::SignalThread()
+DEPRECATED_SignalThread::DEPRECATED_SignalThread()
: main_(Thread::Current()), worker_(this), state_(kInit), refcount_(1) {
- main_->SignalQueueDestroyed.connect(this,
- &SignalThread::OnMainThreadDestroyed);
+ main_->SignalQueueDestroyed.connect(
+ this, &DEPRECATED_SignalThread::OnMainThreadDestroyed);
worker_.SetName("SignalThread", this);
}
-SignalThread::~SignalThread() {
+DEPRECATED_SignalThread::~DEPRECATED_SignalThread() {
+ rtc::CritScope lock(&cs_);
RTC_DCHECK(refcount_ == 0);
}
-bool SignalThread::SetName(const std::string& name, const void* obj) {
+bool DEPRECATED_SignalThread::SetName(const std::string& name,
+ const void* obj) {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
RTC_DCHECK(kInit == state_);
return worker_.SetName(name, obj);
}
-void SignalThread::Start() {
+void DEPRECATED_SignalThread::Start() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
if (kInit == state_ || kComplete == state_) {
state_ = kRunning;
@@ -53,9 +57,13 @@ void SignalThread::Start() {
}
}
-void SignalThread::Destroy(bool wait) {
+void DEPRECATED_SignalThread::Destroy(bool wait) {
EnterExit ee(this);
- RTC_DCHECK(main_->IsCurrent());
+ // Sometimes the caller can't guarantee which thread will call Destroy, only
+ // that it will be the last thing it does.
+ // RTC_DCHECK(main_->IsCurrent());
+ RTC_DCHECK(!destroy_called_);
+ destroy_called_ = true;
if ((kInit == state_) || (kComplete == state_)) {
refcount_--;
} else if (kRunning == state_ || kReleasing == state_) {
@@ -76,8 +84,9 @@ void SignalThread::Destroy(bool wait) {
}
}
-void SignalThread::Release() {
+void DEPRECATED_SignalThread::Release() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
if (kComplete == state_) {
refcount_--;
@@ -89,13 +98,14 @@ void SignalThread::Release() {
}
}
-bool SignalThread::ContinueWork() {
+bool DEPRECATED_SignalThread::ContinueWork() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(worker_.IsCurrent());
return worker_.ProcessMessages(0);
}
-void SignalThread::OnMessage(Message* msg) {
+void DEPRECATED_SignalThread::OnMessage(Message* msg) {
EnterExit ee(this);
if (ST_MSG_WORKER_DONE == msg->message_id) {
RTC_DCHECK(main_->IsCurrent());
@@ -126,21 +136,21 @@ void SignalThread::OnMessage(Message* msg) {
}
}
-SignalThread::Worker::Worker(SignalThread* parent)
+DEPRECATED_SignalThread::Worker::Worker(DEPRECATED_SignalThread* parent)
: Thread(std::make_unique<NullSocketServer>(), /*do_init=*/false),
parent_(parent) {
DoInit();
}
-SignalThread::Worker::~Worker() {
+DEPRECATED_SignalThread::Worker::~Worker() {
Stop();
}
-void SignalThread::Worker::Run() {
+void DEPRECATED_SignalThread::Worker::Run() {
parent_->Run();
}
-void SignalThread::Run() {
+void DEPRECATED_SignalThread::Run() {
DoWork();
{
EnterExit ee(this);
@@ -150,12 +160,12 @@ void SignalThread::Run() {
}
}
-void SignalThread::OnMainThreadDestroyed() {
+void DEPRECATED_SignalThread::OnMainThreadDestroyed() {
EnterExit ee(this);
main_ = nullptr;
}
-bool SignalThread::Worker::IsProcessingMessagesForTesting() {
+bool DEPRECATED_SignalThread::Worker::IsProcessingMessagesForTesting() {
return false;
}
diff --git a/rtc_base/deprecated/signal_thread.h b/rtc_base/deprecated/signal_thread.h
new file mode 100644
index 0000000000..3612f5a1ca
--- /dev/null
+++ b/rtc_base/deprecated/signal_thread.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
+#define RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
+
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
+#include "rtc_base/deprecation.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// NOTE: this class has been deprecated. Do not use for new code. New code
+// should use factilities exposed by api/task_queue/ instead.
+//
+// SignalThread - Base class for worker threads. The main thread should call
+// Start() to begin work, and then follow one of these models:
+// Normal: Wait for SignalWorkDone, and then call Release to destroy.
+// Cancellation: Call Release(true), to abort the worker thread.
+// Fire-and-forget: Call Release(false), which allows the thread to run to
+// completion, and then self-destruct without further notification.
+// Periodic tasks: Wait for SignalWorkDone, then eventually call Start()
+// again to repeat the task. When the instance isn't needed anymore,
+// call Release. DoWork, OnWorkStart and OnWorkStop are called again,
+// on a new thread.
+// The subclass should override DoWork() to perform the background task. By
+// periodically calling ContinueWork(), it can check for cancellation.
+// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work
+// tasks in the context of the main thread.
+///////////////////////////////////////////////////////////////////////////////
+
+class DEPRECATED_SignalThread : public sigslot::has_slots<>,
+ protected MessageHandler {
+ public:
+ DEPRECATED_SignalThread();
+
+ // Context: Main Thread. Call before Start to change the worker's name.
+ bool SetName(const std::string& name, const void* obj);
+
+ // Context: Main Thread. Call to begin the worker thread.
+ void Start();
+
+ // Context: Main Thread. If the worker thread is not running, deletes the
+ // object immediately. Otherwise, asks the worker thread to abort processing,
+ // and schedules the object to be deleted once the worker exits.
+ // SignalWorkDone will not be signalled. If wait is true, does not return
+ // until the thread is deleted.
+ void Destroy(bool wait);
+
+ // Context: Main Thread. If the worker thread is complete, deletes the
+ // object immediately. Otherwise, schedules the object to be deleted once
+ // the worker thread completes. SignalWorkDone will be signalled.
+ void Release();
+
+ // Context: Main Thread. Signalled when work is complete.
+ sigslot::signal1<DEPRECATED_SignalThread*> SignalWorkDone;
+
+ enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE };
+
+ protected:
+ ~DEPRECATED_SignalThread() override;
+
+ Thread* worker() { return &worker_; }
+
+ // Context: Main Thread. Subclass should override to do pre-work setup.
+ virtual void OnWorkStart() {}
+
+ // Context: Worker Thread. Subclass should override to do work.
+ virtual void DoWork() = 0;
+
+ // Context: Worker Thread. Subclass should call periodically to
+ // dispatch messages and determine if the thread should terminate.
+ bool ContinueWork();
+
+ // Context: Worker Thread. Subclass should override when extra work is
+ // needed to abort the worker thread.
+ virtual void OnWorkStop() {}
+
+ // Context: Main Thread. Subclass should override to do post-work cleanup.
+ virtual void OnWorkDone() {}
+
+ // Context: Any Thread. If subclass overrides, be sure to call the base
+ // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE)
+ void OnMessage(Message* msg) override;
+
+ private:
+ enum State {
+ kInit, // Initialized, but not started
+ kRunning, // Started and doing work
+ kReleasing, // Same as running, but to be deleted when work is done
+ kComplete, // Work is done
+ kStopping, // Work is being interrupted
+ };
+
+ class Worker : public Thread {
+ public:
+ explicit Worker(DEPRECATED_SignalThread* parent);
+ ~Worker() override;
+ void Run() override;
+ bool IsProcessingMessagesForTesting() override;
+
+ private:
+ DEPRECATED_SignalThread* parent_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker);
+ };
+
+ class RTC_SCOPED_LOCKABLE EnterExit {
+ public:
+ explicit EnterExit(DEPRECATED_SignalThread* t)
+ RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_)
+ : t_(t) {
+ t_->cs_.Enter();
+ // If refcount_ is zero then the object has already been deleted and we
+ // will be double-deleting it in ~EnterExit()! (shouldn't happen)
+ RTC_DCHECK_NE(0, t_->refcount_);
+ ++t_->refcount_;
+ }
+ ~EnterExit() RTC_UNLOCK_FUNCTION() {
+ bool d = (0 == --t_->refcount_);
+ t_->cs_.Leave();
+ if (d)
+ delete t_;
+ }
+
+ private:
+ DEPRECATED_SignalThread* t_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit);
+ };
+
+ void Run();
+ void OnMainThreadDestroyed();
+
+ Thread* main_;
+ Worker worker_;
+ RecursiveCriticalSection cs_;
+ State state_ RTC_GUARDED_BY(cs_);
+ int refcount_ RTC_GUARDED_BY(cs_);
+ bool destroy_called_ RTC_GUARDED_BY(cs_) = false;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_SignalThread);
+};
+
+typedef RTC_DEPRECATED DEPRECATED_SignalThread SignalThread;
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
diff --git a/rtc_base/signal_thread_unittest.cc b/rtc_base/deprecated/signal_thread_unittest.cc
index 14761865b8..f5a49aad63 100644
--- a/rtc_base/signal_thread_unittest.cc
+++ b/rtc_base/deprecated/signal_thread_unittest.cc
@@ -13,9 +13,9 @@
#include <memory>
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/gunit.h"
#include "rtc_base/null_socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
#include "test/gtest.h"
@@ -28,9 +28,9 @@ static const int kTimeout = 10000;
class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> {
public:
- class SlowSignalThread : public SignalThread {
+ class SlowSignalThread : public DEPRECATED_SignalThread {
public:
- SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {}
+ explicit SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {}
~SlowSignalThread() override {
EXPECT_EQ(harness_->main_thread_, Thread::Current());
@@ -70,7 +70,7 @@ class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> {
RTC_DISALLOW_COPY_AND_ASSIGN(SlowSignalThread);
};
- void OnWorkComplete(rtc::SignalThread* thread) {
+ void OnWorkComplete(rtc::DEPRECATED_SignalThread* thread) {
SlowSignalThread* t = static_cast<SlowSignalThread*>(thread);
EXPECT_EQ(t->harness(), this);
EXPECT_EQ(main_thread_, Thread::Current());
@@ -148,23 +148,23 @@ class OwnerThread : public Thread, public sigslot::has_slots<> {
// Delete |signal_thread|.
signal_thread->Destroy(true);
{
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
has_run_ = true;
}
}
bool has_run() {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return has_run_;
}
- void OnWorkDone(SignalThread* /*signal_thread*/) {
+ void OnWorkDone(DEPRECATED_SignalThread* /*signal_thread*/) {
FAIL() << " This shouldn't get called.";
}
private:
- rtc::CriticalSection crit_;
+ webrtc::Mutex mutex_;
SignalThreadTest* harness_;
- bool has_run_ RTC_GUARDED_BY(crit_);
+ bool has_run_ RTC_GUARDED_BY(mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(OwnerThread);
};
diff --git a/rtc_base/event_tracer.cc b/rtc_base/event_tracer.cc
index d23af21421..3af8183b1f 100644
--- a/rtc_base/event_tracer.cc
+++ b/rtc_base/event_tracer.cc
@@ -19,11 +19,11 @@
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/time_utils.h"
@@ -120,7 +120,7 @@ class EventLogger final {
arg.value.as_string = str_copy;
}
}
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
trace_events_.push_back(
{name, category_enabled, phase, args, timestamp, 1, thread_id});
}
@@ -136,7 +136,7 @@ class EventLogger final {
bool shutting_down = shutdown_event_.Wait(kLoggingIntervalMs);
std::vector<TraceEvent> events;
{
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
trace_events_.swap(events);
}
std::string args_str;
@@ -196,7 +196,7 @@ class EventLogger final {
output_file_ = file;
output_file_owned_ = owned;
{
- rtc::CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
// Since the atomic fast-path for adding events to the queue can be
// bypassed while the logging thread is shutting down there may be some
// stale events in the queue, hence the vector needs to be cleared to not
@@ -317,8 +317,8 @@ class EventLogger final {
return output;
}
- rtc::CriticalSection crit_;
- std::vector<TraceEvent> trace_events_ RTC_GUARDED_BY(crit_);
+ webrtc::Mutex mutex_;
+ std::vector<TraceEvent> trace_events_ RTC_GUARDED_BY(mutex_);
rtc::PlatformThread logging_thread_;
rtc::Event shutdown_event_;
rtc::ThreadChecker thread_checker_;
diff --git a/rtc_base/event_tracer_unittest.cc b/rtc_base/event_tracer_unittest.cc
index 79cc9c0788..f4d41e4e7c 100644
--- a/rtc_base/event_tracer_unittest.cc
+++ b/rtc_base/event_tracer_unittest.cc
@@ -10,7 +10,7 @@
#include "rtc_base/event_tracer.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/trace_event.h"
#include "test/gtest.h"
@@ -20,17 +20,17 @@ namespace {
class TestStatistics {
public:
void Reset() {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
events_logged_ = 0;
}
void Increment() {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
++events_logged_;
}
int Count() const {
- rtc::CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return events_logged_;
}
@@ -41,8 +41,8 @@ class TestStatistics {
}
private:
- rtc::CriticalSection crit_;
- int events_logged_ RTC_GUARDED_BY(crit_) = 0;
+ mutable webrtc::Mutex mutex_;
+ int events_logged_ RTC_GUARDED_BY(mutex_) = 0;
};
} // namespace
diff --git a/rtc_base/experiments/BUILD.gn b/rtc_base/experiments/BUILD.gn
index bb3e0ce8ae..282b5b9270 100644
--- a/rtc_base/experiments/BUILD.gn
+++ b/rtc_base/experiments/BUILD.gn
@@ -17,8 +17,8 @@ rtc_library("alr_experiment") {
"../:rtc_base_approved",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("field_trial_parser") {
@@ -40,6 +40,8 @@ rtc_library("field_trial_parser") {
"../../rtc_base:logging",
"../../rtc_base:safe_conversions",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings:strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -57,8 +59,8 @@ rtc_library("quality_rampup_experiment") {
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("quality_scaler_settings") {
@@ -72,8 +74,8 @@ rtc_library("quality_scaler_settings") {
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("quality_scaling_experiment") {
@@ -85,8 +87,8 @@ rtc_library("quality_scaling_experiment") {
"../:rtc_base_approved",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("normalize_simulcast_size_experiment") {
@@ -97,8 +99,8 @@ rtc_library("normalize_simulcast_size_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("balanced_degradation_settings") {
@@ -111,8 +113,8 @@ rtc_library("balanced_degradation_settings") {
"../:rtc_base_approved",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("cpu_speed_experiment") {
@@ -123,8 +125,8 @@ rtc_library("cpu_speed_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtt_mult_experiment") {
@@ -135,8 +137,8 @@ rtc_library("rtt_mult_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("jitter_upper_bound_experiment") {
@@ -147,8 +149,8 @@ rtc_library("jitter_upper_bound_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rate_control_settings") {
@@ -164,6 +166,8 @@ rtc_library("rate_control_settings") {
"../../api/units:data_size",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -178,8 +182,8 @@ rtc_library("keyframe_interval_settings_experiment") {
":field_trial_parser",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("stable_target_rate_experiment") {
@@ -192,8 +196,8 @@ rtc_library("stable_target_rate_experiment") {
":rate_control_settings",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("min_video_bitrate_experiment") {
@@ -208,8 +212,8 @@ rtc_library("min_video_bitrate_experiment") {
"../../rtc_base:checks",
"../../rtc_base:logging",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -255,7 +259,7 @@ if (rtc_include_tests) {
"../../test:field_trial",
"../../test:test_main",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/rtc_base/experiments/quality_rampup_experiment.cc b/rtc_base/experiments/quality_rampup_experiment.cc
index caf7e62368..ee6675c924 100644
--- a/rtc_base/experiments/quality_rampup_experiment.cc
+++ b/rtc_base/experiments/quality_rampup_experiment.cc
@@ -70,4 +70,8 @@ bool QualityRampupExperiment::BwHigh(int64_t now_ms,
return (now_ms - *start_ms_) >= min_duration_ms_.Value();
}
+bool QualityRampupExperiment::Enabled() const {
+ return min_pixels_ || min_duration_ms_ || max_bitrate_kbps_;
+}
+
} // namespace webrtc
diff --git a/rtc_base/experiments/quality_rampup_experiment.h b/rtc_base/experiments/quality_rampup_experiment.h
index ff9d7d38e5..9d46901104 100644
--- a/rtc_base/experiments/quality_rampup_experiment.h
+++ b/rtc_base/experiments/quality_rampup_experiment.h
@@ -33,6 +33,8 @@ class QualityRampupExperiment final {
// (max_bitrate_factor_) above |max_bitrate_kbps_| for |min_duration_ms_|.
bool BwHigh(int64_t now_ms, uint32_t available_bw_kbps);
+ bool Enabled() const;
+
private:
explicit QualityRampupExperiment(
const WebRtcKeyValueConfig* const key_value_config);
diff --git a/rtc_base/fake_clock.cc b/rtc_base/fake_clock.cc
index e242e8e659..652a5afa3a 100644
--- a/rtc_base/fake_clock.cc
+++ b/rtc_base/fake_clock.cc
@@ -16,18 +16,18 @@
namespace rtc {
int64_t FakeClock::TimeNanos() const {
- CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
return time_ns_;
}
void FakeClock::SetTime(webrtc::Timestamp new_time) {
- CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
RTC_DCHECK(new_time.us() * 1000 >= time_ns_);
time_ns_ = new_time.us() * 1000;
}
void FakeClock::AdvanceTime(webrtc::TimeDelta delta) {
- CritScope cs(&lock_);
+ webrtc::MutexLock lock(&lock_);
time_ns_ += delta.ns();
}
diff --git a/rtc_base/fake_clock.h b/rtc_base/fake_clock.h
index 0ab9a937a8..edb507becb 100644
--- a/rtc_base/fake_clock.h
+++ b/rtc_base/fake_clock.h
@@ -15,7 +15,7 @@
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
@@ -43,7 +43,7 @@ class FakeClock : public ClockInterface {
void AdvanceTime(webrtc::TimeDelta delta);
private:
- CriticalSection lock_;
+ mutable webrtc::Mutex lock_;
int64_t time_ns_ RTC_GUARDED_BY(lock_) = 0;
};
diff --git a/rtc_base/firewall_socket_server.cc b/rtc_base/firewall_socket_server.cc
index fc7917613c..8f44753760 100644
--- a/rtc_base/firewall_socket_server.cc
+++ b/rtc_base/firewall_socket_server.cc
@@ -163,19 +163,19 @@ void FirewallSocketServer::AddRule(bool allow,
r.p = p;
r.src = src;
r.dst = dst;
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
rules_.push_back(r);
}
void FirewallSocketServer::ClearRules() {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
rules_.clear();
}
bool FirewallSocketServer::Check(FirewallProtocol p,
const SocketAddress& src,
const SocketAddress& dst) {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
for (size_t i = 0; i < rules_.size(); ++i) {
const Rule& r = rules_[i];
if ((r.p != p) && (r.p != FP_ANY))
@@ -239,12 +239,12 @@ FirewallManager::~FirewallManager() {
}
void FirewallManager::AddServer(FirewallSocketServer* server) {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
servers_.push_back(server);
}
void FirewallManager::RemoveServer(FirewallSocketServer* server) {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
servers_.erase(std::remove(servers_.begin(), servers_.end(), server),
servers_.end());
}
@@ -253,7 +253,7 @@ void FirewallManager::AddRule(bool allow,
FirewallProtocol p,
FirewallDirection d,
const SocketAddress& addr) {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
for (std::vector<FirewallSocketServer*>::const_iterator it = servers_.begin();
it != servers_.end(); ++it) {
(*it)->AddRule(allow, p, d, addr);
@@ -261,7 +261,7 @@ void FirewallManager::AddRule(bool allow,
}
void FirewallManager::ClearRules() {
- CritScope scope(&crit_);
+ webrtc::MutexLock scope(&mutex_);
for (std::vector<FirewallSocketServer*>::const_iterator it = servers_.begin();
it != servers_.end(); ++it) {
(*it)->ClearRules();
diff --git a/rtc_base/firewall_socket_server.h b/rtc_base/firewall_socket_server.h
index d174033e01..23b91d6ad3 100644
--- a/rtc_base/firewall_socket_server.h
+++ b/rtc_base/firewall_socket_server.h
@@ -14,11 +14,11 @@
#include <vector>
#include "rtc_base/async_socket.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ip_address.h"
#include "rtc_base/socket.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
namespace rtc {
@@ -90,7 +90,7 @@ class FirewallSocketServer : public SocketServer {
private:
SocketServer* server_;
FirewallManager* manager_;
- CriticalSection crit_;
+ webrtc::Mutex mutex_;
struct Rule {
bool allow;
FirewallProtocol p;
@@ -123,7 +123,7 @@ class FirewallManager {
void ClearRules();
private:
- CriticalSection crit_;
+ webrtc::Mutex mutex_;
std::vector<FirewallSocketServer*> servers_;
};
diff --git a/rtc_base/logging.cc b/rtc_base/logging.cc
index ff7369dd5c..d07a7e75e7 100644
--- a/rtc_base/logging.cc
+++ b/rtc_base/logging.cc
@@ -42,11 +42,11 @@ static const int kMaxLogLineSize = 1024 - 60;
#include "absl/base/attributes.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread_types.h"
#include "rtc_base/string_encode.h"
#include "rtc_base/string_utils.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
@@ -72,7 +72,9 @@ const char* FilenameFromPath(const char* file) {
}
// Global lock for log subsystem, only needed to serialize access to streams_.
-CriticalSection g_log_crit;
+// TODO(bugs.webrtc.org/11665): this is not currently constant initialized and
+// trivially destructible.
+webrtc::Mutex g_log_mutex_;
} // namespace
/////////////////////////////////////////////////////////////////////////////
@@ -85,8 +87,9 @@ bool LogMessage::log_to_stderr_ = true;
// Note: we explicitly do not clean this up, because of the uncertain ordering
// of destructors at program exit. Let the person who sets the stream trigger
// cleanup by setting to null, or let it leak (safe at program exit).
-ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_crit) =
+ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_mutex_) =
nullptr;
+ABSL_CONST_INIT std::atomic<bool> LogMessage::streams_empty_ = {true};
// Boolean options default to false (0)
bool LogMessage::thread_, LogMessage::timestamp_;
@@ -193,7 +196,7 @@ LogMessage::~LogMessage() {
#endif
}
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
if (severity_ >= entry->min_severity_) {
#if defined(WEBRTC_ANDROID)
@@ -242,7 +245,7 @@ void LogMessage::LogTimestamps(bool on) {
void LogMessage::LogToDebug(LoggingSeverity min_sev) {
g_dbg_sev = min_sev;
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
UpdateMinLogSeverity();
}
@@ -251,7 +254,7 @@ void LogMessage::SetLogToStderr(bool log_to_stderr) {
}
int LogMessage::GetLogToStream(LogSink* stream) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
LoggingSeverity sev = LS_NONE;
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
if (stream == nullptr || stream == entry) {
@@ -262,15 +265,16 @@ int LogMessage::GetLogToStream(LogSink* stream) {
}
void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
stream->min_severity_ = min_sev;
stream->next_ = streams_;
streams_ = stream;
+ streams_empty_.store(false, std::memory_order_relaxed);
UpdateMinLogSeverity();
}
void LogMessage::RemoveLogToStream(LogSink* stream) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
for (LogSink** entry = &streams_; *entry != nullptr;
entry = &(*entry)->next_) {
if (*entry == stream) {
@@ -278,6 +282,7 @@ void LogMessage::RemoveLogToStream(LogSink* stream) {
break;
}
}
+ streams_empty_.store(streams_ == nullptr, std::memory_order_relaxed);
UpdateMinLogSeverity();
}
@@ -331,7 +336,7 @@ void LogMessage::ConfigureLogging(const char* params) {
}
void LogMessage::UpdateMinLogSeverity()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_crit) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_mutex_) {
LoggingSeverity min_sev = g_dbg_sev;
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
min_sev = std::min(min_sev, entry->min_severity_);
@@ -435,12 +440,7 @@ void LogMessage::OutputToDebug(const std::string& str,
bool LogMessage::IsNoop(LoggingSeverity severity) {
if (severity >= g_dbg_sev || severity >= g_min_sev)
return false;
-
- // TODO(tommi): We're grabbing this lock for every LogMessage instance that
- // is going to be logged. This introduces unnecessary synchronization for
- // a feature that's mostly used for testing.
- CritScope cs(&g_log_crit);
- return streams_ == nullptr;
+ return streams_empty_.load(std::memory_order_relaxed);
}
void LogMessage::FinishPrintStream() {
diff --git a/rtc_base/logging.h b/rtc_base/logging.h
index 0aa1e676d1..0852c06182 100644
--- a/rtc_base/logging.h
+++ b/rtc_base/logging.h
@@ -46,6 +46,7 @@
#include <errno.h>
+#include <atomic>
#include <sstream> // no-presubmit-check TODO(webrtc:8982)
#include <string>
#include <utility>
@@ -463,9 +464,14 @@ class LogMessage {
static void SetLogToStderr(bool log_to_stderr);
// Stream: Any non-blocking stream interface.
// Installs the |stream| to collect logs with severtiy |min_sev| or higher.
- // |stream| must live until deinstalled by RemoveLogToStream
+ // |stream| must live until deinstalled by RemoveLogToStream.
+ // If |stream| is the first stream added to the system, we might miss some
+ // early concurrent log statement happening from another thread happening near
+ // this instant.
static void AddLogToStream(LogSink* stream, LoggingSeverity min_sev);
- // Removes the specified stream, without destroying it.
+ // Removes the specified stream, without destroying it. When the method
+ // has completed, it's guaranteed that |stream| will receive no more logging
+ // calls.
static void RemoveLogToStream(LogSink* stream);
// Returns the severity for the specified stream, of if none is specified,
// the minimum stream severity.
@@ -557,6 +563,12 @@ class LogMessage {
// The output streams and their associated severities
static LogSink* streams_;
+ // Holds true with high probability if |streams_| is empty, false with high
+ // probability otherwise. Operated on with std::memory_order_relaxed because
+ // it's ok to lose or log some additional statements near the instant streams
+ // are added/removed.
+ static std::atomic<bool> streams_empty_;
+
// Flags for formatting options
static bool thread_, timestamp_;
diff --git a/rtc_base/memory/BUILD.gn b/rtc_base/memory/BUILD.gn
index aa905c6f70..5c3dd0a5d1 100644
--- a/rtc_base/memory/BUILD.gn
+++ b/rtc_base/memory/BUILD.gn
@@ -31,7 +31,10 @@ rtc_library("fifo_buffer") {
"fifo_buffer.cc",
"fifo_buffer.h",
]
- deps = [ "..:rtc_base" ]
+ deps = [
+ "..:rtc_base",
+ "../synchronization:mutex",
+ ]
}
rtc_library("unittests") {
diff --git a/rtc_base/memory/fifo_buffer.cc b/rtc_base/memory/fifo_buffer.cc
index 44fb032e57..49e926719f 100644
--- a/rtc_base/memory/fifo_buffer.cc
+++ b/rtc_base/memory/fifo_buffer.cc
@@ -39,13 +39,13 @@ FifoBuffer::FifoBuffer(size_t size, Thread* owner)
FifoBuffer::~FifoBuffer() {}
bool FifoBuffer::GetBuffered(size_t* size) const {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
*size = data_length_;
return true;
}
bool FifoBuffer::SetCapacity(size_t size) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (data_length_ > size) {
return false;
}
@@ -67,7 +67,7 @@ StreamResult FifoBuffer::ReadOffset(void* buffer,
size_t bytes,
size_t offset,
size_t* bytes_read) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return ReadOffsetLocked(buffer, bytes, offset, bytes_read);
}
@@ -75,12 +75,12 @@ StreamResult FifoBuffer::WriteOffset(const void* buffer,
size_t bytes,
size_t offset,
size_t* bytes_written) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return WriteOffsetLocked(buffer, bytes, offset, bytes_written);
}
StreamState FifoBuffer::GetState() const {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return state_;
}
@@ -88,7 +88,7 @@ StreamResult FifoBuffer::Read(void* buffer,
size_t bytes,
size_t* bytes_read,
int* error) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
const bool was_writable = data_length_ < buffer_length_;
size_t copy = 0;
StreamResult result = ReadOffsetLocked(buffer, bytes, 0, &copy);
@@ -114,7 +114,7 @@ StreamResult FifoBuffer::Write(const void* buffer,
size_t bytes,
size_t* bytes_written,
int* error) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
const bool was_readable = (data_length_ > 0);
size_t copy = 0;
@@ -136,12 +136,12 @@ StreamResult FifoBuffer::Write(const void* buffer,
}
void FifoBuffer::Close() {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
state_ = SS_CLOSED;
}
const void* FifoBuffer::GetReadData(size_t* size) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
*size = (read_position_ + data_length_ <= buffer_length_)
? data_length_
: buffer_length_ - read_position_;
@@ -149,7 +149,7 @@ const void* FifoBuffer::GetReadData(size_t* size) {
}
void FifoBuffer::ConsumeReadData(size_t size) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
RTC_DCHECK(size <= data_length_);
const bool was_writable = data_length_ < buffer_length_;
read_position_ = (read_position_ + size) % buffer_length_;
@@ -160,7 +160,7 @@ void FifoBuffer::ConsumeReadData(size_t size) {
}
void* FifoBuffer::GetWriteBuffer(size_t* size) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (state_ == SS_CLOSED) {
return nullptr;
}
@@ -180,7 +180,7 @@ void* FifoBuffer::GetWriteBuffer(size_t* size) {
}
void FifoBuffer::ConsumeWriteBuffer(size_t size) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
RTC_DCHECK(size <= buffer_length_ - data_length_);
const bool was_readable = (data_length_ > 0);
data_length_ += size;
@@ -190,7 +190,7 @@ void FifoBuffer::ConsumeWriteBuffer(size_t size) {
}
bool FifoBuffer::GetWriteRemaining(size_t* size) const {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
*size = buffer_length_ - data_length_;
return true;
}
diff --git a/rtc_base/memory/fifo_buffer.h b/rtc_base/memory/fifo_buffer.h
index f859815c70..04c4cbf33b 100644
--- a/rtc_base/memory/fifo_buffer.h
+++ b/rtc_base/memory/fifo_buffer.h
@@ -14,6 +14,7 @@
#include <memory>
#include "rtc_base/stream.h"
+#include "rtc_base/synchronization/mutex.h"
namespace rtc {
@@ -103,7 +104,7 @@ class FifoBuffer final : public StreamInterface {
size_t bytes,
size_t offset,
size_t* bytes_read)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Helper method that implements WriteOffset. Caller must acquire a lock
// when calling this method.
@@ -111,22 +112,22 @@ class FifoBuffer final : public StreamInterface {
size_t bytes,
size_t offset,
size_t* bytes_written)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// keeps the opened/closed state of the stream
- StreamState state_ RTC_GUARDED_BY(crit_);
+ StreamState state_ RTC_GUARDED_BY(mutex_);
// the allocated buffer
- std::unique_ptr<char[]> buffer_ RTC_GUARDED_BY(crit_);
+ std::unique_ptr<char[]> buffer_ RTC_GUARDED_BY(mutex_);
// size of the allocated buffer
- size_t buffer_length_ RTC_GUARDED_BY(crit_);
+ size_t buffer_length_ RTC_GUARDED_BY(mutex_);
// amount of readable data in the buffer
- size_t data_length_ RTC_GUARDED_BY(crit_);
+ size_t data_length_ RTC_GUARDED_BY(mutex_);
// offset to the readable data
- size_t read_position_ RTC_GUARDED_BY(crit_);
+ size_t read_position_ RTC_GUARDED_BY(mutex_);
// stream callbacks are dispatched on this thread
Thread* owner_;
// object lock
- CriticalSection crit_;
+ mutable webrtc::Mutex mutex_;
RTC_DISALLOW_COPY_AND_ASSIGN(FifoBuffer);
};
diff --git a/rtc_base/nat_server.cc b/rtc_base/nat_server.cc
index 323a787ee0..725a57be9f 100644
--- a/rtc_base/nat_server.cc
+++ b/rtc_base/nat_server.cc
@@ -174,7 +174,7 @@ void NATServer::OnInternalUDPPacket(AsyncPacketSocket* socket,
RTC_DCHECK(iter != int_map_->end());
// Allow the destination to send packets back to the source.
- iter->second->WhitelistInsert(dest_addr);
+ iter->second->AllowlistInsert(dest_addr);
// Send the packet to its intended destination.
rtc::PacketOptions options;
@@ -227,29 +227,29 @@ void NATServer::Translate(const SocketAddressPair& route) {
bool NATServer::ShouldFilterOut(TransEntry* entry,
const SocketAddress& ext_addr) {
- return entry->WhitelistContains(ext_addr);
+ return entry->AllowlistContains(ext_addr);
}
NATServer::TransEntry::TransEntry(const SocketAddressPair& r,
AsyncUDPSocket* s,
NAT* nat)
: route(r), socket(s) {
- whitelist = new AddressSet(AddrCmp(nat));
+ allowlist = new AddressSet(AddrCmp(nat));
}
NATServer::TransEntry::~TransEntry() {
- delete whitelist;
+ delete allowlist;
delete socket;
}
-void NATServer::TransEntry::WhitelistInsert(const SocketAddress& addr) {
- CritScope cs(&crit_);
- whitelist->insert(addr);
+void NATServer::TransEntry::AllowlistInsert(const SocketAddress& addr) {
+ webrtc::MutexLock lock(&mutex_);
+ allowlist->insert(addr);
}
-bool NATServer::TransEntry::WhitelistContains(const SocketAddress& ext_addr) {
- CritScope cs(&crit_);
- return whitelist->find(ext_addr) == whitelist->end();
+bool NATServer::TransEntry::AllowlistContains(const SocketAddress& ext_addr) {
+ webrtc::MutexLock lock(&mutex_);
+ return allowlist->find(ext_addr) == allowlist->end();
}
} // namespace rtc
diff --git a/rtc_base/nat_server.h b/rtc_base/nat_server.h
index 46f01e9761..5078fbb2c1 100644
--- a/rtc_base/nat_server.h
+++ b/rtc_base/nat_server.h
@@ -20,6 +20,7 @@
#include "rtc_base/proxy_server.h"
#include "rtc_base/socket_address_pair.h"
#include "rtc_base/socket_factory.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread.h"
namespace rtc {
@@ -96,13 +97,13 @@ class NATServer : public sigslot::has_slots<> {
TransEntry(const SocketAddressPair& r, AsyncUDPSocket* s, NAT* nat);
~TransEntry();
- void WhitelistInsert(const SocketAddress& addr);
- bool WhitelistContains(const SocketAddress& ext_addr);
+ void AllowlistInsert(const SocketAddress& addr);
+ bool AllowlistContains(const SocketAddress& ext_addr);
SocketAddressPair route;
AsyncUDPSocket* socket;
- AddressSet* whitelist;
- CriticalSection crit_;
+ AddressSet* allowlist;
+ webrtc::Mutex mutex_;
};
typedef std::map<SocketAddressPair, TransEntry*, RouteCmp> InternalMap;
diff --git a/rtc_base/net_helpers.cc b/rtc_base/net_helpers.cc
index 6ff3791738..c6685e2a65 100644
--- a/rtc_base/net_helpers.cc
+++ b/rtc_base/net_helpers.cc
@@ -10,8 +10,6 @@
#include "rtc_base/net_helpers.h"
-#include <memory>
-
#if defined(WEBRTC_WIN)
#include <ws2spi.h>
#include <ws2tcpip.h>
@@ -26,8 +24,11 @@
#endif
#endif // defined(WEBRTC_POSIX) && !defined(__native_client__)
+#include "api/task_queue/task_queue_base.h"
#include "rtc_base/logging.h"
#include "rtc_base/signal_thread.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread...
namespace rtc {
@@ -83,18 +84,35 @@ int ResolveHostname(const std::string& hostname,
#endif // !__native_client__
}
-// AsyncResolver
-AsyncResolver::AsyncResolver() : SignalThread(), error_(-1) {}
+AsyncResolver::AsyncResolver() : error_(-1) {}
-AsyncResolver::~AsyncResolver() = default;
+AsyncResolver::~AsyncResolver() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+}
void AsyncResolver::Start(const SocketAddress& addr) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
addr_ = addr;
- // SignalThred Start will kickoff the resolve process.
- SignalThread::Start();
+ webrtc::TaskQueueBase* current_task_queue = webrtc::TaskQueueBase::Current();
+ popup_thread_ = Thread::Create();
+ popup_thread_->Start();
+ popup_thread_->PostTask(webrtc::ToQueuedTask(
+ [this, flag = safety_.flag(), addr, current_task_queue] {
+ std::vector<IPAddress> addresses;
+ int error =
+ ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses);
+ current_task_queue->PostTask(webrtc::ToQueuedTask(
+ std::move(flag), [this, error, addresses = std::move(addresses)] {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ ResolveDone(std::move(addresses), error);
+ }));
+ }));
}
bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
if (error_ != 0 || addresses_.empty())
return false;
@@ -109,20 +127,40 @@ bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
}
int AsyncResolver::GetError() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
return error_;
}
void AsyncResolver::Destroy(bool wait) {
- SignalThread::Destroy(wait);
+ // Some callers have trouble guaranteeing that Destroy is called on the
+ // sequence guarded by |sequence_checker_|.
+ // RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
+ destroy_called_ = true;
+ MaybeSelfDestruct();
}
-void AsyncResolver::DoWork() {
- error_ =
- ResolveHostname(addr_.hostname().c_str(), addr_.family(), &addresses_);
+const std::vector<IPAddress>& AsyncResolver::addresses() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
+ return addresses_;
}
-void AsyncResolver::OnWorkDone() {
+void AsyncResolver::ResolveDone(std::vector<IPAddress> addresses, int error) {
+ addresses_ = addresses;
+ error_ = error;
+ recursion_check_ = true;
SignalDone(this);
+ MaybeSelfDestruct();
+}
+
+void AsyncResolver::MaybeSelfDestruct() {
+ if (!recursion_check_) {
+ delete this;
+ } else {
+ recursion_check_ = false;
+ }
}
const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) {
diff --git a/rtc_base/net_helpers.h b/rtc_base/net_helpers.h
index 1e06940be7..c6aa4be5b2 100644
--- a/rtc_base/net_helpers.h
+++ b/rtc_base/net_helpers.h
@@ -21,16 +21,23 @@
#include "rtc_base/async_resolver_interface.h"
#include "rtc_base/ip_address.h"
-#include "rtc_base/signal_thread.h"
#include "rtc_base/socket_address.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
namespace rtc {
// AsyncResolver will perform async DNS resolution, signaling the result on
// the SignalDone from AsyncResolverInterface when the operation completes.
-class RTC_EXPORT AsyncResolver : public SignalThread,
- public AsyncResolverInterface {
+//
+// This class is thread-compatible, and all methods and destruction needs to
+// happen from the same rtc::Thread, except for Destroy which is allowed to
+// happen on another context provided it's not happening concurrently to another
+// public API call, and is the last access to the object.
+class RTC_EXPORT AsyncResolver : public AsyncResolverInterface {
public:
AsyncResolver();
~AsyncResolver() override;
@@ -40,17 +47,22 @@ class RTC_EXPORT AsyncResolver : public SignalThread,
int GetError() const override;
void Destroy(bool wait) override;
- const std::vector<IPAddress>& addresses() const { return addresses_; }
- void set_error(int error) { error_ = error; }
-
- protected:
- void DoWork() override;
- void OnWorkDone() override;
+ const std::vector<IPAddress>& addresses() const;
private:
- SocketAddress addr_;
- std::vector<IPAddress> addresses_;
- int error_;
+ void ResolveDone(std::vector<IPAddress> addresses, int error)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_);
+ void MaybeSelfDestruct();
+
+ SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_);
+ std::vector<IPAddress> addresses_ RTC_GUARDED_BY(sequence_checker_);
+ int error_ RTC_GUARDED_BY(sequence_checker_);
+ webrtc::ScopedTaskSafety safety_ RTC_GUARDED_BY(sequence_checker_);
+ std::unique_ptr<Thread> popup_thread_ RTC_GUARDED_BY(sequence_checker_);
+ bool recursion_check_ =
+ false; // Protects against SignalDone calling into Destroy.
+ bool destroy_called_ = false;
+ webrtc::SequenceChecker sequence_checker_;
};
// rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid
diff --git a/rtc_base/network.cc b/rtc_base/network.cc
index f30063d991..64aee4bdae 100644
--- a/rtc_base/network.cc
+++ b/rtc_base/network.cc
@@ -35,6 +35,7 @@
#include "rtc_base/string_utils.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/thread.h"
+#include "system_wrappers/include/field_trial.h"
namespace rtc {
namespace {
@@ -85,7 +86,8 @@ bool SortNetworks(const Network* a, const Network* b) {
return a->key() < b->key();
}
-uint16_t ComputeNetworkCostByType(int type) {
+uint16_t ComputeNetworkCostByType(int type,
+ bool use_differentiated_cellular_costs) {
// TODO(jonaso) : Rollout support for cellular network cost using A/B
// experiment to make sure it does not introduce regressions.
switch (type) {
@@ -95,11 +97,19 @@ uint16_t ComputeNetworkCostByType(int type) {
case rtc::ADAPTER_TYPE_WIFI:
return kNetworkCostLow;
case rtc::ADAPTER_TYPE_CELLULAR:
+ return kNetworkCostCellular;
case rtc::ADAPTER_TYPE_CELLULAR_2G:
+ return use_differentiated_cellular_costs ? kNetworkCostCellular2G
+ : kNetworkCostCellular;
case rtc::ADAPTER_TYPE_CELLULAR_3G:
+ return use_differentiated_cellular_costs ? kNetworkCostCellular3G
+ : kNetworkCostCellular;
case rtc::ADAPTER_TYPE_CELLULAR_4G:
+ return use_differentiated_cellular_costs ? kNetworkCostCellular4G
+ : kNetworkCostCellular;
case rtc::ADAPTER_TYPE_CELLULAR_5G:
- return kNetworkCostCellular;
+ return use_differentiated_cellular_costs ? kNetworkCostCellular5G
+ : kNetworkCostCellular;
case rtc::ADAPTER_TYPE_ANY:
// Candidates gathered from the any-address/wildcard ports, as backups,
// are given the maximum cost so that if there are other candidates with
@@ -930,7 +940,9 @@ Network::Network(const std::string& name,
scope_id_(0),
ignored_(false),
type_(ADAPTER_TYPE_UNKNOWN),
- preference_(0) {}
+ preference_(0),
+ use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled(
+ "WebRTC-UseDifferentiatedCellularCosts")) {}
Network::Network(const std::string& name,
const std::string& desc,
@@ -945,7 +957,9 @@ Network::Network(const std::string& name,
scope_id_(0),
ignored_(false),
type_(type),
- preference_(0) {}
+ preference_(0),
+ use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled(
+ "WebRTC-UseDifferentiatedCellularCosts")) {}
Network::Network(const Network&) = default;
@@ -1017,7 +1031,7 @@ webrtc::MdnsResponderInterface* Network::GetMdnsResponder() const {
uint16_t Network::GetCost() const {
AdapterType type = IsVpn() ? underlying_type_for_vpn_ : type_;
- return ComputeNetworkCostByType(type);
+ return ComputeNetworkCostByType(type, use_differentiated_cellular_costs_);
}
std::string Network::ToString() const {
diff --git a/rtc_base/network.h b/rtc_base/network.h
index bd05b6ae16..a67d2a2339 100644
--- a/rtc_base/network.h
+++ b/rtc_base/network.h
@@ -462,6 +462,7 @@ class RTC_EXPORT Network {
int preference_;
bool active_ = true;
uint16_t id_ = 0;
+ bool use_differentiated_cellular_costs_ = false;
friend class NetworkManager;
};
diff --git a/rtc_base/network/BUILD.gn b/rtc_base/network/BUILD.gn
index 1d06defb3b..35ae3d45f7 100644
--- a/rtc_base/network/BUILD.gn
+++ b/rtc_base/network/BUILD.gn
@@ -13,8 +13,6 @@ rtc_library("sent_packet") {
"sent_packet.cc",
"sent_packet.h",
]
- deps = [
- "../system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/rtc_base/one_time_event.h b/rtc_base/one_time_event.h
index c5ccbf6933..d33ddbd587 100644
--- a/rtc_base/one_time_event.h
+++ b/rtc_base/one_time_event.h
@@ -11,7 +11,7 @@
#ifndef RTC_BASE_ONE_TIME_EVENT_H_
#define RTC_BASE_ONE_TIME_EVENT_H_
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
// Provides a simple way to perform an operation (such as logging) one
@@ -26,7 +26,7 @@ class OneTimeEvent {
public:
OneTimeEvent() {}
bool operator()() {
- rtc::CritScope cs(&critsect_);
+ MutexLock lock(&mutex_);
if (happened_) {
return false;
}
@@ -36,7 +36,7 @@ class OneTimeEvent {
private:
bool happened_ = false;
- rtc::CriticalSection critsect_;
+ Mutex mutex_;
};
// A non-thread-safe, ligher-weight version of the OneTimeEvent class.
diff --git a/rtc_base/openssl_adapter_unittest.cc b/rtc_base/openssl_adapter_unittest.cc
index b161304d65..4bd87992d4 100644
--- a/rtc_base/openssl_adapter_unittest.cc
+++ b/rtc_base/openssl_adapter_unittest.cc
@@ -25,28 +25,34 @@ namespace {
class MockAsyncSocket : public AsyncSocket {
public:
virtual ~MockAsyncSocket() = default;
- MOCK_METHOD1(Accept, AsyncSocket*(SocketAddress*));
- MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress());
- MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress());
- MOCK_METHOD1(Bind, int(const SocketAddress&));
- MOCK_METHOD1(Connect, int(const SocketAddress&));
- MOCK_METHOD2(Send, int(const void*, size_t));
- MOCK_METHOD3(SendTo, int(const void*, size_t, const SocketAddress&));
- MOCK_METHOD3(Recv, int(void*, size_t, int64_t*));
- MOCK_METHOD4(RecvFrom, int(void*, size_t, SocketAddress*, int64_t*));
- MOCK_METHOD1(Listen, int(int));
- MOCK_METHOD0(Close, int());
- MOCK_CONST_METHOD0(GetError, int());
- MOCK_METHOD1(SetError, void(int));
- MOCK_CONST_METHOD0(GetState, ConnState());
- MOCK_METHOD2(GetOption, int(Option, int*));
- MOCK_METHOD2(SetOption, int(Option, int));
+ MOCK_METHOD(AsyncSocket*, Accept, (SocketAddress*), (override));
+ MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override));
+ MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override));
+ MOCK_METHOD(int, Bind, (const SocketAddress&), (override));
+ MOCK_METHOD(int, Connect, (const SocketAddress&), (override));
+ MOCK_METHOD(int, Send, (const void*, size_t), (override));
+ MOCK_METHOD(int,
+ SendTo,
+ (const void*, size_t, const SocketAddress&),
+ (override));
+ MOCK_METHOD(int, Recv, (void*, size_t, int64_t*), (override));
+ MOCK_METHOD(int,
+ RecvFrom,
+ (void*, size_t, SocketAddress*, int64_t*),
+ (override));
+ MOCK_METHOD(int, Listen, (int), (override));
+ MOCK_METHOD(int, Close, (), (override));
+ MOCK_METHOD(int, GetError, (), (const, override));
+ MOCK_METHOD(void, SetError, (int), (override));
+ MOCK_METHOD(ConnState, GetState, (), (const, override));
+ MOCK_METHOD(int, GetOption, (Option, int*), (override));
+ MOCK_METHOD(int, SetOption, (Option, int), (override));
};
class MockCertVerifier : public SSLCertificateVerifier {
public:
virtual ~MockCertVerifier() = default;
- MOCK_METHOD1(Verify, bool(const SSLCertificate&));
+ MOCK_METHOD(bool, Verify, (const SSLCertificate&), (override));
};
} // namespace
diff --git a/rtc_base/operations_chain_unittest.cc b/rtc_base/operations_chain_unittest.cc
index 968f94c060..ed3c924998 100644
--- a/rtc_base/operations_chain_unittest.cc
+++ b/rtc_base/operations_chain_unittest.cc
@@ -369,14 +369,15 @@ TEST(OperationsChainTest, FunctorIsNotDestroyedWhileExecuting) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(OperationsChainTest, OperationNotInvokingCallbackShouldCrash) {
+TEST(OperationsChainDeathTest, OperationNotInvokingCallbackShouldCrash) {
scoped_refptr<OperationsChain> operations_chain = OperationsChain::Create();
EXPECT_DEATH(
operations_chain->ChainOperation([](std::function<void()> callback) {}),
"");
}
-TEST(OperationsChainTest, OperationInvokingCallbackMultipleTimesShouldCrash) {
+TEST(OperationsChainDeathTest,
+ OperationInvokingCallbackMultipleTimesShouldCrash) {
scoped_refptr<OperationsChain> operations_chain = OperationsChain::Create();
EXPECT_DEATH(
operations_chain->ChainOperation([](std::function<void()> callback) {
diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc
index 080534af2c..05b32557be 100644
--- a/rtc_base/physical_socket_server.cc
+++ b/rtc_base/physical_socket_server.cc
@@ -24,7 +24,6 @@
// "poll" will be used to wait for the signal dispatcher.
#include <poll.h>
#endif
-#include <signal.h>
#include <sys/ioctl.h>
#include <sys/select.h>
#include <sys/time.h>
@@ -956,182 +955,7 @@ class EventDispatcher : public Dispatcher {
PhysicalSocketServer* ss_;
int afd_[2];
bool fSignaled_;
- CriticalSection crit_;
-};
-
-// These two classes use the self-pipe trick to deliver POSIX signals to our
-// select loop. This is the only safe, reliable, cross-platform way to do
-// non-trivial things with a POSIX signal in an event-driven program (until
-// proper pselect() implementations become ubiquitous).
-
-class PosixSignalHandler {
- public:
- // POSIX only specifies 32 signals, but in principle the system might have
- // more and the programmer might choose to use them, so we size our array
- // for 128.
- static constexpr int kNumPosixSignals = 128;
-
- // There is just a single global instance. (Signal handlers do not get any
- // sort of user-defined void * parameter, so they can't access anything that
- // isn't global.)
- static PosixSignalHandler* Instance() {
- static PosixSignalHandler* const instance = new PosixSignalHandler();
- return instance;
- }
-
- // Returns true if the given signal number is set.
- bool IsSignalSet(int signum) const {
- RTC_DCHECK(signum < static_cast<int>(arraysize(received_signal_)));
- if (signum < static_cast<int>(arraysize(received_signal_))) {
- return received_signal_[signum];
- } else {
- return false;
- }
- }
-
- // Clears the given signal number.
- void ClearSignal(int signum) {
- RTC_DCHECK(signum < static_cast<int>(arraysize(received_signal_)));
- if (signum < static_cast<int>(arraysize(received_signal_))) {
- received_signal_[signum] = false;
- }
- }
-
- // Returns the file descriptor to monitor for signal events.
- int GetDescriptor() const { return afd_[0]; }
-
- // This is called directly from our real signal handler, so it must be
- // signal-handler-safe. That means it cannot assume anything about the
- // user-level state of the process, since the handler could be executed at any
- // time on any thread.
- void OnPosixSignalReceived(int signum) {
- if (signum >= static_cast<int>(arraysize(received_signal_))) {
- // We don't have space in our array for this.
- return;
- }
- // Set a flag saying we've seen this signal.
- received_signal_[signum] = true;
- // Notify application code that we got a signal.
- const uint8_t b[1] = {0};
- if (-1 == write(afd_[1], b, sizeof(b))) {
- // Nothing we can do here. If there's an error somehow then there's
- // nothing we can safely do from a signal handler.
- // No, we can't even safely log it.
- // But, we still have to check the return value here. Otherwise,
- // GCC 4.4.1 complains ignoring return value. Even (void) doesn't help.
- return;
- }
- }
-
- private:
- PosixSignalHandler() {
- if (pipe(afd_) < 0) {
- RTC_LOG_ERR(LS_ERROR) << "pipe failed";
- return;
- }
- if (fcntl(afd_[0], F_SETFL, O_NONBLOCK) < 0) {
- RTC_LOG_ERR(LS_WARNING) << "fcntl #1 failed";
- }
- if (fcntl(afd_[1], F_SETFL, O_NONBLOCK) < 0) {
- RTC_LOG_ERR(LS_WARNING) << "fcntl #2 failed";
- }
- memset(const_cast<void*>(static_cast<volatile void*>(received_signal_)), 0,
- sizeof(received_signal_));
- }
-
- ~PosixSignalHandler() {
- int fd1 = afd_[0];
- int fd2 = afd_[1];
- // We clobber the stored file descriptor numbers here or else in principle
- // a signal that happens to be delivered during application termination
- // could erroneously write a zero byte to an unrelated file handle in
- // OnPosixSignalReceived() if some other file happens to be opened later
- // during shutdown and happens to be given the same file descriptor number
- // as our pipe had. Unfortunately even with this precaution there is still a
- // race where that could occur if said signal happens to be handled
- // concurrently with this code and happens to have already read the value of
- // afd_[1] from memory before we clobber it, but that's unlikely.
- afd_[0] = -1;
- afd_[1] = -1;
- close(fd1);
- close(fd2);
- }
-
- int afd_[2];
- // These are boolean flags that will be set in our signal handler and read
- // and cleared from Wait(). There is a race involved in this, but it is
- // benign. The signal handler sets the flag before signaling the pipe, so
- // we'll never end up blocking in select() while a flag is still true.
- // However, if two of the same signal arrive close to each other then it's
- // possible that the second time the handler may set the flag while it's still
- // true, meaning that signal will be missed. But the first occurrence of it
- // will still be handled, so this isn't a problem.
- // Volatile is not necessary here for correctness, but this data _is_ volatile
- // so I've marked it as such.
- volatile uint8_t received_signal_[kNumPosixSignals];
-};
-
-class PosixSignalDispatcher : public Dispatcher {
- public:
- PosixSignalDispatcher(PhysicalSocketServer* owner) : owner_(owner) {
- owner_->Add(this);
- }
-
- ~PosixSignalDispatcher() override { owner_->Remove(this); }
-
- uint32_t GetRequestedEvents() override { return DE_READ; }
-
- void OnPreEvent(uint32_t ff) override {
- // Events might get grouped if signals come very fast, so we read out up to
- // 16 bytes to make sure we keep the pipe empty.
- uint8_t b[16];
- ssize_t ret = read(GetDescriptor(), b, sizeof(b));
- if (ret < 0) {
- RTC_LOG_ERR(LS_WARNING) << "Error in read()";
- } else if (ret == 0) {
- RTC_LOG(LS_WARNING) << "Should have read at least one byte";
- }
- }
-
- void OnEvent(uint32_t ff, int err) override {
- for (int signum = 0; signum < PosixSignalHandler::kNumPosixSignals;
- ++signum) {
- if (PosixSignalHandler::Instance()->IsSignalSet(signum)) {
- PosixSignalHandler::Instance()->ClearSignal(signum);
- HandlerMap::iterator i = handlers_.find(signum);
- if (i == handlers_.end()) {
- // This can happen if a signal is delivered to our process at around
- // the same time as we unset our handler for it. It is not an error
- // condition, but it's unusual enough to be worth logging.
- RTC_LOG(LS_INFO) << "Received signal with no handler: " << signum;
- } else {
- // Otherwise, execute our handler.
- (*i->second)(signum);
- }
- }
- }
- }
-
- int GetDescriptor() override {
- return PosixSignalHandler::Instance()->GetDescriptor();
- }
-
- bool IsDescriptorClosed() override { return false; }
-
- void SetHandler(int signum, void (*handler)(int)) {
- handlers_[signum] = handler;
- }
-
- void ClearHandler(int signum) { handlers_.erase(signum); }
-
- bool HasHandlers() { return !handlers_.empty(); }
-
- private:
- typedef std::map<int, void (*)(int)> HandlerMap;
-
- HandlerMap handlers_;
- // Our owner.
- PhysicalSocketServer* owner_;
+ RecursiveCriticalSection crit_;
};
#endif // WEBRTC_POSIX
@@ -1205,31 +1029,32 @@ class Signaler : public EventDispatcher {
bool* pf_;
};
-PhysicalSocketServer::PhysicalSocketServer() : fWait_(false) {
+PhysicalSocketServer::PhysicalSocketServer()
+ :
+#if defined(WEBRTC_USE_EPOLL)
+ // Since Linux 2.6.8, the size argument is ignored, but must be greater
+ // than zero. Before that the size served as hint to the kernel for the
+ // amount of space to initially allocate in internal data structures.
+ epoll_fd_(epoll_create(FD_SETSIZE)),
+#endif
+#if defined(WEBRTC_WIN)
+ socket_ev_(WSACreateEvent()),
+#endif
+ fWait_(false) {
#if defined(WEBRTC_USE_EPOLL)
- // Since Linux 2.6.8, the size argument is ignored, but must be greater than
- // zero. Before that the size served as hint to the kernel for the amount of
- // space to initially allocate in internal data structures.
- epoll_fd_ = epoll_create(FD_SETSIZE);
if (epoll_fd_ == -1) {
// Not an error, will fall back to "select" below.
RTC_LOG_E(LS_WARNING, EN, errno) << "epoll_create";
- epoll_fd_ = INVALID_SOCKET;
+ // Note that -1 == INVALID_SOCKET, the alias used by later checks.
}
#endif
signal_wakeup_ = new Signaler(this, &fWait_);
-#if defined(WEBRTC_WIN)
- socket_ev_ = WSACreateEvent();
-#endif
}
PhysicalSocketServer::~PhysicalSocketServer() {
#if defined(WEBRTC_WIN)
WSACloseEvent(socket_ev_);
#endif
-#if defined(WEBRTC_POSIX)
- signal_dispatcher_.reset();
-#endif
delete signal_wakeup_;
#if defined(WEBRTC_USE_EPOLL)
if (epoll_fd_ != INVALID_SOCKET) {
@@ -1540,12 +1365,6 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) {
#if defined(WEBRTC_USE_EPOLL)
-// Initial number of events to process with one call to "epoll_wait".
-static const size_t kInitialEpollEvents = 128;
-
-// Maximum number of events to process with one call to "epoll_wait".
-static const size_t kMaxEpollEvents = 8192;
-
void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) {
RTC_DCHECK(epoll_fd_ != INVALID_SOCKET);
int fd = pdispatcher->GetDescriptor();
@@ -1612,20 +1431,13 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) {
tvStop = TimeAfter(cmsWait);
}
- if (epoll_events_.empty()) {
- // The initial space to receive events is created only if epoll is used.
- epoll_events_.resize(kInitialEpollEvents);
- }
-
fWait_ = true;
-
while (fWait_) {
// Wait then call handlers as appropriate
// < 0 means error
// 0 means timeout
// > 0 means count of descriptors ready
- int n = epoll_wait(epoll_fd_, &epoll_events_[0],
- static_cast<int>(epoll_events_.size()),
+ int n = epoll_wait(epoll_fd_, epoll_events_.data(), epoll_events_.size(),
static_cast<int>(tvWait));
if (n < 0) {
if (errno != EINTR) {
@@ -1658,13 +1470,6 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) {
}
}
- if (static_cast<size_t>(n) == epoll_events_.size() &&
- epoll_events_.size() < kMaxEpollEvents) {
- // We used the complete space to receive events, increase size for future
- // iterations.
- epoll_events_.resize(std::max(epoll_events_.size() * 2, kMaxEpollEvents));
- }
-
if (cmsWait != kForever) {
tvWait = TimeDiff(tvStop, TimeMillis());
if (tvWait < 0) {
@@ -1746,62 +1551,6 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) {
#endif // WEBRTC_USE_EPOLL
-static void GlobalSignalHandler(int signum) {
- PosixSignalHandler::Instance()->OnPosixSignalReceived(signum);
-}
-
-bool PhysicalSocketServer::SetPosixSignalHandler(int signum,
- void (*handler)(int)) {
- // If handler is SIG_IGN or SIG_DFL then clear our user-level handler,
- // otherwise set one.
- if (handler == SIG_IGN || handler == SIG_DFL) {
- if (!InstallSignal(signum, handler)) {
- return false;
- }
- if (signal_dispatcher_) {
- signal_dispatcher_->ClearHandler(signum);
- if (!signal_dispatcher_->HasHandlers()) {
- signal_dispatcher_.reset();
- }
- }
- } else {
- if (!signal_dispatcher_) {
- signal_dispatcher_.reset(new PosixSignalDispatcher(this));
- }
- signal_dispatcher_->SetHandler(signum, handler);
- if (!InstallSignal(signum, &GlobalSignalHandler)) {
- return false;
- }
- }
- return true;
-}
-
-Dispatcher* PhysicalSocketServer::signal_dispatcher() {
- return signal_dispatcher_.get();
-}
-
-bool PhysicalSocketServer::InstallSignal(int signum, void (*handler)(int)) {
- struct sigaction act;
- // It doesn't really matter what we set this mask to.
- if (sigemptyset(&act.sa_mask) != 0) {
- RTC_LOG_ERR(LS_ERROR) << "Couldn't set mask";
- return false;
- }
- act.sa_handler = handler;
-#if !defined(__native_client__)
- // Use SA_RESTART so that our syscalls don't get EINTR, since we don't need it
- // and it's a nuisance. Though some syscalls still return EINTR and there's no
- // real standard for which ones. :(
- act.sa_flags = SA_RESTART;
-#else
- act.sa_flags = 0;
-#endif
- if (sigaction(signum, &act, nullptr) != 0) {
- RTC_LOG_ERR(LS_ERROR) << "Couldn't set sigaction";
- return false;
- }
- return true;
-}
#endif // WEBRTC_POSIX
#if defined(WEBRTC_WIN)
diff --git a/rtc_base/physical_socket_server.h b/rtc_base/physical_socket_server.h
index a71810f3db..7eaf590e3a 100644
--- a/rtc_base/physical_socket_server.h
+++ b/rtc_base/physical_socket_server.h
@@ -16,14 +16,16 @@
#define WEBRTC_USE_EPOLL 1
#endif
+#include <array>
#include <memory>
#include <set>
#include <vector>
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/net_helpers.h"
#include "rtc_base/socket_server.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread_annotations.h"
#if defined(WEBRTC_POSIX)
typedef int SOCKET;
@@ -41,9 +43,6 @@ enum DispatcherEvent {
};
class Signaler;
-#if defined(WEBRTC_POSIX)
-class PosixSignalDispatcher;
-#endif
class Dispatcher {
public:
@@ -82,33 +81,16 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer {
void Remove(Dispatcher* dispatcher);
void Update(Dispatcher* dispatcher);
-#if defined(WEBRTC_POSIX)
- // Sets the function to be executed in response to the specified POSIX signal.
- // The function is executed from inside Wait() using the "self-pipe trick"--
- // regardless of which thread receives the signal--and hence can safely
- // manipulate user-level data structures.
- // "handler" may be SIG_IGN, SIG_DFL, or a user-specified function, just like
- // with signal(2).
- // Only one PhysicalSocketServer should have user-level signal handlers.
- // Dispatching signals on multiple PhysicalSocketServers is not reliable.
- // The signal mask is not modified. It is the caller's responsibily to
- // maintain it as desired.
- virtual bool SetPosixSignalHandler(int signum, void (*handler)(int));
-
- protected:
- Dispatcher* signal_dispatcher();
-#endif
-
private:
+ // The number of events to process with one call to "epoll_wait".
+ static constexpr size_t kNumEpollEvents = 128;
+
typedef std::set<Dispatcher*> DispatcherSet;
- void AddRemovePendingDispatchers();
+ void AddRemovePendingDispatchers() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
#if defined(WEBRTC_POSIX)
bool WaitSelect(int cms, bool process_io);
- static bool InstallSignal(int signum, void (*handler)(int));
-
- std::unique_ptr<PosixSignalDispatcher> signal_dispatcher_;
#endif // WEBRTC_POSIX
#if defined(WEBRTC_USE_EPOLL)
void AddEpoll(Dispatcher* dispatcher);
@@ -117,19 +99,23 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer {
bool WaitEpoll(int cms);
bool WaitPoll(int cms, Dispatcher* dispatcher);
- int epoll_fd_ = INVALID_SOCKET;
- std::vector<struct epoll_event> epoll_events_;
+ // This array is accessed in isolation by a thread calling into Wait().
+ // It's useless to use a SequenceChecker to guard it because a socket
+ // server can outlive the thread it's bound to, forcing the Wait call
+ // to have to reset the sequence checker on Wait calls.
+ std::array<epoll_event, kNumEpollEvents> epoll_events_;
+ const int epoll_fd_ = INVALID_SOCKET;
#endif // WEBRTC_USE_EPOLL
- DispatcherSet dispatchers_;
- DispatcherSet pending_add_dispatchers_;
- DispatcherSet pending_remove_dispatchers_;
- bool processing_dispatchers_ = false;
- Signaler* signal_wakeup_;
- CriticalSection crit_;
- bool fWait_;
+ DispatcherSet dispatchers_ RTC_GUARDED_BY(crit_);
+ DispatcherSet pending_add_dispatchers_ RTC_GUARDED_BY(crit_);
+ DispatcherSet pending_remove_dispatchers_ RTC_GUARDED_BY(crit_);
+ bool processing_dispatchers_ RTC_GUARDED_BY(crit_) = false;
+ Signaler* signal_wakeup_; // Assigned in constructor only
+ RecursiveCriticalSection crit_;
#if defined(WEBRTC_WIN)
- WSAEVENT socket_ev_;
+ const WSAEVENT socket_ev_;
#endif
+ bool fWait_;
};
class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> {
@@ -205,7 +191,7 @@ class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> {
SOCKET s_;
bool udp_;
int family_ = 0;
- CriticalSection crit_;
+ RecursiveCriticalSection crit_;
int error_ RTC_GUARDED_BY(crit_);
ConnState state_;
AsyncResolver* resolver_;
diff --git a/rtc_base/physical_socket_server_unittest.cc b/rtc_base/physical_socket_server_unittest.cc
index 5083ca1791..586b9db292 100644
--- a/rtc_base/physical_socket_server_unittest.cc
+++ b/rtc_base/physical_socket_server_unittest.cc
@@ -501,139 +501,6 @@ TEST_F(PhysicalSocketTest,
server_->set_network_binder(nullptr);
}
-class PosixSignalDeliveryTest : public ::testing::Test {
- public:
- static void RecordSignal(int signum) {
- signals_received_.push_back(signum);
- signaled_thread_ = Thread::Current();
- }
-
- protected:
- void SetUp() override { ss_.reset(new PhysicalSocketServer()); }
-
- void TearDown() override {
- ss_.reset(nullptr);
- signals_received_.clear();
- signaled_thread_ = nullptr;
- }
-
- bool ExpectSignal(int signum) {
- if (signals_received_.empty()) {
- RTC_LOG(LS_ERROR) << "ExpectSignal(): No signal received";
- return false;
- }
- if (signals_received_[0] != signum) {
- RTC_LOG(LS_ERROR) << "ExpectSignal(): Received signal "
- << signals_received_[0] << ", expected " << signum;
- return false;
- }
- signals_received_.erase(signals_received_.begin());
- return true;
- }
-
- bool ExpectNone() {
- bool ret = signals_received_.empty();
- if (!ret) {
- RTC_LOG(LS_ERROR) << "ExpectNone(): Received signal "
- << signals_received_[0] << ", expected none";
- }
- return ret;
- }
-
- static std::vector<int> signals_received_;
- static Thread* signaled_thread_;
-
- std::unique_ptr<PhysicalSocketServer> ss_;
-};
-
-std::vector<int> PosixSignalDeliveryTest::signals_received_;
-Thread* PosixSignalDeliveryTest::signaled_thread_ = nullptr;
-
-// Test receiving a synchronous signal while not in Wait() and then entering
-// Wait() afterwards.
-// TODO(webrtc:7864): Fails on real iOS devices
-#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY)
-#define MAYBE_RaiseThenWait DISABLED_RaiseThenWait
-#else
-#define MAYBE_RaiseThenWait RaiseThenWait
-#endif
-TEST_F(PosixSignalDeliveryTest, MAYBE_RaiseThenWait) {
- ASSERT_TRUE(ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal));
- raise(SIGTERM);
- EXPECT_TRUE(ss_->Wait(0, true));
- EXPECT_TRUE(ExpectSignal(SIGTERM));
- EXPECT_TRUE(ExpectNone());
-}
-
-// Test that we can handle getting tons of repeated signals and that we see all
-// the different ones.
-// TODO(webrtc:7864): Fails on real iOS devices
-#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY)
-#define MAYBE_InsanelyManySignals DISABLED_InsanelyManySignals
-#else
-#define MAYBE_InsanelyManySignals InsanelyManySignals
-#endif
-TEST_F(PosixSignalDeliveryTest, MAYBE_InsanelyManySignals) {
- ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal);
- ss_->SetPosixSignalHandler(SIGINT, &RecordSignal);
- for (int i = 0; i < 10000; ++i) {
- raise(SIGTERM);
- }
- raise(SIGINT);
- EXPECT_TRUE(ss_->Wait(0, true));
- // Order will be lowest signal numbers first.
- EXPECT_TRUE(ExpectSignal(SIGINT));
- EXPECT_TRUE(ExpectSignal(SIGTERM));
- EXPECT_TRUE(ExpectNone());
-}
-
-// Test that a signal during a Wait() call is detected.
-TEST_F(PosixSignalDeliveryTest, SignalDuringWait) {
- ss_->SetPosixSignalHandler(SIGALRM, &RecordSignal);
- alarm(1);
- EXPECT_TRUE(ss_->Wait(1500, true));
- EXPECT_TRUE(ExpectSignal(SIGALRM));
- EXPECT_TRUE(ExpectNone());
-}
-
-// Test that it works no matter what thread the kernel chooses to give the
-// signal to (since it's not guaranteed to be the one that Wait() runs on).
-// TODO(webrtc:7864): Fails on real iOS devices
-#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY)
-#define MAYBE_SignalOnDifferentThread DISABLED_SignalOnDifferentThread
-#else
-#define MAYBE_SignalOnDifferentThread SignalOnDifferentThread
-#endif
-TEST_F(PosixSignalDeliveryTest, DISABLED_SignalOnDifferentThread) {
- ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal);
- // Mask out SIGTERM so that it can't be delivered to this thread.
- sigset_t mask;
- sigemptyset(&mask);
- sigaddset(&mask, SIGTERM);
- EXPECT_EQ(0, pthread_sigmask(SIG_SETMASK, &mask, nullptr));
- // Start a new thread that raises it. It will have to be delivered to that
- // thread. Our implementation should safely handle it and dispatch
- // RecordSignal() on this thread.
- std::unique_ptr<Thread> thread(Thread::CreateWithSocketServer());
- thread->Start();
- thread->PostTask(RTC_FROM_HERE, [&thread]() {
- thread->socketserver()->Wait(1000, false);
- // Allow SIGTERM. This will be the only thread with it not masked so it will
- // be delivered to us.
- sigset_t mask;
- sigemptyset(&mask);
- pthread_sigmask(SIG_SETMASK, &mask, nullptr);
-
- // Raise it.
- raise(SIGTERM);
- });
-
- EXPECT_TRUE(ss_->Wait(1500, true));
- EXPECT_TRUE(ExpectSignal(SIGTERM));
- EXPECT_EQ(Thread::Current(), signaled_thread_);
- EXPECT_TRUE(ExpectNone());
-}
-
#endif
} // namespace rtc
diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc
index ed4a228262..b0243b41dc 100644
--- a/rtc_base/platform_thread_types.cc
+++ b/rtc_base/platform_thread_types.cc
@@ -15,6 +15,16 @@
#include <sys/syscall.h>
#endif
+#if defined(WEBRTC_WIN)
+#include "rtc_base/arraysize.h"
+
+// The SetThreadDescription API was brought in version 1607 of Windows 10.
+// For compatibility with various versions of winuser and avoid clashing with
+// a potentially defined type, we use the RTC_ prefix.
+typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread,
+ PCWSTR lpThreadDescription);
+#endif
+
namespace rtc {
PlatformThreadId CurrentThreadId() {
@@ -58,6 +68,24 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b) {
void SetCurrentThreadName(const char* name) {
#if defined(WEBRTC_WIN)
+ // The SetThreadDescription API works even if no debugger is attached.
+ // The names set with this API also show up in ETW traces. Very handy.
+ static auto set_thread_description_func =
+ reinterpret_cast<RTC_SetThreadDescription>(::GetProcAddress(
+ ::GetModuleHandleA("Kernel32.dll"), "SetThreadDescription"));
+ if (set_thread_description_func) {
+ // Convert from ASCII to UTF-16.
+ wchar_t wide_thread_name[64];
+ for (size_t i = 0; i < arraysize(wide_thread_name) - 1; ++i) {
+ wide_thread_name[i] = name[i];
+ if (wide_thread_name[i] == L'\0')
+ break;
+ }
+ // Guarantee null-termination.
+ wide_thread_name[arraysize(wide_thread_name) - 1] = L'\0';
+ set_thread_description_func(::GetCurrentThread(), wide_thread_name);
+ }
+
// For details see:
// https://docs.microsoft.com/en-us/visualstudio/debugger/how-to-set-a-thread-name-in-native-code
#pragma pack(push, 8)
diff --git a/rtc_base/rate_limiter.cc b/rtc_base/rate_limiter.cc
index 7394c3eb89..0f3f343aed 100644
--- a/rtc_base/rate_limiter.cc
+++ b/rtc_base/rate_limiter.cc
@@ -31,7 +31,7 @@ RateLimiter::~RateLimiter() {}
// calling SetMaxRate() and a timed maintenance thread periodically updating
// the RTT.
bool RateLimiter::TryUseRate(size_t packet_size_bytes) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
int64_t now_ms = clock_->TimeInMilliseconds();
absl::optional<uint32_t> current_rate = current_rate_.Rate(now_ms);
if (current_rate) {
@@ -53,14 +53,14 @@ bool RateLimiter::TryUseRate(size_t packet_size_bytes) {
}
void RateLimiter::SetMaxRate(uint32_t max_rate_bps) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
max_rate_bps_ = max_rate_bps;
}
// Set the window size over which to measure the current bitrate.
// For retransmissions, this is typically the RTT.
bool RateLimiter::SetWindowSize(int64_t window_size_ms) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
window_size_ms_ = window_size_ms;
return current_rate_.SetWindowSize(window_size_ms,
clock_->TimeInMilliseconds());
diff --git a/rtc_base/rate_limiter.h b/rtc_base/rate_limiter.h
index 1c956d788b..051ccf6aa6 100644
--- a/rtc_base/rate_limiter.h
+++ b/rtc_base/rate_limiter.h
@@ -15,8 +15,8 @@
#include <stdint.h>
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -45,7 +45,7 @@ class RateLimiter {
private:
Clock* const clock_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
RateStatistics current_rate_ RTC_GUARDED_BY(lock_);
int64_t window_size_ms_ RTC_GUARDED_BY(lock_);
uint32_t max_rate_bps_ RTC_GUARDED_BY(lock_);
diff --git a/rtc_base/rate_statistics.cc b/rtc_base/rate_statistics.cc
index c4c2e78581..85621fa555 100644
--- a/rtc_base/rate_statistics.cc
+++ b/rtc_base/rate_statistics.cc
@@ -20,29 +20,26 @@
namespace webrtc {
+RateStatistics::Bucket::Bucket(int64_t timestamp)
+ : sum(0), num_samples(0), timestamp(timestamp) {}
+
RateStatistics::RateStatistics(int64_t window_size_ms, float scale)
- : buckets_(new Bucket[window_size_ms]()),
- accumulated_count_(0),
+ : accumulated_count_(0),
+ first_timestamp_(-1),
num_samples_(0),
- oldest_time_(-window_size_ms),
- oldest_index_(0),
scale_(scale),
max_window_size_ms_(window_size_ms),
current_window_size_ms_(max_window_size_ms_) {}
RateStatistics::RateStatistics(const RateStatistics& other)
- : accumulated_count_(other.accumulated_count_),
+ : buckets_(other.buckets_),
+ accumulated_count_(other.accumulated_count_),
+ first_timestamp_(other.first_timestamp_),
overflow_(other.overflow_),
num_samples_(other.num_samples_),
- oldest_time_(other.oldest_time_),
- oldest_index_(other.oldest_index_),
scale_(other.scale_),
max_window_size_ms_(other.max_window_size_ms_),
- current_window_size_ms_(other.current_window_size_ms_) {
- buckets_ = std::make_unique<Bucket[]>(other.max_window_size_ms_);
- std::copy(other.buckets_.get(),
- other.buckets_.get() + other.max_window_size_ms_, buckets_.get());
-}
+ current_window_size_ms_(other.current_window_size_ms_) {}
RateStatistics::RateStatistics(RateStatistics&& other) = default;
@@ -52,33 +49,33 @@ void RateStatistics::Reset() {
accumulated_count_ = 0;
overflow_ = false;
num_samples_ = 0;
- oldest_time_ = -max_window_size_ms_;
- oldest_index_ = 0;
+ first_timestamp_ = -1;
current_window_size_ms_ = max_window_size_ms_;
- for (int64_t i = 0; i < max_window_size_ms_; i++)
- buckets_[i] = Bucket();
+ buckets_.clear();
}
void RateStatistics::Update(int64_t count, int64_t now_ms) {
- RTC_DCHECK_LE(0, count);
- if (now_ms < oldest_time_) {
- // Too old data is ignored.
- return;
- }
+ RTC_DCHECK_GE(count, 0);
EraseOld(now_ms);
+ if (first_timestamp_ == -1) {
+ first_timestamp_ = now_ms;
+ }
+
+ if (buckets_.empty() || now_ms != buckets_.back().timestamp) {
+ if (!buckets_.empty() && now_ms < buckets_.back().timestamp) {
+ RTC_LOG(LS_WARNING) << "Timestamp " << now_ms
+ << " is before the last added "
+ "timestamp in the rate window: "
+ << buckets_.back().timestamp << ", aligning to that.";
+ now_ms = buckets_.back().timestamp;
+ }
+ buckets_.emplace_back(now_ms);
+ }
+ Bucket& last_bucket = buckets_.back();
+ last_bucket.sum += count;
+ ++last_bucket.num_samples;
- // First ever sample, reset window to start now.
- if (!IsInitialized())
- oldest_time_ = now_ms;
-
- uint32_t now_offset = rtc::dchecked_cast<uint32_t>(now_ms - oldest_time_);
- RTC_DCHECK_LT(now_offset, max_window_size_ms_);
- uint32_t index = oldest_index_ + now_offset;
- if (index >= max_window_size_ms_)
- index -= max_window_size_ms_;
- buckets_[index].sum += count;
- ++buckets_[index].samples;
if (std::numeric_limits<int64_t>::max() - accumulated_count_ > count) {
accumulated_count_ += count;
} else {
@@ -92,10 +89,22 @@ absl::optional<int64_t> RateStatistics::Rate(int64_t now_ms) const {
// of the members as mutable...
const_cast<RateStatistics*>(this)->EraseOld(now_ms);
+ int active_window_size = 0;
+ if (first_timestamp_ != -1) {
+ if (first_timestamp_ <= now_ms - current_window_size_ms_) {
+ // Count window as full even if no data points currently in view, if the
+ // data stream started before the window.
+ active_window_size = current_window_size_ms_;
+ } else {
+ // Size of a single bucket is 1ms, so even if now_ms == first_timestmap_
+ // the window size should be 1.
+ active_window_size = now_ms - first_timestamp_ + 1;
+ }
+ }
+
// If window is a single bucket or there is only one sample in a data set that
// has not grown to the full window size, or if the accumulator has
// overflowed, treat this as rate unavailable.
- int active_window_size = now_ms - oldest_time_ + 1;
if (num_samples_ == 0 || active_window_size <= 1 ||
(num_samples_ <= 1 &&
rtc::SafeLt(active_window_size, current_window_size_ms_)) ||
@@ -114,43 +123,35 @@ absl::optional<int64_t> RateStatistics::Rate(int64_t now_ms) const {
}
void RateStatistics::EraseOld(int64_t now_ms) {
- if (!IsInitialized())
- return;
-
// New oldest time that is included in data set.
- int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;
-
- // New oldest time is older than the current one, no need to cull data.
- if (new_oldest_time <= oldest_time_)
- return;
+ const int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;
// Loop over buckets and remove too old data points.
- while (num_samples_ > 0 && oldest_time_ < new_oldest_time) {
- const Bucket& oldest_bucket = buckets_[oldest_index_];
+ while (!buckets_.empty() && buckets_.front().timestamp < new_oldest_time) {
+ const Bucket& oldest_bucket = buckets_.front();
RTC_DCHECK_GE(accumulated_count_, oldest_bucket.sum);
- RTC_DCHECK_GE(num_samples_, oldest_bucket.samples);
+ RTC_DCHECK_GE(num_samples_, oldest_bucket.num_samples);
accumulated_count_ -= oldest_bucket.sum;
- num_samples_ -= oldest_bucket.samples;
- buckets_[oldest_index_] = Bucket();
- if (++oldest_index_ >= max_window_size_ms_)
- oldest_index_ = 0;
- ++oldest_time_;
+ num_samples_ -= oldest_bucket.num_samples;
+ buckets_.pop_front();
// This does not clear overflow_ even when counter is empty.
// TODO(https://bugs.webrtc.org/11247): Consider if overflow_ can be reset.
}
- oldest_time_ = new_oldest_time;
}
bool RateStatistics::SetWindowSize(int64_t window_size_ms, int64_t now_ms) {
if (window_size_ms <= 0 || window_size_ms > max_window_size_ms_)
return false;
+ if (first_timestamp_ != -1) {
+ // If the window changes (e.g. decreases - removing data point, then
+ // increases again) we need to update the first timestamp mark as
+ // otherwise it indicates the window coveres a region of zeros, suddenly
+ // under-estimating the rate.
+ first_timestamp_ = std::max(first_timestamp_, now_ms - window_size_ms + 1);
+ }
current_window_size_ms_ = window_size_ms;
EraseOld(now_ms);
return true;
}
-bool RateStatistics::IsInitialized() const {
- return oldest_time_ != -max_window_size_ms_;
-}
-
} // namespace webrtc
diff --git a/rtc_base/rate_statistics.h b/rtc_base/rate_statistics.h
index 11c8cee7af..dc8d7f5272 100644
--- a/rtc_base/rate_statistics.h
+++ b/rtc_base/rate_statistics.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdint.h>
+#include <deque>
#include <memory>
#include "absl/types/optional.h"
@@ -28,6 +29,10 @@ namespace webrtc {
// high; for instance, a 20 Mbit/sec video stream can wrap a 32-bit byte
// counter in 14 minutes.
+// Note that timestamps used in Update(), Rate() and SetWindowSize() must never
+// decrease for two consecutive calls.
+// TODO(bugs.webrtc.org/11600): Migrate from int64_t to Timestamp.
+
class RTC_EXPORT RateStatistics {
public:
static constexpr float kBpsScale = 8000.0f;
@@ -65,19 +70,22 @@ class RTC_EXPORT RateStatistics {
private:
void EraseOld(int64_t now_ms);
- bool IsInitialized() const;
- // Counters are kept in buckets (circular buffer), with one bucket
- // per millisecond.
struct Bucket {
+ explicit Bucket(int64_t timestamp);
int64_t sum; // Sum of all samples in this bucket.
- int samples; // Number of samples in this bucket.
+ int num_samples; // Number of samples in this bucket.
+ const int64_t timestamp; // Timestamp this bucket corresponds to.
};
- std::unique_ptr<Bucket[]> buckets_;
+ // All buckets within the time window, ordered by time.
+ std::deque<Bucket> buckets_;
- // Total count recorded in buckets.
+ // Total count recorded in all buckets.
int64_t accumulated_count_;
+ // Timestamp of the first data point seen, or -1 of none seen.
+ int64_t first_timestamp_;
+
// True if accumulated_count_ has ever grown too large to be
// contained in its integer type.
bool overflow_ = false;
@@ -85,12 +93,6 @@ class RTC_EXPORT RateStatistics {
// The total number of samples in the buckets.
int num_samples_;
- // Oldest time recorded in buckets.
- int64_t oldest_time_;
-
- // Bucket index of oldest counter recorded in buckets.
- int64_t oldest_index_;
-
// To convert counts/ms to desired units
const float scale_;
diff --git a/rtc_base/signal_thread.h b/rtc_base/signal_thread.h
index d9e8ade9b0..b444d54994 100644
--- a/rtc_base/signal_thread.h
+++ b/rtc_base/signal_thread.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,147 +11,9 @@
#ifndef RTC_BASE_SIGNAL_THREAD_H_
#define RTC_BASE_SIGNAL_THREAD_H_
-#include <string>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
-#include "rtc_base/message_handler.h"
-#include "rtc_base/third_party/sigslot/sigslot.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_annotations.h"
-
-namespace rtc {
-
-///////////////////////////////////////////////////////////////////////////////
-// SignalThread - Base class for worker threads. The main thread should call
-// Start() to begin work, and then follow one of these models:
-// Normal: Wait for SignalWorkDone, and then call Release to destroy.
-// Cancellation: Call Release(true), to abort the worker thread.
-// Fire-and-forget: Call Release(false), which allows the thread to run to
-// completion, and then self-destruct without further notification.
-// Periodic tasks: Wait for SignalWorkDone, then eventually call Start()
-// again to repeat the task. When the instance isn't needed anymore,
-// call Release. DoWork, OnWorkStart and OnWorkStop are called again,
-// on a new thread.
-// The subclass should override DoWork() to perform the background task. By
-// periodically calling ContinueWork(), it can check for cancellation.
-// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work
-// tasks in the context of the main thread.
-///////////////////////////////////////////////////////////////////////////////
-
-class SignalThread : public sigslot::has_slots<>, protected MessageHandler {
- public:
- SignalThread();
-
- // Context: Main Thread. Call before Start to change the worker's name.
- bool SetName(const std::string& name, const void* obj);
-
- // Context: Main Thread. Call to begin the worker thread.
- void Start();
-
- // Context: Main Thread. If the worker thread is not running, deletes the
- // object immediately. Otherwise, asks the worker thread to abort processing,
- // and schedules the object to be deleted once the worker exits.
- // SignalWorkDone will not be signalled. If wait is true, does not return
- // until the thread is deleted.
- void Destroy(bool wait);
-
- // Context: Main Thread. If the worker thread is complete, deletes the
- // object immediately. Otherwise, schedules the object to be deleted once
- // the worker thread completes. SignalWorkDone will be signalled.
- void Release();
-
- // Context: Main Thread. Signalled when work is complete.
- sigslot::signal1<SignalThread*> SignalWorkDone;
-
- enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE };
-
- protected:
- ~SignalThread() override;
-
- Thread* worker() { return &worker_; }
-
- // Context: Main Thread. Subclass should override to do pre-work setup.
- virtual void OnWorkStart() {}
-
- // Context: Worker Thread. Subclass should override to do work.
- virtual void DoWork() = 0;
-
- // Context: Worker Thread. Subclass should call periodically to
- // dispatch messages and determine if the thread should terminate.
- bool ContinueWork();
-
- // Context: Worker Thread. Subclass should override when extra work is
- // needed to abort the worker thread.
- virtual void OnWorkStop() {}
-
- // Context: Main Thread. Subclass should override to do post-work cleanup.
- virtual void OnWorkDone() {}
-
- // Context: Any Thread. If subclass overrides, be sure to call the base
- // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE)
- void OnMessage(Message* msg) override;
-
- private:
- enum State {
- kInit, // Initialized, but not started
- kRunning, // Started and doing work
- kReleasing, // Same as running, but to be deleted when work is done
- kComplete, // Work is done
- kStopping, // Work is being interrupted
- };
-
- class Worker : public Thread {
- public:
- explicit Worker(SignalThread* parent);
- ~Worker() override;
- void Run() override;
- bool IsProcessingMessagesForTesting() override;
-
- private:
- SignalThread* parent_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker);
- };
-
- class RTC_SCOPED_LOCKABLE EnterExit {
- public:
- explicit EnterExit(SignalThread* t) RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_)
- : t_(t) {
- t_->cs_.Enter();
- // If refcount_ is zero then the object has already been deleted and we
- // will be double-deleting it in ~EnterExit()! (shouldn't happen)
- RTC_DCHECK_NE(0, t_->refcount_);
- ++t_->refcount_;
- }
- ~EnterExit() RTC_UNLOCK_FUNCTION() {
- bool d = (0 == --t_->refcount_);
- t_->cs_.Leave();
- if (d)
- delete t_;
- }
-
- private:
- SignalThread* t_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit);
- };
-
- void Run();
- void OnMainThreadDestroyed();
-
- Thread* main_;
- Worker worker_;
- CriticalSection cs_;
- State state_;
- int refcount_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(SignalThread);
-};
-
-///////////////////////////////////////////////////////////////////////////////
-
-} // namespace rtc
+// The facilities in this file have been deprecated. Please do not use them
+// in new code. New code should use factilities exposed by api/task_queue/
+// instead.
+#include "rtc_base/deprecated/signal_thread.h"
#endif // RTC_BASE_SIGNAL_THREAD_H_
diff --git a/rtc_base/ssl_adapter_unittest.cc b/rtc_base/ssl_adapter_unittest.cc
index 125b4bd50d..498eba312b 100644
--- a/rtc_base/ssl_adapter_unittest.cc
+++ b/rtc_base/ssl_adapter_unittest.cc
@@ -50,7 +50,7 @@ static std::string GetSSLProtocolName(const rtc::SSLMode& ssl_mode) {
class MockCertVerifier : public rtc::SSLCertificateVerifier {
public:
virtual ~MockCertVerifier() = default;
- MOCK_METHOD1(Verify, bool(const rtc::SSLCertificate&));
+ MOCK_METHOD(bool, Verify, (const rtc::SSLCertificate&), (override));
};
// TODO(benwright) - Move to using INSTANTIATE_TEST_SUITE_P instead of using
diff --git a/rtc_base/stream.h b/rtc_base/stream.h
index bfb9dc2c41..dc77a7111c 100644
--- a/rtc_base/stream.h
+++ b/rtc_base/stream.h
@@ -15,7 +15,6 @@
#include "rtc_base/buffer.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
diff --git a/rtc_base/strings/string_builder_unittest.cc b/rtc_base/strings/string_builder_unittest.cc
index 84717ad1d1..99dfd86292 100644
--- a/rtc_base/strings/string_builder_unittest.cc
+++ b/rtc_base/strings/string_builder_unittest.cc
@@ -59,7 +59,7 @@ TEST(SimpleStringBuilder, StdString) {
// off.
#if (GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)) || !RTC_DCHECK_IS_ON
-TEST(SimpleStringBuilder, BufferOverrunConstCharP) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharP) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
const char* const msg = "This is just too much";
@@ -71,7 +71,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharP) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunStdString) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunStdString) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << 12;
@@ -84,7 +84,7 @@ TEST(SimpleStringBuilder, BufferOverrunStdString) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunInt) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunInt) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
constexpr int num = -12345;
@@ -100,7 +100,7 @@ TEST(SimpleStringBuilder, BufferOverrunInt) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunDouble) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunDouble) {
char sb_buf[5];
SimpleStringBuilder sb(sb_buf);
constexpr double num = 123.456;
@@ -113,7 +113,7 @@ TEST(SimpleStringBuilder, BufferOverrunDouble) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharPAlreadyFull) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << 123;
@@ -126,7 +126,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunIntAlreadyFull) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunIntAlreadyFull) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << "xyz";
diff --git a/rtc_base/swap_queue_unittest.cc b/rtc_base/swap_queue_unittest.cc
index 199ac6b185..3862d850fa 100644
--- a/rtc_base/swap_queue_unittest.cc
+++ b/rtc_base/swap_queue_unittest.cc
@@ -135,7 +135,7 @@ TEST(SwapQueueTest, SuccessfulItemVerifyFunctor) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) {
+TEST(SwapQueueDeathTest, UnsuccessfulItemVerifyFunctor) {
// Queue item verifier for the test.
auto minus_2_verifier = [](const int& i) { return i > -2; };
SwapQueue<int, decltype(minus_2_verifier)> queue(2, minus_2_verifier);
@@ -148,7 +148,7 @@ TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) {
EXPECT_DEATH(result = queue.Insert(&invalid_value), "");
}
-TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) {
+TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyInsert) {
std::vector<int> template_element(kChunkSize);
SwapQueue<std::vector<int>,
SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
@@ -158,7 +158,7 @@ TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) {
EXPECT_DEATH(result = queue.Insert(&invalid_chunk), "");
}
-TEST(SwapQueueTest, UnSuccessfulItemVerifyRemove) {
+TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyRemove) {
std::vector<int> template_element(kChunkSize);
SwapQueue<std::vector<int>,
SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
diff --git a/rtc_base/synchronization/BUILD.gn b/rtc_base/synchronization/BUILD.gn
index 3e7b22d4f9..a79a0486af 100644
--- a/rtc_base/synchronization/BUILD.gn
+++ b/rtc_base/synchronization/BUILD.gn
@@ -12,6 +12,38 @@ if (is_android) {
import("//build/config/android/rules.gni")
}
+rtc_library("yield") {
+ sources = [
+ "yield.cc",
+ "yield.h",
+ ]
+ deps = []
+}
+
+rtc_library("mutex") {
+ sources = [
+ "mutex.cc",
+ "mutex.h",
+ "mutex_critical_section.h",
+ "mutex_pthread.h",
+ ]
+ if (rtc_use_absl_mutex) {
+ sources += [ "mutex_abseil.h" ]
+ }
+
+ deps = [
+ ":yield",
+ "..:checks",
+ "..:macromagic",
+ "..:platform_thread_types",
+ "../system:unused",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+ if (rtc_use_absl_mutex) {
+ absl_deps += [ "//third_party/abseil-cpp/absl/synchronization" ]
+ }
+}
+
rtc_library("rw_lock_wrapper") {
public = [ "rw_lock_wrapper.h" ]
sources = [ "rw_lock_wrapper.cc" ]
@@ -36,10 +68,12 @@ rtc_library("sequence_checker") {
"sequence_checker.h",
]
deps = [
+ ":mutex",
"..:checks",
"..:criticalsection",
"..:macromagic",
"..:platform_thread_types",
+ "..:stringutils",
"../../api/task_queue",
"../system:rtc_export",
]
@@ -50,8 +84,8 @@ rtc_library("yield_policy") {
"yield_policy.cc",
"yield_policy.h",
]
- deps = [
- "..:checks",
+ deps = [ "..:checks" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
]
@@ -60,11 +94,30 @@ rtc_library("yield_policy") {
if (rtc_include_tests) {
rtc_library("synchronization_unittests") {
testonly = true
- sources = [ "yield_policy_unittest.cc" ]
+ sources = [
+ "mutex_unittest.cc",
+ "yield_policy_unittest.cc",
+ ]
deps = [
+ ":mutex",
+ ":yield",
":yield_policy",
+ "..:checks",
+ "..:macromagic",
+ "..:rtc_base",
"..:rtc_event",
"../../test:test_support",
+ "//third_party/google_benchmark",
+ ]
+ }
+
+ rtc_library("mutex_benchmark") {
+ testonly = true
+ sources = [ "mutex_benchmark.cc" ]
+ deps = [
+ ":mutex",
+ "../system:unused",
+ "//third_party/google_benchmark",
]
}
diff --git a/rtc_base/synchronization/DEPS b/rtc_base/synchronization/DEPS
new file mode 100644
index 0000000000..4ed1f2444b
--- /dev/null
+++ b/rtc_base/synchronization/DEPS
@@ -0,0 +1,11 @@
+specific_include_rules = {
+ "mutex_abseil\.h": [
+ "+absl/synchronization"
+ ],
+ ".*_benchmark\.cc": [
+ "+benchmark",
+ ],
+ ".*_unittest\.cc": [
+ "+benchmark",
+ ]
+}
diff --git a/rtc_base/synchronization/mutex.cc b/rtc_base/synchronization/mutex.cc
new file mode 100644
index 0000000000..6c2d6ff7f0
--- /dev/null
+++ b/rtc_base/synchronization/mutex.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/mutex.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/yield.h"
+
+namespace webrtc {
+
+#if !defined(WEBRTC_ABSL_MUTEX)
+void GlobalMutex::Lock() {
+ while (mutex_locked_.exchange(1)) {
+ YieldCurrentThread();
+ }
+}
+
+void GlobalMutex::Unlock() {
+ int old = mutex_locked_.exchange(0);
+ RTC_DCHECK_EQ(old, 1) << "Unlock called without calling Lock first";
+}
+
+GlobalMutexLock::GlobalMutexLock(GlobalMutex* mutex) : mutex_(mutex) {
+ mutex_->Lock();
+}
+
+GlobalMutexLock::~GlobalMutexLock() {
+ mutex_->Unlock();
+}
+#endif // #if !defined(WEBRTC_ABSL_MUTEX)
+
+} // namespace webrtc
diff --git a/rtc_base/synchronization/mutex.h b/rtc_base/synchronization/mutex.h
new file mode 100644
index 0000000000..1ccbbdcbd5
--- /dev/null
+++ b/rtc_base/synchronization/mutex.h
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_H_
+
+#include <atomic>
+
+#include "absl/base/const_init.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/system/unused.h"
+#include "rtc_base/thread_annotations.h"
+
+#if defined(WEBRTC_ABSL_MUTEX)
+#include "rtc_base/synchronization/mutex_abseil.h" // nogncheck
+#elif defined(WEBRTC_WIN)
+#include "rtc_base/synchronization/mutex_critical_section.h"
+#elif defined(WEBRTC_POSIX)
+#include "rtc_base/synchronization/mutex_pthread.h"
+#else
+#error Unsupported platform.
+#endif
+
+namespace webrtc {
+
+// The Mutex guarantees exclusive access and aims to follow Abseil semantics
+// (i.e. non-reentrant etc).
+class RTC_LOCKABLE Mutex final {
+ public:
+ Mutex() = default;
+ Mutex(const Mutex&) = delete;
+ Mutex& operator=(const Mutex&) = delete;
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder();
+ impl_.Lock();
+ // |holder_| changes from 0 to CurrentThreadRef().
+ holder_.store(current, std::memory_order_relaxed);
+ }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder();
+ if (impl_.TryLock()) {
+ // |holder_| changes from 0 to CurrentThreadRef().
+ holder_.store(current, std::memory_order_relaxed);
+ return true;
+ }
+ return false;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() {
+ // |holder_| changes from CurrentThreadRef() to 0. If something else than
+ // CurrentThreadRef() is stored in |holder_|, the Unlock results in
+ // undefined behavior as mutexes can't be unlocked from another thread than
+ // the one that locked it, or called while not being locked.
+ holder_.store(0, std::memory_order_relaxed);
+ impl_.Unlock();
+ }
+
+ private:
+ rtc::PlatformThreadRef CurrentThreadRefAssertingNotBeingHolder() {
+ rtc::PlatformThreadRef holder = holder_.load(std::memory_order_relaxed);
+ rtc::PlatformThreadRef current = rtc::CurrentThreadRef();
+ // TODO(bugs.webrtc.org/11567): remove this temporary check after migrating
+ // fully to Mutex.
+ RTC_CHECK_NE(holder, current);
+ return current;
+ }
+
+ MutexImpl impl_;
+ // TODO(bugs.webrtc.org/11567): remove |holder_| after migrating fully to
+ // Mutex.
+ // |holder_| contains the PlatformThreadRef of the thread currently holding
+ // the lock, or 0.
+ // Remarks on the used memory orders: the atomic load in
+ // CurrentThreadRefAssertingNotBeingHolder() observes either of two things:
+ // 1. our own previous write to holder_ with our thread ID.
+ // 2. another thread (with ID y) writing y and then 0 from an initial value of
+ // 0. If we're observing case 1, our own stores are obviously ordered before
+ // the load, and hit the CHECK. If we're observing case 2, the value observed
+ // w.r.t |impl_| being locked depends on the memory order. Since we only care
+ // that it's different from CurrentThreadRef()), we use the more performant
+ // option, memory_order_relaxed.
+ std::atomic<rtc::PlatformThreadRef> holder_ = {0};
+};
+
+// MutexLock, for serializing execution through a scope.
+class RTC_SCOPED_LOCKABLE MutexLock final {
+ public:
+ MutexLock(const MutexLock&) = delete;
+ MutexLock& operator=(const MutexLock&) = delete;
+
+ explicit MutexLock(Mutex* mutex) RTC_EXCLUSIVE_LOCK_FUNCTION(mutex)
+ : mutex_(mutex) {
+ mutex->Lock();
+ }
+ ~MutexLock() RTC_UNLOCK_FUNCTION() { mutex_->Unlock(); }
+
+ private:
+ Mutex* mutex_;
+};
+
+// A mutex used to protect global variables. Do NOT use for other purposes.
+#if defined(WEBRTC_ABSL_MUTEX)
+using GlobalMutex = absl::Mutex;
+using GlobalMutexLock = absl::MutexLock;
+#else
+class RTC_LOCKABLE GlobalMutex final {
+ public:
+ GlobalMutex(const GlobalMutex&) = delete;
+ GlobalMutex& operator=(const GlobalMutex&) = delete;
+
+ constexpr explicit GlobalMutex(absl::ConstInitType /*unused*/)
+ : mutex_locked_(0) {}
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION();
+ void Unlock() RTC_UNLOCK_FUNCTION();
+
+ private:
+ std::atomic<int> mutex_locked_; // 0 means lock not taken, 1 means taken.
+};
+
+// GlobalMutexLock, for serializing execution through a scope.
+class RTC_SCOPED_LOCKABLE GlobalMutexLock final {
+ public:
+ GlobalMutexLock(const GlobalMutexLock&) = delete;
+ GlobalMutexLock& operator=(const GlobalMutexLock&) = delete;
+
+ explicit GlobalMutexLock(GlobalMutex* mutex)
+ RTC_EXCLUSIVE_LOCK_FUNCTION(mutex_);
+ ~GlobalMutexLock() RTC_UNLOCK_FUNCTION();
+
+ private:
+ GlobalMutex* mutex_;
+};
+#endif // if defined(WEBRTC_ABSL_MUTEX)
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_H_
diff --git a/rtc_base/synchronization/mutex_abseil.h b/rtc_base/synchronization/mutex_abseil.h
new file mode 100644
index 0000000000..4ad1d07eef
--- /dev/null
+++ b/rtc_base/synchronization/mutex_abseil.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
+
+#include "absl/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() = default;
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return mutex_.TryLock();
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ absl::Mutex mutex_;
+};
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
diff --git a/rtc_base/synchronization/mutex_benchmark.cc b/rtc_base/synchronization/mutex_benchmark.cc
new file mode 100644
index 0000000000..40adca65d8
--- /dev/null
+++ b/rtc_base/synchronization/mutex_benchmark.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark/benchmark.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/unused.h"
+
+namespace webrtc {
+
+class PerfTestData {
+ public:
+ PerfTestData() : cache_line_barrier_1_(), cache_line_barrier_2_() {
+ cache_line_barrier_1_[0]++; // Avoid 'is not used'.
+ cache_line_barrier_2_[0]++; // Avoid 'is not used'.
+ }
+
+ int AddToCounter(int add) {
+ MutexLock mu(&mu_);
+ my_counter_ += add;
+ return 0;
+ }
+
+ private:
+ uint8_t cache_line_barrier_1_[64];
+ Mutex mu_;
+ uint8_t cache_line_barrier_2_[64];
+ int64_t my_counter_ = 0;
+};
+
+void BM_LockWithMutex(benchmark::State& state) {
+ static PerfTestData test_data;
+ for (auto s : state) {
+ RTC_UNUSED(s);
+ benchmark::DoNotOptimize(test_data.AddToCounter(2));
+ }
+}
+
+BENCHMARK(BM_LockWithMutex)->Threads(1);
+BENCHMARK(BM_LockWithMutex)->Threads(2);
+BENCHMARK(BM_LockWithMutex)->Threads(4);
+BENCHMARK(BM_LockWithMutex)->ThreadPerCpu();
+
+} // namespace webrtc
+
+/*
+
+Results:
+
+NB when reproducing: Remember to turn of power management features such as CPU
+scaling before running!
+
+pthreads (Linux):
+----------------------------------------------------------------------
+Run on (12 X 4500 MHz CPU s)
+CPU Caches:
+ L1 Data 32 KiB (x6)
+ L1 Instruction 32 KiB (x6)
+ L2 Unified 1024 KiB (x6)
+ L3 Unified 8448 KiB (x1)
+Load Average: 0.26, 0.28, 0.44
+----------------------------------------------------------------------
+Benchmark Time CPU Iterations
+----------------------------------------------------------------------
+BM_LockWithMutex/threads:1 13.4 ns 13.4 ns 52192906
+BM_LockWithMutex/threads:2 44.2 ns 88.4 ns 8189944
+BM_LockWithMutex/threads:4 52.0 ns 198 ns 3743244
+BM_LockWithMutex/threads:12 84.9 ns 944 ns 733524
+
+std::mutex performs like the pthread implementation (Linux).
+
+Abseil (Linux):
+----------------------------------------------------------------------
+Run on (12 X 4500 MHz CPU s)
+CPU Caches:
+ L1 Data 32 KiB (x6)
+ L1 Instruction 32 KiB (x6)
+ L2 Unified 1024 KiB (x6)
+ L3 Unified 8448 KiB (x1)
+Load Average: 0.27, 0.24, 0.37
+----------------------------------------------------------------------
+Benchmark Time CPU Iterations
+----------------------------------------------------------------------
+BM_LockWithMutex/threads:1 15.0 ns 15.0 ns 46550231
+BM_LockWithMutex/threads:2 91.1 ns 182 ns 4059212
+BM_LockWithMutex/threads:4 40.8 ns 131 ns 5496560
+BM_LockWithMutex/threads:12 37.0 ns 130 ns 5377668
+
+*/
diff --git a/rtc_base/synchronization/mutex_critical_section.h b/rtc_base/synchronization/mutex_critical_section.h
new file mode 100644
index 0000000000..d206794988
--- /dev/null
+++ b/rtc_base/synchronization/mutex_critical_section.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
+
+#if defined(WEBRTC_WIN)
+// clang-format off
+// clang formating would change include order.
+
+// Include winsock2.h before including <windows.h> to maintain consistency with
+// win32.h. To include win32.h directly, it must be broken out into its own
+// build target.
+#include <winsock2.h>
+#include <windows.h>
+#include <sal.h> // must come after windows headers.
+// clang-format on
+
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() { InitializeCriticalSection(&critical_section_); }
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+ ~MutexImpl() { DeleteCriticalSection(&critical_section_); }
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ EnterCriticalSection(&critical_section_);
+ }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return TryEnterCriticalSection(&critical_section_) != FALSE;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() {
+ LeaveCriticalSection(&critical_section_);
+ }
+
+ private:
+ CRITICAL_SECTION critical_section_;
+};
+
+} // namespace webrtc
+
+#endif // #if defined(WEBRTC_WIN)
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
diff --git a/rtc_base/synchronization/mutex_pthread.h b/rtc_base/synchronization/mutex_pthread.h
new file mode 100644
index 0000000000..c9496e72c9
--- /dev/null
+++ b/rtc_base/synchronization/mutex_pthread.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
+
+#if defined(WEBRTC_POSIX)
+
+#include <pthread.h>
+#if defined(WEBRTC_MAC)
+#include <pthread_spis.h>
+#endif
+
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() {
+ pthread_mutexattr_t mutex_attribute;
+ pthread_mutexattr_init(&mutex_attribute);
+#if defined(WEBRTC_MAC)
+ pthread_mutexattr_setpolicy_np(&mutex_attribute,
+ _PTHREAD_MUTEX_POLICY_FIRSTFIT);
+#endif
+ pthread_mutex_init(&mutex_, &mutex_attribute);
+ pthread_mutexattr_destroy(&mutex_attribute);
+ }
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+ ~MutexImpl() { pthread_mutex_destroy(&mutex_); }
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { pthread_mutex_lock(&mutex_); }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return pthread_mutex_trylock(&mutex_) == 0;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { pthread_mutex_unlock(&mutex_); }
+
+ private:
+ pthread_mutex_t mutex_;
+};
+
+} // namespace webrtc
+#endif // #if defined(WEBRTC_POSIX)
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
diff --git a/rtc_base/synchronization/mutex_unittest.cc b/rtc_base/synchronization/mutex_unittest.cc
new file mode 100644
index 0000000000..6a930bc042
--- /dev/null
+++ b/rtc_base/synchronization/mutex_unittest.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/mutex.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <atomic>
+#include <memory>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/location.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/yield.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::rtc::Event;
+using ::rtc::Message;
+using ::rtc::MessageHandler;
+using ::rtc::Thread;
+
+constexpr int kNumThreads = 16;
+
+template <class MutexType>
+class RTC_LOCKABLE RawMutexLocker {
+ public:
+ explicit RawMutexLocker(MutexType& mutex) : mutex_(mutex) {}
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ MutexType& mutex_;
+};
+
+class RTC_LOCKABLE RawMutexTryLocker {
+ public:
+ explicit RawMutexTryLocker(Mutex& mutex) : mutex_(mutex) {}
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ while (!mutex_.TryLock()) {
+ YieldCurrentThread();
+ }
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ Mutex& mutex_;
+};
+
+template <class MutexType, class MutexLockType>
+class MutexLockLocker {
+ public:
+ explicit MutexLockLocker(MutexType& mutex) : mutex_(mutex) {}
+ void Lock() { lock_ = std::make_unique<MutexLockType>(&mutex_); }
+ void Unlock() { lock_ = nullptr; }
+
+ private:
+ MutexType& mutex_;
+ std::unique_ptr<MutexLockType> lock_;
+};
+
+template <class MutexType, class MutexLocker>
+class LockRunner : public MessageHandler {
+ public:
+ template <typename... Args>
+ explicit LockRunner(Args... args)
+ : threads_active_(0),
+ start_event_(true, false),
+ done_event_(true, false),
+ shared_value_(0),
+ mutex_(args...),
+ locker_(mutex_) {}
+
+ bool Run() {
+ // Signal all threads to start.
+ start_event_.Set();
+
+ // Wait for all threads to finish.
+ return done_event_.Wait(kLongTime);
+ }
+
+ void SetExpectedThreadCount(int count) { threads_active_ = count; }
+
+ int shared_value() {
+ int shared_value;
+ locker_.Lock();
+ shared_value = shared_value_;
+ locker_.Unlock();
+ return shared_value_;
+ }
+
+ void OnMessage(Message* msg) override {
+ ASSERT_TRUE(start_event_.Wait(kLongTime));
+ locker_.Lock();
+
+ EXPECT_EQ(0, shared_value_);
+ int old = shared_value_;
+
+ // Use a loop to increase the chance of race. If the |locker_|
+ // implementation is faulty, it would be improbable that the error slips
+ // through.
+ for (int i = 0; i < kOperationsToRun; ++i) {
+ benchmark::DoNotOptimize(++shared_value_);
+ }
+ EXPECT_EQ(old + kOperationsToRun, shared_value_);
+ shared_value_ = 0;
+
+ locker_.Unlock();
+ if (threads_active_.fetch_sub(1) == 1) {
+ done_event_.Set();
+ }
+ }
+
+ private:
+ static constexpr int kLongTime = 10000; // 10 seconds
+ static constexpr int kOperationsToRun = 1000;
+
+ std::atomic<int> threads_active_;
+ Event start_event_;
+ Event done_event_;
+ int shared_value_;
+ MutexType mutex_;
+ MutexLocker locker_;
+};
+
+void StartThreads(std::vector<std::unique_ptr<Thread>>& threads,
+ MessageHandler* handler) {
+ for (int i = 0; i < kNumThreads; ++i) {
+ std::unique_ptr<Thread> thread(Thread::Create());
+ thread->Start();
+ thread->Post(RTC_FROM_HERE, handler);
+ threads.push_back(std::move(thread));
+ }
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, RawMutexLocker<Mutex>> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexTryLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, RawMutexTryLocker> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, MutexLockLocker<Mutex, MutexLock>> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndRawMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<GlobalMutex, RawMutexLocker<GlobalMutex>> runner(absl::kConstInit);
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<GlobalMutex, MutexLockLocker<GlobalMutex, GlobalMutexLock>> runner(
+ absl::kConstInit);
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, GlobalMutexCanHaveStaticStorageDuration) {
+ ABSL_CONST_INIT static GlobalMutex global_lock(absl::kConstInit);
+ global_lock.Lock();
+ global_lock.Unlock();
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/rtc_base/synchronization/sequence_checker.cc b/rtc_base/synchronization/sequence_checker.cc
index d64f32a616..1de26cf0fe 100644
--- a/rtc_base/synchronization/sequence_checker.cc
+++ b/rtc_base/synchronization/sequence_checker.cc
@@ -13,6 +13,8 @@
#include <dispatch/dispatch.h>
#endif
+#include "rtc_base/strings/string_builder.h"
+
namespace webrtc {
namespace {
// On Mac, returns the label of the current dispatch queue; elsewhere, return
@@ -24,8 +26,16 @@ const void* GetSystemQueueRef() {
return nullptr;
#endif
}
+
} // namespace
+std::string ExpectationToString(const webrtc::SequenceChecker* checker) {
+#if RTC_DCHECK_IS_ON
+ return checker->ExpectationToString();
+#endif
+ return std::string();
+}
+
SequenceCheckerImpl::SequenceCheckerImpl()
: attached_(true),
valid_thread_(rtc::CurrentThreadRef()),
@@ -38,7 +48,7 @@ bool SequenceCheckerImpl::IsCurrent() const {
const TaskQueueBase* const current_queue = TaskQueueBase::Current();
const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef();
const void* const current_system_queue = GetSystemQueueRef();
- rtc::CritScope scoped_lock(&lock_);
+ MutexLock scoped_lock(&lock_);
if (!attached_) { // Previously detached.
attached_ = true;
valid_thread_ = current_thread;
@@ -56,10 +66,47 @@ bool SequenceCheckerImpl::IsCurrent() const {
}
void SequenceCheckerImpl::Detach() {
- rtc::CritScope scoped_lock(&lock_);
+ MutexLock scoped_lock(&lock_);
attached_ = false;
// We don't need to touch the other members here, they will be
// reset on the next call to IsCurrent().
}
+#if RTC_DCHECK_IS_ON
+std::string SequenceCheckerImpl::ExpectationToString() const {
+ const TaskQueueBase* const current_queue = TaskQueueBase::Current();
+ const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef();
+ const void* const current_system_queue = GetSystemQueueRef();
+ MutexLock scoped_lock(&lock_);
+ if (!attached_)
+ return "Checker currently not attached.";
+
+ // The format of the string is meant to compliment the one we have inside of
+ // FatalLog() (checks.cc). Example:
+ //
+ // # Expected: TQ: 0x0 SysQ: 0x7fff69541330 Thread: 0x11dcf6dc0
+ // # Actual: TQ: 0x7fa8f0604190 SysQ: 0x7fa8f0604a30 Thread: 0x700006f1a000
+ // TaskQueue doesn't match
+
+ rtc::StringBuilder message;
+ message.AppendFormat(
+ "# Expected: TQ: %p SysQ: %p Thread: %p\n"
+ "# Actual: TQ: %p SysQ: %p Thread: %p\n",
+ valid_queue_, valid_system_queue_,
+ reinterpret_cast<const void*>(valid_thread_), current_queue,
+ current_system_queue, reinterpret_cast<const void*>(current_thread));
+
+ if ((valid_queue_ || current_queue) && valid_queue_ != current_queue) {
+ message << "TaskQueue doesn't match\n";
+ } else if (valid_system_queue_ &&
+ valid_system_queue_ != current_system_queue) {
+ message << "System queue doesn't match\n";
+ } else if (!rtc::IsThreadRefEqual(valid_thread_, current_thread)) {
+ message << "Threads don't match\n";
+ }
+
+ return message.Release();
+}
+#endif // RTC_DCHECK_IS_ON
+
} // namespace webrtc
diff --git a/rtc_base/synchronization/sequence_checker.h b/rtc_base/synchronization/sequence_checker.h
index fe644fa14e..ecf8490cec 100644
--- a/rtc_base/synchronization/sequence_checker.h
+++ b/rtc_base/synchronization/sequence_checker.h
@@ -10,9 +10,11 @@
#ifndef RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
#define RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
+#include <type_traits>
+
#include "api/task_queue/task_queue_base.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
@@ -34,8 +36,13 @@ class RTC_EXPORT SequenceCheckerImpl {
// used exclusively on another thread.
void Detach();
+ // Returns a string that is formatted to match with the error string printed
+ // by RTC_CHECK() when a condition is not met.
+ // This is used in conjunction with the RTC_DCHECK_RUN_ON() macro.
+ std::string ExpectationToString() const;
+
private:
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
// These are mutable so that IsCurrent can set them.
mutable bool attached_ RTC_GUARDED_BY(lock_);
mutable rtc::PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_);
@@ -162,8 +169,19 @@ class RTC_SCOPED_LOCKABLE SequenceCheckerScope {
#define RTC_RUN_ON(x) \
RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x))
+namespace webrtc {
+std::string ExpectationToString(const webrtc::SequenceChecker* checker);
+
+// Catch-all implementation for types other than explicitly supported above.
+template <typename ThreadLikeObject>
+std::string ExpectationToString(const ThreadLikeObject*) {
+ return std::string();
+}
+
+} // namespace webrtc
+
#define RTC_DCHECK_RUN_ON(x) \
webrtc::webrtc_seq_check_impl::SequenceCheckerScope seq_check_scope(x); \
- RTC_DCHECK((x)->IsCurrent())
+ RTC_DCHECK((x)->IsCurrent()) << webrtc::ExpectationToString(x)
#endif // RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
diff --git a/rtc_base/synchronization/sequence_checker_unittest.cc b/rtc_base/synchronization/sequence_checker_unittest.cc
index 1e62e9759b..6fcb522c54 100644
--- a/rtc_base/synchronization/sequence_checker_unittest.cc
+++ b/rtc_base/synchronization/sequence_checker_unittest.cc
@@ -31,7 +31,7 @@ class CompileTimeTestForGuardedBy {
int CalledOnSequence() RTC_RUN_ON(sequence_checker_) { return guarded_; }
void CallMeFromSequence() {
- RTC_DCHECK_RUN_ON(&sequence_checker_) << "Should be called on sequence";
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
guarded_ = 41;
}
@@ -158,7 +158,12 @@ void TestAnnotationsOnWrongQueue() {
}
#if RTC_DCHECK_IS_ON
-TEST(SequenceCheckerTest, TestAnnotationsOnWrongQueueDebug) {
+// Note: Ending the test suite name with 'DeathTest' is important as it causes
+// gtest to order this test before any other non-death-tests, to avoid potential
+// global process state pollution such as shared worker threads being started
+// (e.g. a side effect of calling InitCocoaMultiThreading() on Mac causes one or
+// two additional threads to be created).
+TEST(SequenceCheckerDeathTest, TestAnnotationsOnWrongQueueDebug) {
ASSERT_DEATH({ TestAnnotationsOnWrongQueue(); }, "");
}
#else
diff --git a/rtc_base/synchronization/yield.cc b/rtc_base/synchronization/yield.cc
new file mode 100644
index 0000000000..cbb58d12ab
--- /dev/null
+++ b/rtc_base/synchronization/yield.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/yield.h"
+
+#if defined(WEBRTC_WIN)
+#include <windows.h>
+#else
+#include <sched.h>
+#include <time.h>
+#endif
+
+namespace webrtc {
+
+void YieldCurrentThread() {
+ // TODO(bugs.webrtc.org/11634): use dedicated OS functionality instead of
+ // sleep for yielding.
+#if defined(WEBRTC_WIN)
+ ::Sleep(0);
+#elif defined(WEBRTC_MAC) && defined(RTC_USE_NATIVE_MUTEX_ON_MAC) && \
+ !RTC_USE_NATIVE_MUTEX_ON_MAC
+ sched_yield();
+#else
+ static const struct timespec ts_null = {0};
+ nanosleep(&ts_null, nullptr);
+#endif
+}
+
+} // namespace webrtc
diff --git a/rtc_base/synchronization/yield.h b/rtc_base/synchronization/yield.h
new file mode 100644
index 0000000000..d4f5f99f37
--- /dev/null
+++ b/rtc_base/synchronization/yield.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef RTC_BASE_SYNCHRONIZATION_YIELD_H_
+#define RTC_BASE_SYNCHRONIZATION_YIELD_H_
+
+namespace webrtc {
+
+// Request rescheduling of threads.
+void YieldCurrentThread();
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_YIELD_H_
diff --git a/rtc_base/synchronization/yield_policy_unittest.cc b/rtc_base/synchronization/yield_policy_unittest.cc
index e0c622510a..0bf38f4537 100644
--- a/rtc_base/synchronization/yield_policy_unittest.cc
+++ b/rtc_base/synchronization/yield_policy_unittest.cc
@@ -20,7 +20,7 @@ namespace rtc {
namespace {
class MockYieldHandler : public YieldInterface {
public:
- MOCK_METHOD0(YieldExecution, void());
+ MOCK_METHOD(void, YieldExecution, (), (override));
};
} // namespace
TEST(YieldPolicyTest, HandlerReceivesYieldSignalWhenSet) {
diff --git a/rtc_base/system/BUILD.gn b/rtc_base/system/BUILD.gn
index 79cb301038..fdb3f96e00 100644
--- a/rtc_base/system/BUILD.gn
+++ b/rtc_base/system/BUILD.gn
@@ -58,7 +58,7 @@ if (is_mac || is_ios) {
"cocoa_threading.mm",
]
deps = [ "..:checks" ]
- libs = [ "Foundation.framework" ]
+ frameworks = [ "Foundation.framework" ]
}
rtc_library("gcd_helpers") {
@@ -72,13 +72,14 @@ if (is_mac || is_ios) {
rtc_source_set("thread_registry") {
sources = [ "thread_registry.h" ]
- deps = [ "..:rtc_base_approved" ]
+ deps = [
+ "..:rtc_base_approved",
+ "../synchronization:mutex",
+ ]
if (is_android && !build_with_chromium) {
sources += [ "thread_registry.cc" ]
- deps += [
- "../../sdk/android:native_api_stacktrace",
- "//third_party/abseil-cpp/absl/base:core_headers",
- ]
+ deps += [ "../../sdk/android:native_api_stacktrace" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
}
diff --git a/rtc_base/system/file_wrapper.h b/rtc_base/system/file_wrapper.h
index 24c333a6c3..42c463cb15 100644
--- a/rtc_base/system/file_wrapper.h
+++ b/rtc_base/system/file_wrapper.h
@@ -14,7 +14,7 @@
#include <stddef.h>
#include <stdio.h>
-#include "rtc_base/critical_section.h"
+#include <string>
// Implementation that can read (exclusive) or write from/to a file.
diff --git a/rtc_base/system/thread_registry.cc b/rtc_base/system/thread_registry.cc
index 86605446c7..b0e83ca1e9 100644
--- a/rtc_base/system/thread_registry.cc
+++ b/rtc_base/system/thread_registry.cc
@@ -14,9 +14,9 @@
#include <utility>
#include "absl/base/attributes.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
#include "sdk/android/native_api/stacktrace/stacktrace.h"
namespace webrtc {
@@ -30,7 +30,7 @@ struct ThreadData {
// The map of registered threads, and the lock that protects it. We create the
// map on first use, and never destroy it.
-ABSL_CONST_INIT rtc::GlobalLock g_thread_registry_lock;
+ABSL_CONST_INIT GlobalMutex g_thread_registry_lock(absl::kConstInit);
ABSL_CONST_INIT std::map<const ScopedRegisterThreadForDebugging*, ThreadData>*
g_registered_threads = nullptr;
@@ -38,7 +38,7 @@ ABSL_CONST_INIT std::map<const ScopedRegisterThreadForDebugging*, ThreadData>*
ScopedRegisterThreadForDebugging::ScopedRegisterThreadForDebugging(
rtc::Location location) {
- rtc::GlobalLockScope gls(&g_thread_registry_lock);
+ GlobalMutexLock gls(&g_thread_registry_lock);
if (g_registered_threads == nullptr) {
g_registered_threads =
new std::map<const ScopedRegisterThreadForDebugging*, ThreadData>();
@@ -49,14 +49,14 @@ ScopedRegisterThreadForDebugging::ScopedRegisterThreadForDebugging(
}
ScopedRegisterThreadForDebugging::~ScopedRegisterThreadForDebugging() {
- rtc::GlobalLockScope gls(&g_thread_registry_lock);
+ GlobalMutexLock gls(&g_thread_registry_lock);
RTC_DCHECK(g_registered_threads != nullptr);
const int num_erased = g_registered_threads->erase(this);
RTC_DCHECK_EQ(num_erased, 1);
}
void PrintStackTracesOfRegisteredThreads() {
- rtc::GlobalLockScope gls(&g_thread_registry_lock);
+ GlobalMutexLock gls(&g_thread_registry_lock);
if (g_registered_threads == nullptr) {
return;
}
diff --git a/rtc_base/task_queue_libevent.cc b/rtc_base/task_queue_libevent.cc
index 349a5f21fc..38660cd5a2 100644
--- a/rtc_base/task_queue_libevent.cc
+++ b/rtc_base/task_queue_libevent.cc
@@ -29,11 +29,11 @@
#include "api/task_queue/task_queue_base.h"
#include "base/third_party/libevent/event.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
@@ -130,7 +130,7 @@ class TaskQueueLibevent final : public TaskQueueBase {
event_base* event_base_;
event wakeup_event_;
rtc::PlatformThread thread_;
- rtc::CriticalSection pending_lock_;
+ Mutex pending_lock_;
absl::InlinedVector<std::unique_ptr<QueuedTask>, 4> pending_
RTC_GUARDED_BY(pending_lock_);
// Holds a list of events pending timers for cleanup when the loop exits.
@@ -216,7 +216,7 @@ void TaskQueueLibevent::Delete() {
void TaskQueueLibevent::PostTask(std::unique_ptr<QueuedTask> task) {
{
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
bool had_pending_tasks = !pending_.empty();
pending_.push_back(std::move(task));
@@ -282,7 +282,7 @@ void TaskQueueLibevent::OnWakeup(int socket,
case kRunTasks: {
absl::InlinedVector<std::unique_ptr<QueuedTask>, 4> tasks;
{
- rtc::CritScope lock(&me->pending_lock_);
+ MutexLock lock(&me->pending_lock_);
tasks.swap(me->pending_);
}
RTC_DCHECK(!tasks.empty());
diff --git a/rtc_base/task_queue_stdlib.cc b/rtc_base/task_queue_stdlib.cc
index 7052f7c6db..5de634512e 100644
--- a/rtc_base/task_queue_stdlib.cc
+++ b/rtc_base/task_queue_stdlib.cc
@@ -22,10 +22,10 @@
#include "api/task_queue/queued_task.h"
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
@@ -97,7 +97,7 @@ class TaskQueueStdlib final : public TaskQueueBase {
// tasks (including delayed tasks).
rtc::PlatformThread thread_;
- rtc::CriticalSection pending_lock_;
+ Mutex pending_lock_;
// Indicates if the worker thread needs to shutdown now.
bool thread_should_quit_ RTC_GUARDED_BY(pending_lock_){false};
@@ -135,7 +135,7 @@ void TaskQueueStdlib::Delete() {
RTC_DCHECK(!IsCurrent());
{
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
thread_should_quit_ = true;
}
@@ -148,7 +148,7 @@ void TaskQueueStdlib::Delete() {
void TaskQueueStdlib::PostTask(std::unique_ptr<QueuedTask> task) {
{
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
OrderId order = thread_posting_order_++;
pending_queue_.push(std::pair<OrderId, std::unique_ptr<QueuedTask>>(
@@ -166,7 +166,7 @@ void TaskQueueStdlib::PostDelayedTask(std::unique_ptr<QueuedTask> task,
delay.next_fire_at_ms_ = fire_at;
{
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
delay.order_ = ++thread_posting_order_;
delayed_queue_[delay] = std::move(task);
}
@@ -179,7 +179,7 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() {
auto tick = rtc::TimeMillis();
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
if (thread_should_quit_) {
result.final_task_ = true;
diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc
index 8c11b8764a..5eb3776cea 100644
--- a/rtc_base/task_queue_win.cc
+++ b/rtc_base/task_queue_win.cc
@@ -33,12 +33,12 @@
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace {
@@ -205,7 +205,7 @@ class TaskQueueWin : public TaskQueueBase {
timer_tasks_;
UINT_PTR timer_id_ = 0;
WorkerThread thread_;
- rtc::CriticalSection pending_lock_;
+ Mutex pending_lock_;
std::queue<std::unique_ptr<QueuedTask>> pending_
RTC_GUARDED_BY(pending_lock_);
HANDLE in_queue_;
@@ -235,7 +235,7 @@ void TaskQueueWin::Delete() {
}
void TaskQueueWin::PostTask(std::unique_ptr<QueuedTask> task) {
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
pending_.push(std::move(task));
::SetEvent(in_queue_);
}
@@ -262,7 +262,7 @@ void TaskQueueWin::RunPendingTasks() {
while (true) {
std::unique_ptr<QueuedTask> task;
{
- rtc::CritScope lock(&pending_lock_);
+ MutexLock lock(&pending_lock_);
if (pending_.empty())
break;
task = std::move(pending_.front());
diff --git a/rtc_base/task_utils/BUILD.gn b/rtc_base/task_utils/BUILD.gn
index 1882cd9ee8..54f9a048f0 100644
--- a/rtc_base/task_utils/BUILD.gn
+++ b/rtc_base/task_utils/BUILD.gn
@@ -21,9 +21,10 @@ rtc_library("repeating_task") {
"../../api/task_queue",
"../../api/units:time_delta",
"../../api/units:timestamp",
+ "../../system_wrappers:system_wrappers",
"../synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("pending_task_safety_flag") {
@@ -81,7 +82,7 @@ if (rtc_include_tests) {
":to_queued_task",
"../../api/task_queue",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
}
diff --git a/rtc_base/task_utils/pending_task_safety_flag.cc b/rtc_base/task_utils/pending_task_safety_flag.cc
index 307d2d594c..4be2131f3f 100644
--- a/rtc_base/task_utils/pending_task_safety_flag.cc
+++ b/rtc_base/task_utils/pending_task_safety_flag.cc
@@ -15,7 +15,7 @@
namespace webrtc {
// static
-PendingTaskSafetyFlag::Pointer PendingTaskSafetyFlag::Create() {
+rtc::scoped_refptr<PendingTaskSafetyFlag> PendingTaskSafetyFlag::Create() {
return new rtc::RefCountedObject<PendingTaskSafetyFlag>();
}
diff --git a/rtc_base/task_utils/pending_task_safety_flag.h b/rtc_base/task_utils/pending_task_safety_flag.h
index 1b301c8034..580fb3f912 100644
--- a/rtc_base/task_utils/pending_task_safety_flag.h
+++ b/rtc_base/task_utils/pending_task_safety_flag.h
@@ -36,12 +36,17 @@ namespace webrtc {
// MyMethod();
// }));
//
+// Or implicitly by letting ToQueuedTask do the checking:
+//
+// // Running outside of the main thread.
+// my_task_queue_->PostTask(ToQueuedTask(pending_task_safety_flag_,
+// [this]() { MyMethod(); }));
+//
// Note that checking the state only works on the construction/destruction
// thread of the ReceiveStatisticsProxy instance.
class PendingTaskSafetyFlag : public rtc::RefCountInterface {
public:
- using Pointer = rtc::scoped_refptr<PendingTaskSafetyFlag>;
- static Pointer Create();
+ static rtc::scoped_refptr<PendingTaskSafetyFlag> Create();
~PendingTaskSafetyFlag() = default;
@@ -56,6 +61,25 @@ class PendingTaskSafetyFlag : public rtc::RefCountInterface {
SequenceChecker main_sequence_;
};
+// Makes using PendingTaskSafetyFlag very simple. Automatic PTSF creation
+// and signalling of destruction when the ScopedTaskSafety instance goes out
+// of scope.
+// Should be used by the class that wants tasks dropped after destruction.
+// Requirements are that the instance be constructed and destructed on
+// the same thread as the potentially dropped tasks would be running on.
+class ScopedTaskSafety {
+ public:
+ ScopedTaskSafety() = default;
+ ~ScopedTaskSafety() { flag_->SetNotAlive(); }
+
+ // Returns a new reference to the safety flag.
+ rtc::scoped_refptr<PendingTaskSafetyFlag> flag() const { return flag_; }
+
+ private:
+ rtc::scoped_refptr<PendingTaskSafetyFlag> flag_ =
+ PendingTaskSafetyFlag::Create();
+};
+
} // namespace webrtc
#endif // RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_
diff --git a/rtc_base/task_utils/pending_task_safety_flag_unittest.cc b/rtc_base/task_utils/pending_task_safety_flag_unittest.cc
index 0c1c3c8e52..6df2fe2ffb 100644
--- a/rtc_base/task_utils/pending_task_safety_flag_unittest.cc
+++ b/rtc_base/task_utils/pending_task_safety_flag_unittest.cc
@@ -29,7 +29,7 @@ using ::testing::Return;
} // namespace
TEST(PendingTaskSafetyFlagTest, Basic) {
- PendingTaskSafetyFlag::Pointer safety_flag;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag;
{
// Scope for the |owner| instance.
class Owner {
@@ -37,12 +37,27 @@ TEST(PendingTaskSafetyFlagTest, Basic) {
Owner() = default;
~Owner() { flag_->SetNotAlive(); }
- PendingTaskSafetyFlag::Pointer flag_{PendingTaskSafetyFlag::Create()};
+ rtc::scoped_refptr<PendingTaskSafetyFlag> flag_ =
+ PendingTaskSafetyFlag::Create();
} owner;
EXPECT_TRUE(owner.flag_->alive());
safety_flag = owner.flag_;
EXPECT_TRUE(safety_flag->alive());
}
+ // |owner| now out of scope.
+ EXPECT_FALSE(safety_flag->alive());
+}
+
+TEST(PendingTaskSafetyFlagTest, BasicScoped) {
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag;
+ {
+ struct Owner {
+ ScopedTaskSafety safety;
+ } owner;
+ safety_flag = owner.safety.flag();
+ EXPECT_TRUE(safety_flag->alive());
+ }
+ // |owner| now out of scope.
EXPECT_FALSE(safety_flag->alive());
}
@@ -72,7 +87,8 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskSuccess) {
private:
TaskQueueBase* const tq_main_;
bool stuff_done_ = false;
- PendingTaskSafetyFlag::Pointer flag_{PendingTaskSafetyFlag::Create()};
+ rtc::scoped_refptr<PendingTaskSafetyFlag> flag_{
+ PendingTaskSafetyFlag::Create()};
};
std::unique_ptr<Owner> owner;
@@ -106,22 +122,18 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskDropped) {
}
~Owner() {
RTC_DCHECK(tq_main_->IsCurrent());
- flag_->SetNotAlive();
}
void DoStuff() {
RTC_DCHECK(!tq_main_->IsCurrent());
- tq_main_->PostTask(ToQueuedTask([safe = flag_, this]() {
- if (!safe->alive())
- return;
- *stuff_done_ = true;
- }));
+ tq_main_->PostTask(
+ ToQueuedTask(safety_, [this]() { *stuff_done_ = true; }));
}
private:
TaskQueueBase* const tq_main_;
bool* const stuff_done_;
- PendingTaskSafetyFlag::Pointer flag_{PendingTaskSafetyFlag::Create()};
+ ScopedTaskSafety safety_;
};
std::unique_ptr<Owner> owner;
diff --git a/rtc_base/task_utils/repeating_task.cc b/rtc_base/task_utils/repeating_task.cc
index 4e460bb082..574e6331f1 100644
--- a/rtc_base/task_utils/repeating_task.cc
+++ b/rtc_base/task_utils/repeating_task.cc
@@ -17,10 +17,13 @@
namespace webrtc {
namespace webrtc_repeating_task_impl {
+
RepeatingTaskBase::RepeatingTaskBase(TaskQueueBase* task_queue,
- TimeDelta first_delay)
+ TimeDelta first_delay,
+ Clock* clock)
: task_queue_(task_queue),
- next_run_time_(Timestamp::Micros(rtc::TimeMicros()) + first_delay) {}
+ clock_(clock),
+ next_run_time_(clock_->CurrentTime() + first_delay) {}
RepeatingTaskBase::~RepeatingTaskBase() = default;
@@ -38,7 +41,7 @@ bool RepeatingTaskBase::Run() {
return true;
RTC_DCHECK(delay.IsFinite());
- TimeDelta lost_time = Timestamp::Micros(rtc::TimeMicros()) - next_run_time_;
+ TimeDelta lost_time = clock_->CurrentTime() - next_run_time_;
next_run_time_ += delay;
delay -= lost_time;
delay = std::max(delay, TimeDelta::Zero());
@@ -51,6 +54,7 @@ bool RepeatingTaskBase::Run() {
}
void RepeatingTaskBase::Stop() {
+ RTC_DCHECK_RUN_ON(task_queue_);
RTC_DCHECK(next_run_time_.IsFinite());
next_run_time_ = Timestamp::PlusInfinity();
}
@@ -75,7 +79,6 @@ RepeatingTaskHandle::RepeatingTaskHandle(
void RepeatingTaskHandle::Stop() {
if (repeating_task_) {
- RTC_DCHECK_RUN_ON(repeating_task_->task_queue_);
repeating_task_->Stop();
repeating_task_ = nullptr;
}
diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h
index 1545d6f757..487b7d19d4 100644
--- a/rtc_base/task_utils/repeating_task.h
+++ b/rtc_base/task_utils/repeating_task.h
@@ -19,8 +19,7 @@
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/synchronization/sequence_checker.h"
-#include "rtc_base/thread_checker.h"
+#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -29,17 +28,20 @@ class RepeatingTaskHandle;
namespace webrtc_repeating_task_impl {
class RepeatingTaskBase : public QueuedTask {
public:
- RepeatingTaskBase(TaskQueueBase* task_queue, TimeDelta first_delay);
+ RepeatingTaskBase(TaskQueueBase* task_queue,
+ TimeDelta first_delay,
+ Clock* clock);
~RepeatingTaskBase() override;
- virtual TimeDelta RunClosure() = 0;
+
+ void Stop();
private:
- friend class ::webrtc::RepeatingTaskHandle;
+ virtual TimeDelta RunClosure() = 0;
bool Run() final;
- void Stop() RTC_RUN_ON(task_queue_);
TaskQueueBase* const task_queue_;
+ Clock* const clock_;
// This is always finite, except for the special case where it's PlusInfinity
// to signal that the task should stop.
Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_);
@@ -51,8 +53,9 @@ class RepeatingTaskImpl final : public RepeatingTaskBase {
public:
RepeatingTaskImpl(TaskQueueBase* task_queue,
TimeDelta first_delay,
- Closure&& closure)
- : RepeatingTaskBase(task_queue, first_delay),
+ Closure&& closure,
+ Clock* clock)
+ : RepeatingTaskBase(task_queue, first_delay, clock),
closure_(std::forward<Closure>(closure)) {
static_assert(
std::is_same<TimeDelta,
@@ -61,9 +64,9 @@ class RepeatingTaskImpl final : public RepeatingTaskBase {
"");
}
+ private:
TimeDelta RunClosure() override { return closure_(); }
- private:
typename std::remove_const<
typename std::remove_reference<Closure>::type>::type closure_;
};
@@ -92,10 +95,11 @@ class RepeatingTaskHandle {
// repeated task is owned by the TaskQueue.
template <class Closure>
static RepeatingTaskHandle Start(TaskQueueBase* task_queue,
- Closure&& closure) {
+ Closure&& closure,
+ Clock* clock = Clock::GetRealTimeClock()) {
auto repeating_task = std::make_unique<
webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, TimeDelta::Zero(), std::forward<Closure>(closure));
+ task_queue, TimeDelta::Zero(), std::forward<Closure>(closure), clock);
auto* repeating_task_ptr = repeating_task.get();
task_queue->PostTask(std::move(repeating_task));
return RepeatingTaskHandle(repeating_task_ptr);
@@ -104,12 +108,14 @@ class RepeatingTaskHandle {
// DelayedStart is equivalent to Start except that the first invocation of the
// closure will be delayed by the given amount.
template <class Closure>
- static RepeatingTaskHandle DelayedStart(TaskQueueBase* task_queue,
- TimeDelta first_delay,
- Closure&& closure) {
+ static RepeatingTaskHandle DelayedStart(
+ TaskQueueBase* task_queue,
+ TimeDelta first_delay,
+ Closure&& closure,
+ Clock* clock = Clock::GetRealTimeClock()) {
auto repeating_task = std::make_unique<
webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, first_delay, std::forward<Closure>(closure));
+ task_queue, first_delay, std::forward<Closure>(closure), clock);
auto* repeating_task_ptr = repeating_task.get();
task_queue->PostDelayedTask(std::move(repeating_task), first_delay.ms());
return RepeatingTaskHandle(repeating_task_ptr);
diff --git a/rtc_base/task_utils/repeating_task_unittest.cc b/rtc_base/task_utils/repeating_task_unittest.cc
index 83efb29209..2fb15d1e5a 100644
--- a/rtc_base/task_utils/repeating_task_unittest.cc
+++ b/rtc_base/task_utils/repeating_task_unittest.cc
@@ -40,8 +40,23 @@ void Sleep(TimeDelta time_delta) {
class MockClosure {
public:
- MOCK_METHOD0(Call, TimeDelta());
- MOCK_METHOD0(Delete, void());
+ MOCK_METHOD(TimeDelta, Call, ());
+ MOCK_METHOD(void, Delete, ());
+};
+
+class MockTaskQueue : public TaskQueueBase {
+ public:
+ MockTaskQueue() : task_queue_setter_(this) {}
+
+ MOCK_METHOD(void, Delete, (), (override));
+ MOCK_METHOD(void, PostTask, (std::unique_ptr<QueuedTask> task), (override));
+ MOCK_METHOD(void,
+ PostDelayedTask,
+ (std::unique_ptr<QueuedTask> task, uint32_t milliseconds),
+ (override));
+
+ private:
+ CurrentTaskQueueSetter task_queue_setter_;
};
class MoveOnlyClosure {
@@ -228,4 +243,37 @@ TEST(RepeatingTaskTest, Example) {
// task queue destruction and running the desctructor closure.
}
+TEST(RepeatingTaskTest, ClockIntegration) {
+ std::unique_ptr<QueuedTask> delayed_task;
+ uint32_t expected_ms = 0;
+ SimulatedClock clock(Timestamp::Millis(0));
+
+ NiceMock<MockTaskQueue> task_queue;
+ ON_CALL(task_queue, PostDelayedTask)
+ .WillByDefault(
+ Invoke([&delayed_task, &expected_ms](std::unique_ptr<QueuedTask> task,
+ uint32_t milliseconds) {
+ EXPECT_EQ(milliseconds, expected_ms);
+ delayed_task = std::move(task);
+ }));
+
+ expected_ms = 100;
+ RepeatingTaskHandle handle = RepeatingTaskHandle::DelayedStart(
+ &task_queue, TimeDelta::Millis(100),
+ [&clock]() {
+ EXPECT_EQ(Timestamp::Millis(100), clock.CurrentTime());
+ // Simulate work happening for 10ms.
+ clock.AdvanceTimeMilliseconds(10);
+ return TimeDelta::Millis(100);
+ },
+ &clock);
+
+ clock.AdvanceTimeMilliseconds(100);
+ QueuedTask* task_to_run = delayed_task.release();
+ expected_ms = 90;
+ EXPECT_FALSE(task_to_run->Run());
+ EXPECT_NE(nullptr, delayed_task.get());
+ handle.Stop();
+}
+
} // namespace webrtc
diff --git a/rtc_base/task_utils/to_queued_task.h b/rtc_base/task_utils/to_queued_task.h
index cc9325ebd6..07ab0ebe26 100644
--- a/rtc_base/task_utils/to_queued_task.h
+++ b/rtc_base/task_utils/to_queued_task.h
@@ -39,7 +39,7 @@ class ClosureTask : public QueuedTask {
template <typename Closure>
class SafetyClosureTask : public QueuedTask {
public:
- explicit SafetyClosureTask(PendingTaskSafetyFlag::Pointer safety,
+ explicit SafetyClosureTask(rtc::scoped_refptr<PendingTaskSafetyFlag> safety,
Closure&& closure)
: closure_(std::forward<Closure>(closure)),
safety_flag_(std::move(safety)) {}
@@ -52,7 +52,7 @@ class SafetyClosureTask : public QueuedTask {
}
typename std::decay<Closure>::type closure_;
- PendingTaskSafetyFlag::Pointer safety_flag_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_;
};
// Extends ClosureTask to also allow specifying cleanup code.
@@ -81,13 +81,25 @@ std::unique_ptr<QueuedTask> ToQueuedTask(Closure&& closure) {
}
template <typename Closure>
-std::unique_ptr<QueuedTask> ToQueuedTask(PendingTaskSafetyFlag::Pointer safety,
- Closure&& closure) {
+std::unique_ptr<QueuedTask> ToQueuedTask(
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety,
+ Closure&& closure) {
return std::make_unique<webrtc_new_closure_impl::SafetyClosureTask<Closure>>(
std::move(safety), std::forward<Closure>(closure));
}
-template <typename Closure, typename Cleanup>
+template <typename Closure>
+std::unique_ptr<QueuedTask> ToQueuedTask(const ScopedTaskSafety& safety,
+ Closure&& closure) {
+ return ToQueuedTask(safety.flag(), std::forward<Closure>(closure));
+}
+
+template <typename Closure,
+ typename Cleanup,
+ typename std::enable_if<!std::is_same<
+ typename std::remove_const<
+ typename std::remove_reference<Closure>::type>::type,
+ ScopedTaskSafety>::value>::type* = nullptr>
std::unique_ptr<QueuedTask> ToQueuedTask(Closure&& closure, Cleanup&& cleanup) {
return std::make_unique<
webrtc_new_closure_impl::ClosureTaskWithCleanup<Closure, Cleanup>>(
diff --git a/rtc_base/task_utils/to_queued_task_unittest.cc b/rtc_base/task_utils/to_queued_task_unittest.cc
index e98c81e9ce..261b9e891b 100644
--- a/rtc_base/task_utils/to_queued_task_unittest.cc
+++ b/rtc_base/task_utils/to_queued_task_unittest.cc
@@ -127,7 +127,8 @@ TEST(ToQueuedTaskTest, AcceptsMoveOnlyCleanup) {
}
TEST(ToQueuedTaskTest, PendingTaskSafetyFlag) {
- PendingTaskSafetyFlag::Pointer flag(PendingTaskSafetyFlag::Create());
+ rtc::scoped_refptr<PendingTaskSafetyFlag> flag =
+ PendingTaskSafetyFlag::Create();
int count = 0;
// Create two identical tasks that increment the |count|.
diff --git a/rtc_base/test_client.cc b/rtc_base/test_client.cc
index e5aa9d7987..f23ac2aec0 100644
--- a/rtc_base/test_client.cc
+++ b/rtc_base/test_client.cc
@@ -75,7 +75,7 @@ std::unique_ptr<TestClient::Packet> TestClient::NextPacket(int timeout_ms) {
int64_t end = TimeAfter(timeout_ms);
while (TimeUntil(end) > 0) {
{
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (packets_.size() != 0) {
break;
}
@@ -85,7 +85,7 @@ std::unique_ptr<TestClient::Packet> TestClient::NextPacket(int timeout_ms) {
// Return the first packet placed in the queue.
std::unique_ptr<Packet> packet;
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
if (packets_.size() > 0) {
packet = std::move(packets_.front());
packets_.erase(packets_.begin());
@@ -149,7 +149,7 @@ void TestClient::OnPacket(AsyncPacketSocket* socket,
size_t size,
const SocketAddress& remote_addr,
const int64_t& packet_time_us) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
packets_.push_back(
std::make_unique<Packet>(remote_addr, buf, size, packet_time_us));
}
diff --git a/rtc_base/test_client.h b/rtc_base/test_client.h
index b45cf005bb..6989fe1d57 100644
--- a/rtc_base/test_client.h
+++ b/rtc_base/test_client.h
@@ -16,8 +16,8 @@
#include "rtc_base/async_udp_socket.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/fake_clock.h"
+#include "rtc_base/synchronization/mutex.h"
namespace rtc {
@@ -105,7 +105,7 @@ class TestClient : public sigslot::has_slots<> {
void AdvanceTime(int ms);
ThreadProcessingFakeClock* fake_clock_ = nullptr;
- CriticalSection crit_;
+ webrtc::Mutex mutex_;
std::unique_ptr<AsyncPacketSocket> socket_;
std::vector<std::unique_ptr<Packet>> packets_;
int ready_to_send_count_ = 0;
diff --git a/rtc_base/thread.cc b/rtc_base/thread.cc
index 0fb2e813e0..2882f50da3 100644
--- a/rtc_base/thread.cc
+++ b/rtc_base/thread.cc
@@ -31,9 +31,10 @@
#include "absl/algorithm/container.h"
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/null_socket_server.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
@@ -87,8 +88,8 @@ class MessageHandlerWithTask final : public MessageHandler {
class RTC_SCOPED_LOCKABLE MarkProcessingCritScope {
public:
- MarkProcessingCritScope(const CriticalSection* cs, size_t* processing)
- RTC_EXCLUSIVE_LOCK_FUNCTION(cs)
+ MarkProcessingCritScope(const RecursiveCriticalSection* cs,
+ size_t* processing) RTC_EXCLUSIVE_LOCK_FUNCTION(cs)
: cs_(cs), processing_(processing) {
cs_->Enter();
*processing_ += 1;
@@ -100,7 +101,7 @@ class RTC_SCOPED_LOCKABLE MarkProcessingCritScope {
}
private:
- const CriticalSection* const cs_;
+ const RecursiveCriticalSection* const cs_;
size_t* processing_;
RTC_DISALLOW_COPY_AND_ASSIGN(MarkProcessingCritScope);
@@ -168,8 +169,8 @@ void ThreadManager::RegisterSendAndCheckForCycles(Thread* source,
// We check the pre-existing who-sends-to-who graph for any path from target
// to source. This loop is guaranteed to terminate because per the send graph
// invariant, there are no cycles in the graph.
- for (auto it = all_targets.begin(); it != all_targets.end(); ++it) {
- const auto& targets = send_graph_[*it];
+ for (size_t i = 0; i < all_targets.size(); i++) {
+ const auto& targets = send_graph_[all_targets[i]];
all_targets.insert(all_targets.end(), targets.begin(), targets.end());
}
RTC_CHECK_EQ(absl::c_count(all_targets, source), 0)
@@ -296,6 +297,21 @@ void ThreadManager::SetCurrentThread(Thread* thread) {
RTC_DLOG(LS_ERROR) << "SetCurrentThread: Overwriting an existing value?";
}
#endif // RTC_DLOG_IS_ON
+
+ if (thread) {
+ thread->EnsureIsCurrentTaskQueue();
+ } else {
+ Thread* current = CurrentThread();
+ if (current) {
+ // The current thread is being cleared, e.g. as a result of
+ // UnwrapCurrent() being called or when a thread is being stopped
+ // (see PreRun()). This signals that the Thread instance is being detached
+ // from the thread, which also means that TaskQueue::Current() must not
+ // return a pointer to the Thread instance.
+ current->ClearCurrentTaskQueue();
+ }
+ }
+
SetCurrentThreadInternal(thread);
}
@@ -824,7 +840,6 @@ void* Thread::PreRun(void* pv) {
Thread* thread = static_cast<Thread*>(pv);
ThreadManager::Instance()->SetCurrentThread(thread);
rtc::SetCurrentThreadName(thread->name_.c_str());
- CurrentTaskQueueSetter set_current_task_queue(thread);
#if defined(WEBRTC_MAC)
ScopedAutoReleasePool pool;
#endif
@@ -878,6 +893,7 @@ void Thread::Send(const Location& posted_from,
AutoThread thread;
Thread* current_thread = Thread::Current();
RTC_DCHECK(current_thread != nullptr); // AutoThread ensures this
+ RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this));
#if RTC_DCHECK_IS_ON
ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread,
this);
@@ -935,6 +951,17 @@ void Thread::InvokeInternal(const Location& posted_from,
Send(posted_from, &handler);
}
+// Called by the ThreadManager when being set as the current thread.
+void Thread::EnsureIsCurrentTaskQueue() {
+ task_queue_registration_ =
+ std::make_unique<TaskQueueBase::CurrentTaskQueueSetter>(this);
+}
+
+// Called by the ThreadManager when being set as the current thread.
+void Thread::ClearCurrentTaskQueue() {
+ task_queue_registration_.reset();
+}
+
void Thread::QueuedTaskHandler::OnMessage(Message* msg) {
RTC_DCHECK(msg);
auto* data = static_cast<ScopedMessageData<webrtc::QueuedTask>*>(msg->pdata);
@@ -949,6 +976,50 @@ void Thread::QueuedTaskHandler::OnMessage(Message* msg) {
task.release();
}
+void Thread::AllowInvokesToThread(Thread* thread) {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ if (!IsCurrent()) {
+ PostTask(webrtc::ToQueuedTask(
+ [thread, this]() { AllowInvokesToThread(thread); }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(this);
+ allowed_threads_.push_back(thread);
+ invoke_policy_enabled_ = true;
+#endif
+}
+
+void Thread::DisallowAllInvokes() {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ if (!IsCurrent()) {
+ PostTask(webrtc::ToQueuedTask([this]() { DisallowAllInvokes(); }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(this);
+ allowed_threads_.clear();
+ invoke_policy_enabled_ = true;
+#endif
+}
+
+// Returns true if no policies added or if there is at least one policy
+// that permits invocation to |target| thread.
+bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ RTC_DCHECK_RUN_ON(this);
+ if (!invoke_policy_enabled_) {
+ return true;
+ }
+ for (const auto* thread : allowed_threads_) {
+ if (thread == target) {
+ return true;
+ }
+ }
+ return false;
+#else
+ return true;
+#endif
+}
+
void Thread::PostTask(std::unique_ptr<webrtc::QueuedTask> task) {
// Though Post takes MessageData by raw pointer (last parameter), it still
// takes it with ownership.
diff --git a/rtc_base/thread.h b/rtc_base/thread.h
index 74aab623c8..27a5b7b510 100644
--- a/rtc_base/thread.h
+++ b/rtc_base/thread.h
@@ -29,7 +29,7 @@
#include "api/task_queue/queued_task.h"
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/location.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/platform_thread_types.h"
@@ -140,7 +140,7 @@ class RTC_EXPORT ThreadManager {
// Methods that don't modify the list of message queues may be called in a
// re-entrant fashion. "processing_" keeps track of the depth of re-entrant
// calls.
- CriticalSection crit_;
+ RecursiveCriticalSection crit_;
size_t processing_ RTC_GUARDED_BY(crit_) = 0;
#if RTC_DCHECK_IS_ON
// Represents all thread seand actions by storing all send targets per thread.
@@ -338,6 +338,18 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
InvokeInternal(posted_from, functor);
}
+ // Allows invoke to specified |thread|. Thread never will be dereferenced and
+ // will be used only for reference-based comparison, so instance can be safely
+ // deleted. If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing.
+ void AllowInvokesToThread(Thread* thread);
+ // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing.
+ void DisallowAllInvokes();
+ // Returns true if |target| was allowed by AllowInvokesToThread() or if no
+ // calls were made to AllowInvokesToThread and DisallowAllInvokes. Otherwise
+ // returns false.
+ // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined always returns true.
+ bool IsInvokeToThreadAllowed(rtc::Thread* target);
+
// Posts a task to invoke the functor on |this| thread asynchronously, i.e.
// without blocking the thread that invoked PostTask(). Ownership of |functor|
// is passed and (usually, see below) destroyed on |this| thread after it is
@@ -519,7 +531,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
friend class ScopedDisallowBlockingCalls;
- CriticalSection* CritForTest() { return &crit_; }
+ RecursiveCriticalSection* CritForTest() { return &crit_; }
private:
class QueuedTaskHandler final : public MessageHandler {
@@ -551,6 +563,12 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
void InvokeInternal(const Location& posted_from,
rtc::FunctionView<void()> functor);
+ // Called by the ThreadManager when being set as the current thread.
+ void EnsureIsCurrentTaskQueue();
+
+ // Called by the ThreadManager when being unset as the current thread.
+ void ClearCurrentTaskQueue();
+
// Returns a static-lifetime MessageHandler which runs message with
// MessageLikeTask payload data.
static MessageHandler* GetPostTaskMessageHandler();
@@ -560,7 +578,11 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
MessageList messages_ RTC_GUARDED_BY(crit_);
PriorityQueue delayed_messages_ RTC_GUARDED_BY(crit_);
uint32_t delayed_next_num_ RTC_GUARDED_BY(crit_);
- CriticalSection crit_;
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ std::vector<Thread*> allowed_threads_ RTC_GUARDED_BY(this);
+ bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false;
+#endif
+ RecursiveCriticalSection crit_;
bool fInitialized_;
bool fDestroyed_;
@@ -595,6 +617,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
// Runs webrtc::QueuedTask posted to the Thread.
QueuedTaskHandler queued_task_handler_;
+ std::unique_ptr<TaskQueueBase::CurrentTaskQueueSetter>
+ task_queue_registration_;
friend class ThreadManager;
diff --git a/rtc_base/thread_unittest.cc b/rtc_base/thread_unittest.cc
index d53a387914..d3cae34dfa 100644
--- a/rtc_base/thread_unittest.cc
+++ b/rtc_base/thread_unittest.cc
@@ -22,12 +22,14 @@
#include "rtc_base/null_socket_server.h"
#include "rtc_base/physical_socket_server.h"
#include "rtc_base/socket_address.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "test/testsupport/rtc_expect_death.h"
#if defined(WEBRTC_WIN)
#include <comdef.h> // NOLINT
+
#endif
namespace rtc {
@@ -161,17 +163,17 @@ class AtomicBool {
public:
explicit AtomicBool(bool value = false) : flag_(value) {}
AtomicBool& operator=(bool value) {
- CritScope scoped_lock(&cs_);
+ webrtc::MutexLock scoped_lock(&mutex_);
flag_ = value;
return *this;
}
bool get() const {
- CritScope scoped_lock(&cs_);
+ webrtc::MutexLock scoped_lock(&mutex_);
return flag_;
}
private:
- CriticalSection cs_;
+ mutable webrtc::Mutex mutex_;
bool flag_;
};
@@ -288,6 +290,63 @@ TEST(ThreadTest, Wrap) {
ThreadManager::Instance()->SetCurrentThread(current_thread);
}
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+TEST(ThreadTest, InvokeToThreadAllowedReturnsTrueWithoutPolicies) {
+ // Create and start the thread.
+ auto thread1 = Thread::CreateWithSocketServer();
+ auto thread2 = Thread::CreateWithSocketServer();
+
+ thread1->PostTask(ToQueuedTask(
+ [&]() { EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get())); }));
+ Thread* th_main = Thread::Current();
+ th_main->ProcessMessages(100);
+}
+
+TEST(ThreadTest, InvokeAllowedWhenThreadsAdded) {
+ // Create and start the thread.
+ auto thread1 = Thread::CreateWithSocketServer();
+ auto thread2 = Thread::CreateWithSocketServer();
+ auto thread3 = Thread::CreateWithSocketServer();
+ auto thread4 = Thread::CreateWithSocketServer();
+
+ thread1->AllowInvokesToThread(thread2.get());
+ thread1->AllowInvokesToThread(thread3.get());
+
+ thread1->PostTask(ToQueuedTask([&]() {
+ EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get()));
+ EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread3.get()));
+ EXPECT_FALSE(thread1->IsInvokeToThreadAllowed(thread4.get()));
+ }));
+ Thread* th_main = Thread::Current();
+ th_main->ProcessMessages(100);
+}
+
+TEST(ThreadTest, InvokesDisallowedWhenDisallowAllInvokes) {
+ // Create and start the thread.
+ auto thread1 = Thread::CreateWithSocketServer();
+ auto thread2 = Thread::CreateWithSocketServer();
+
+ thread1->DisallowAllInvokes();
+
+ thread1->PostTask(ToQueuedTask([&]() {
+ EXPECT_FALSE(thread1->IsInvokeToThreadAllowed(thread2.get()));
+ }));
+ Thread* th_main = Thread::Current();
+ th_main->ProcessMessages(100);
+}
+#endif // (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+
+TEST(ThreadTest, InvokesAllowedByDefault) {
+ // Create and start the thread.
+ auto thread1 = Thread::CreateWithSocketServer();
+ auto thread2 = Thread::CreateWithSocketServer();
+
+ thread1->PostTask(ToQueuedTask(
+ [&]() { EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get())); }));
+ Thread* th_main = Thread::Current();
+ th_main->ProcessMessages(100);
+}
+
TEST(ThreadTest, Invoke) {
// Create and start the thread.
auto thread = Thread::CreateWithSocketServer();
@@ -356,18 +415,18 @@ TEST(ThreadTest, ThreeThreadsInvoke) {
explicit LockedBool(bool value) : value_(value) {}
void Set(bool value) {
- CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
value_ = value;
}
bool Get() {
- CritScope lock(&crit_);
+ webrtc::MutexLock lock(&mutex_);
return value_;
}
private:
- CriticalSection crit_;
- bool value_ RTC_GUARDED_BY(crit_);
+ webrtc::Mutex mutex_;
+ bool value_ RTC_GUARDED_BY(mutex_);
};
struct LocalFuncs {
@@ -390,7 +449,6 @@ TEST(ThreadTest, ThreeThreadsInvoke) {
Thread* thread1,
Thread* thread2,
LockedBool* out) {
- CriticalSection crit;
LockedBool async_invoked(false);
invoker->AsyncInvoke<void>(
@@ -780,105 +838,6 @@ TEST_F(AsyncInvokeTest, FlushWithIds) {
EXPECT_TRUE(flag2.get());
}
-class GuardedAsyncInvokeTest : public ::testing::Test {
- public:
- void IntCallback(int value) {
- EXPECT_EQ(expected_thread_, Thread::Current());
- int_value_ = value;
- }
- void SetExpectedThreadForIntCallback(Thread* thread) {
- expected_thread_ = thread;
- }
-
- protected:
- constexpr static int kWaitTimeout = 1000;
- GuardedAsyncInvokeTest() : int_value_(0), expected_thread_(nullptr) {}
-
- int int_value_;
- Thread* expected_thread_;
-};
-
-// Functor for creating an invoker.
-struct CreateInvoker {
- CreateInvoker(std::unique_ptr<GuardedAsyncInvoker>* invoker)
- : invoker_(invoker) {}
- void operator()() { invoker_->reset(new GuardedAsyncInvoker()); }
- std::unique_ptr<GuardedAsyncInvoker>* invoker_;
-};
-
-// Test that we can call AsyncInvoke<void>() after the thread died.
-TEST_F(GuardedAsyncInvokeTest, KillThreadFireAndForget) {
- // Create and start the thread.
- std::unique_ptr<Thread> thread(Thread::Create());
- thread->Start();
- std::unique_ptr<GuardedAsyncInvoker> invoker;
- // Create the invoker on |thread|.
- thread->Invoke<void>(RTC_FROM_HERE, CreateInvoker(&invoker));
- // Kill |thread|.
- thread = nullptr;
- // Try calling functor.
- AtomicBool called;
- EXPECT_FALSE(invoker->AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&called)));
- // With thread gone, nothing should happen.
- WAIT(called.get(), kWaitTimeout);
- EXPECT_FALSE(called.get());
-}
-
-// The remaining tests check that GuardedAsyncInvoker behaves as AsyncInvoker
-// when Thread is still alive.
-TEST_F(GuardedAsyncInvokeTest, FireAndForget) {
- GuardedAsyncInvoker invoker;
- // Try calling functor.
- AtomicBool called;
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&called)));
- EXPECT_TRUE_WAIT(called.get(), kWaitTimeout);
-}
-
-TEST_F(GuardedAsyncInvokeTest, NonCopyableFunctor) {
- GuardedAsyncInvoker invoker;
- // Try calling functor.
- AtomicBool called;
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorD(&called)));
- EXPECT_TRUE_WAIT(called.get(), kWaitTimeout);
-}
-
-TEST_F(GuardedAsyncInvokeTest, Flush) {
- GuardedAsyncInvoker invoker;
- AtomicBool flag1;
- AtomicBool flag2;
- // Queue two async calls to the current thread.
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&flag1)));
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&flag2)));
- // Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1.get());
- EXPECT_FALSE(flag2.get());
- // Force them to run now.
- EXPECT_TRUE(invoker.Flush());
- EXPECT_TRUE(flag1.get());
- EXPECT_TRUE(flag2.get());
-}
-
-TEST_F(GuardedAsyncInvokeTest, FlushWithIds) {
- GuardedAsyncInvoker invoker;
- AtomicBool flag1;
- AtomicBool flag2;
- // Queue two async calls to the current thread, one with a message id.
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&flag1), 5));
- EXPECT_TRUE(invoker.AsyncInvoke<void>(RTC_FROM_HERE, FunctorB(&flag2)));
- // Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1.get());
- EXPECT_FALSE(flag2.get());
- // Execute pending calls with id == 5.
- EXPECT_TRUE(invoker.Flush(5));
- EXPECT_TRUE(flag1.get());
- EXPECT_FALSE(flag2.get());
- flag1 = false;
- // Execute all pending calls. The id == 5 call should not execute again.
- EXPECT_TRUE(invoker.Flush());
- EXPECT_FALSE(flag1.get());
- EXPECT_TRUE(flag2.get());
-}
-
void ThreadIsCurrent(Thread* thread, bool* result, Event* event) {
*result = thread->IsCurrent();
event->Set();
@@ -1148,6 +1107,18 @@ TEST(ThreadPostDelayedTaskTest, InvokesInDelayOrder) {
EXPECT_TRUE(fourth.Wait(0));
}
+TEST(ThreadPostDelayedTaskTest, IsCurrentTaskQueue) {
+ auto current_tq = webrtc::TaskQueueBase::Current();
+ {
+ std::unique_ptr<rtc::Thread> thread(rtc::Thread::Create());
+ thread->WrapCurrent();
+ EXPECT_EQ(webrtc::TaskQueueBase::Current(),
+ static_cast<webrtc::TaskQueueBase*>(thread.get()));
+ thread->UnwrapCurrent();
+ }
+ EXPECT_EQ(webrtc::TaskQueueBase::Current(), current_tq);
+}
+
class ThreadFactory : public webrtc::TaskQueueFactory {
public:
std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>
diff --git a/rtc_base/time_utils.cc b/rtc_base/time_utils.cc
index 8d919262d3..11c9d5a47f 100644
--- a/rtc_base/time_utils.cc
+++ b/rtc_base/time_utils.cc
@@ -247,7 +247,7 @@ int64_t TimestampWrapAroundHandler::Unwrap(uint32_t ts) {
++num_wrap_;
} else if ((ts - last_ts_) > 0xf0000000) {
// Backwards wrap. Unwrap with last wrap count and don't update last_ts_.
- return ts + ((num_wrap_ - 1) << 32);
+ return ts + (num_wrap_ - 1) * (int64_t{1} << 32);
}
last_ts_ = ts;
diff --git a/rtc_base/virtual_socket_server.cc b/rtc_base/virtual_socket_server.cc
index d42873e18b..3d412d66cc 100644
--- a/rtc_base/virtual_socket_server.cc
+++ b/rtc_base/virtual_socket_server.cc
@@ -19,6 +19,7 @@
#include "absl/algorithm/container.h"
#include "rtc_base/checks.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/logging.h"
#include "rtc_base/physical_socket_server.h"
diff --git a/rtc_base/virtual_socket_server.h b/rtc_base/virtual_socket_server.h
index f45fabf0af..84f8fb1bdc 100644
--- a/rtc_base/virtual_socket_server.h
+++ b/rtc_base/virtual_socket_server.h
@@ -17,6 +17,7 @@
#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/message_handler.h"
@@ -294,7 +295,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
std::map<rtc::IPAddress, rtc::IPAddress> alternative_address_mapping_;
std::unique_ptr<Function> delay_dist_;
- CriticalSection delay_crit_;
+ RecursiveCriticalSection delay_crit_;
double drop_prob_;
bool sending_blocked_ = false;
@@ -379,7 +380,7 @@ class VirtualSocket : public AsyncSocket,
bool ready_to_send_ = true;
// Critical section to protect the recv_buffer and queue_
- CriticalSection crit_;
+ RecursiveCriticalSection crit_;
// Network model that enforces bandwidth and capacity constraints
NetworkQueue network_;
diff --git a/rtc_base/win32_socket_server.cc b/rtc_base/win32_socket_server.cc
index 8a5b93a608..cfe21a3630 100644
--- a/rtc_base/win32_socket_server.cc
+++ b/rtc_base/win32_socket_server.cc
@@ -733,7 +733,7 @@ bool Win32SocketServer::Wait(int cms, bool process_io) {
MSG msg;
b = GetMessage(&msg, nullptr, s_wm_wakeup_id, s_wm_wakeup_id);
{
- CritScope scope(&cs_);
+ webrtc::MutexLock lock(&mutex_);
posted_ = false;
}
} else {
@@ -747,7 +747,7 @@ void Win32SocketServer::WakeUp() {
if (wnd_.handle()) {
// Set the "message pending" flag, if not already set.
{
- CritScope scope(&cs_);
+ webrtc::MutexLock lock(&mutex_);
if (posted_)
return;
posted_ = true;
@@ -760,7 +760,7 @@ void Win32SocketServer::WakeUp() {
void Win32SocketServer::Pump() {
// Clear the "message pending" flag.
{
- CritScope scope(&cs_);
+ webrtc::MutexLock lock(&mutex_);
posted_ = false;
}
diff --git a/rtc_base/win32_socket_server.h b/rtc_base/win32_socket_server.h
index 92fd68cd83..317acce0d2 100644
--- a/rtc_base/win32_socket_server.h
+++ b/rtc_base/win32_socket_server.h
@@ -13,10 +13,10 @@
#if defined(WEBRTC_WIN)
#include "rtc_base/async_socket.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/socket.h"
#include "rtc_base/socket_factory.h"
#include "rtc_base/socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread.h"
#include "rtc_base/win32_window.h"
@@ -123,7 +123,7 @@ class Win32SocketServer : public SocketServer {
static const wchar_t kWindowName[];
Thread* message_queue_;
MessageWindow wnd_;
- CriticalSection cs_;
+ webrtc::Mutex mutex_;
bool posted_;
HWND hdlg_;
};
diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn
index f293853f6e..ae1c930092 100644
--- a/rtc_tools/BUILD.gn
+++ b/rtc_tools/BUILD.gn
@@ -17,12 +17,12 @@ group("rtc_tools") {
deps = [
":frame_analyzer",
":video_file_reader",
- ":video_quality_analysis",
]
if (!build_with_chromium) {
deps += [
":psnr_ssim_analyzer",
":rgba_to_i420_converter",
+ ":video_quality_analysis",
]
if (rtc_enable_protobuf) {
deps += [ ":chart_proto" ]
@@ -60,6 +60,8 @@ rtc_library("video_file_reader") {
"../api/video:video_rtp_headers",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -77,6 +79,8 @@ rtc_library("video_file_writer") {
"../api/video:video_frame_i420",
"../api/video:video_rtp_headers",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -107,9 +111,9 @@ rtc_library("video_quality_analysis") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../test:perf_test",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_executable("frame_analyzer") {
@@ -319,8 +323,14 @@ if (!build_with_chromium) {
rtc_library("event_log_visualizer_utils") {
visibility = [ "*" ]
sources = [
+ "rtc_event_log_visualizer/alerts.cc",
+ "rtc_event_log_visualizer/alerts.h",
+ "rtc_event_log_visualizer/analyze_audio.cc",
+ "rtc_event_log_visualizer/analyze_audio.h",
"rtc_event_log_visualizer/analyzer.cc",
"rtc_event_log_visualizer/analyzer.h",
+ "rtc_event_log_visualizer/analyzer_common.cc",
+ "rtc_event_log_visualizer/analyzer_common.h",
"rtc_event_log_visualizer/log_simulation.cc",
"rtc_event_log_visualizer/log_simulation.h",
"rtc_event_log_visualizer/plot_base.cc",
@@ -329,11 +339,11 @@ if (!build_with_chromium) {
"rtc_event_log_visualizer/plot_protobuf.h",
"rtc_event_log_visualizer/plot_python.cc",
"rtc_event_log_visualizer/plot_python.h",
- "rtc_event_log_visualizer/triage_notifications.h",
]
deps = [
":chart_proto",
"../api:function_view",
+ "../rtc_base:deprecation",
"../rtc_base:ignore_wundef",
# TODO(kwiberg): Remove this dependency.
@@ -360,8 +370,12 @@ if (!build_with_chromium) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_numerics",
"../rtc_base:stringutils",
+ "../test:explicit_key_value_config",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
}
@@ -391,6 +405,7 @@ if (rtc_include_tests) {
"../rtc_base:logging",
"../rtc_base:rtc_event",
"../rtc_base:rtc_task_queue",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
"../test:video_test_common",
"../test:video_test_support",
diff --git a/rtc_tools/converter/yuv_to_ivf_converter.cc b/rtc_tools/converter/yuv_to_ivf_converter.cc
index 6f15bd33a2..ae05b196dd 100644
--- a/rtc_tools/converter/yuv_to_ivf_converter.cc
+++ b/rtc_tools/converter/yuv_to_ivf_converter.cc
@@ -30,9 +30,9 @@
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/task_queue.h"
#include "test/testsupport/frame_reader.h"
@@ -78,7 +78,7 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback {
const RTPFragmentationHeader* fragmentation) override {
RTC_CHECK(file_writer_->WriteFrame(encoded_image, video_codec_type_));
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
received_frames_count_++;
RTC_CHECK_LE(received_frames_count_, expected_frames_count_);
if (received_frames_count_ % kFrameLogInterval == 0) {
@@ -99,7 +99,7 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback {
const VideoCodecType video_codec_type_;
const int expected_frames_count_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
int received_frames_count_ RTC_GUARDED_BY(lock_) = 0;
rtc::Event next_frame_written_;
};
diff --git a/rtc_tools/network_tester/BUILD.gn b/rtc_tools/network_tester/BUILD.gn
index 47e600aa85..ccbfeba475 100644
--- a/rtc_tools/network_tester/BUILD.gn
+++ b/rtc_tools/network_tester/BUILD.gn
@@ -48,10 +48,11 @@ if (rtc_enable_protobuf) {
"../../rtc_base:protobuf_utils",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/third_party/sigslot",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
network_tester_unittests_resources = [
@@ -101,7 +102,7 @@ if (is_android) {
testonly = true
apk_name = "NetworkTesterMobile"
android_manifest = "androidapp/AndroidManifest.xml"
- min_sdk_version = 16
+ min_sdk_version = 21
target_sdk_version = 24
deps = [
@@ -115,7 +116,7 @@ if (is_android) {
rtc_android_library("NetworkTesterMobile_javalib") {
testonly = true
- android_manifest_for_lint = "androidapp/AndroidManifest.xml"
+ android_manifest = "androidapp/AndroidManifest.xml"
sources = [
"androidapp/src/com/google/media/networktester/MainActivity.java",
@@ -138,11 +139,11 @@ if (is_android) {
"androidapp/res/mipmap-xhdpi/ic_launcher.png",
"androidapp/res/mipmap-xxhdpi/ic_launcher.png",
"androidapp/res/mipmap-xxxhdpi/ic_launcher.png",
+ "androidapp/res/values-v17/styles.xml",
+ "androidapp/res/values-w820dp/dimens.xml",
"androidapp/res/values/colors.xml",
"androidapp/res/values/dimens.xml",
"androidapp/res/values/strings.xml",
- "androidapp/res/values-v17/styles.xml",
- "androidapp/res/values-w820dp/dimens.xml",
]
# Needed for Bazel converter.
diff --git a/rtc_tools/network_tester/androidapp/AndroidManifest.xml b/rtc_tools/network_tester/androidapp/AndroidManifest.xml
index 3839955458..1ff519396b 100755
--- a/rtc_tools/network_tester/androidapp/AndroidManifest.xml
+++ b/rtc_tools/network_tester/androidapp/AndroidManifest.xml
@@ -4,7 +4,7 @@
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
- <uses-sdk android:minSdkVersion="16"
+ <uses-sdk android:minSdkVersion="21"
android:targetSdkVersion="24"
android:maxSdkVersion="24" />
<application
diff --git a/rtc_tools/network_tester/test_controller.cc b/rtc_tools/network_tester/test_controller.cc
index 49b470ce5f..85a5a57bc0 100644
--- a/rtc_tools/network_tester/test_controller.cc
+++ b/rtc_tools/network_tester/test_controller.cc
@@ -43,7 +43,7 @@ void TestController::SendConnectTo(const std::string& hostname, int port) {
NetworkTesterPacket packet;
packet.set_type(NetworkTesterPacket::HAND_SHAKING);
SendData(packet, absl::nullopt);
- rtc::CritScope scoped_lock(&local_test_done_lock_);
+ MutexLock scoped_lock(&local_test_done_lock_);
local_test_done_ = false;
remote_test_done_ = false;
}
@@ -71,13 +71,13 @@ void TestController::OnTestDone() {
NetworkTesterPacket packet;
packet.set_type(NetworkTesterPacket::TEST_DONE);
SendData(packet, absl::nullopt);
- rtc::CritScope scoped_lock(&local_test_done_lock_);
+ MutexLock scoped_lock(&local_test_done_lock_);
local_test_done_ = true;
}
bool TestController::IsTestDone() {
RTC_DCHECK_RUN_ON(&test_controller_thread_checker_);
- rtc::CritScope scoped_lock(&local_test_done_lock_);
+ MutexLock scoped_lock(&local_test_done_lock_);
return local_test_done_ && remote_test_done_;
}
@@ -100,7 +100,7 @@ void TestController::OnReadPacket(rtc::AsyncPacketSocket* socket,
SendData(packet, absl::nullopt);
packet_sender_.reset(new PacketSender(this, config_file_path_));
packet_sender_->StartSending();
- rtc::CritScope scoped_lock(&local_test_done_lock_);
+ MutexLock scoped_lock(&local_test_done_lock_);
local_test_done_ = false;
remote_test_done_ = false;
break;
@@ -108,7 +108,7 @@ void TestController::OnReadPacket(rtc::AsyncPacketSocket* socket,
case NetworkTesterPacket::TEST_START: {
packet_sender_.reset(new PacketSender(this, config_file_path_));
packet_sender_->StartSending();
- rtc::CritScope scoped_lock(&local_test_done_lock_);
+ MutexLock scoped_lock(&local_test_done_lock_);
local_test_done_ = false;
remote_test_done_ = false;
break;
diff --git a/rtc_tools/network_tester/test_controller.h b/rtc_tools/network_tester/test_controller.h
index d04158d934..b73ac94329 100644
--- a/rtc_tools/network_tester/test_controller.h
+++ b/rtc_tools/network_tester/test_controller.h
@@ -22,9 +22,9 @@
#include "p2p/base/basic_packet_socket_factory.h"
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ignore_wundef.h"
#include "rtc_base/socket_address.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread_annotations.h"
@@ -35,7 +35,6 @@
#ifdef WEBRTC_NETWORK_TESTER_PROTO
RTC_PUSH_IGNORING_WUNDEF()
#include "rtc_tools/network_tester/network_tester_packet.pb.h"
-
RTC_POP_IGNORING_WUNDEF()
using webrtc::network_tester::packet::NetworkTesterPacket;
#else
@@ -75,7 +74,7 @@ class TestController : public sigslot::has_slots<> {
rtc::BasicPacketSocketFactory socket_factory_;
const std::string config_file_path_;
PacketLogger packet_logger_;
- rtc::CriticalSection local_test_done_lock_;
+ Mutex local_test_done_lock_;
bool local_test_done_ RTC_GUARDED_BY(local_test_done_lock_);
bool remote_test_done_;
std::array<char, kEthernetMtu> send_data_;
diff --git a/rtc_tools/rtc_event_log_visualizer/alerts.cc b/rtc_tools/rtc_event_log_visualizer/alerts.cc
new file mode 100644
index 0000000000..86372de4cf
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/alerts.cc
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <limits>
+#include <map>
+#include <string>
+
+#include "logging/rtc_event_log/rtc_event_processor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/format_macros.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+void TriageHelper::Print(FILE* file) {
+ fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n");
+ for (const auto& alert : triage_alerts_) {
+ fprintf(file, "%d %s. First occurrence at %3.3lf\n", alert.second.count,
+ alert.second.explanation.c_str(), alert.second.first_occurrence);
+ }
+ fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n");
+}
+
+void TriageHelper::AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction) {
+ // With 100 packets/s (~800kbps), false positives would require 10 s without
+ // data.
+ constexpr int64_t kMaxSeqNumJump = 1000;
+ // With a 90 kHz clock, false positives would require 10 s without data.
+ constexpr int64_t kTicksPerMillisec = 90;
+ constexpr int64_t kCaptureTimeGraceMs = 10000;
+
+ std::string seq_num_explanation =
+ direction == kIncomingPacket
+ ? "Incoming RTP sequence number jumps more than 1000. Counter may "
+ "have been reset or rewritten incorrectly in a group call."
+ : "Outgoing RTP sequence number jumps more than 1000. Counter may "
+ "have been reset.";
+ std::string capture_time_explanation =
+ direction == kIncomingPacket ? "Incoming capture time jumps more than "
+ "10s. Clock might have been reset."
+ : "Outgoing capture time jumps more than "
+ "10s. Clock might have been reset.";
+ TriageAlertType seq_num_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingSeqNumJump
+ : TriageAlertType::kOutgoingSeqNumJump;
+ TriageAlertType capture_time_alert =
+ direction == kIncomingPacket ? TriageAlertType::kIncomingCaptureTimeJump
+ : TriageAlertType::kOutgoingCaptureTimeJump;
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ // Check for gaps in sequence numbers and capture timestamps.
+ for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
+ if (IsRtxSsrc(parsed_log, direction, stream.ssrc)) {
+ continue;
+ }
+ auto packets = stream.packet_view;
+ if (packets.empty()) {
+ continue;
+ }
+ SeqNumUnwrapper<uint16_t> seq_num_unwrapper;
+ int64_t last_seq_num =
+ seq_num_unwrapper.Unwrap(packets[0].header.sequenceNumber);
+ SeqNumUnwrapper<uint32_t> capture_time_unwrapper;
+ int64_t last_capture_time =
+ capture_time_unwrapper.Unwrap(packets[0].header.timestamp);
+ int64_t last_log_time_ms = packets[0].log_time_ms();
+ for (const auto& packet : packets) {
+ if (packet.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+
+ int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber);
+ if (std::abs(seq_num - last_seq_num) > kMaxSeqNumJump) {
+ Alert(seq_num_alert, config_.GetCallTimeSec(packet.log_time_us()),
+ seq_num_explanation);
+ }
+ last_seq_num = seq_num;
+
+ int64_t capture_time =
+ capture_time_unwrapper.Unwrap(packet.header.timestamp);
+ if (std::abs(capture_time - last_capture_time) >
+ kTicksPerMillisec *
+ (kCaptureTimeGraceMs + packet.log_time_ms() - last_log_time_ms)) {
+ Alert(capture_time_alert, config_.GetCallTimeSec(packet.log_time_us()),
+ capture_time_explanation);
+ }
+ last_capture_time = capture_time;
+ }
+ }
+}
+
+void TriageHelper::AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction) {
+ constexpr int64_t kMaxRtpTransmissionGap = 500000;
+ constexpr int64_t kMaxRtcpTransmissionGap = 3000000;
+ std::string rtp_explanation =
+ direction == kIncomingPacket
+ ? "No RTP packets received for more than 500ms. This indicates a "
+ "network problem. Temporary video freezes and choppy or robotic "
+ "audio is unavoidable. Unnecessary BWE drops is a known issue."
+ : "No RTP packets sent for more than 500 ms. This might be an issue "
+ "with the pacer.";
+ std::string rtcp_explanation =
+ direction == kIncomingPacket
+ ? "No RTCP packets received for more than 3 s. Could be a longer "
+ "connection outage"
+ : "No RTCP packets sent for more than 3 s. This is most likely a "
+ "bug.";
+ TriageAlertType rtp_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingRtpGap
+ : TriageAlertType::kOutgoingRtpGap;
+ TriageAlertType rtcp_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingRtcpGap
+ : TriageAlertType::kOutgoingRtcpGap;
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ // TODO(terelius): The parser could provide a list of all packets, ordered
+ // by time, for each direction.
+ std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction;
+ for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
+ for (const LoggedRtpPacket& rtp_packet : stream.packet_view)
+ rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet);
+ }
+ absl::optional<int64_t> last_rtp_time;
+ for (const auto& kv : rtp_in_direction) {
+ int64_t timestamp = kv.first;
+ if (timestamp > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = timestamp - last_rtp_time.value_or(0);
+ if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) {
+ // No packet sent/received for more than 500 ms.
+ Alert(rtp_alert, config_.GetCallTimeSec(timestamp), rtp_explanation);
+ }
+ last_rtp_time.emplace(timestamp);
+ }
+
+ absl::optional<int64_t> last_rtcp_time;
+ if (direction == kIncomingPacket) {
+ for (const auto& rtcp : parsed_log.incoming_rtcp_packets()) {
+ if (rtcp.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
+ if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
+ // No feedback sent/received for more than 2000 ms.
+ Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
+ rtcp_explanation);
+ }
+ last_rtcp_time.emplace(rtcp.log_time_us());
+ }
+ } else {
+ for (const auto& rtcp : parsed_log.outgoing_rtcp_packets()) {
+ if (rtcp.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
+ if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
+ // No feedback sent/received for more than 2000 ms.
+ Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
+ rtcp_explanation);
+ }
+ last_rtcp_time.emplace(rtcp.log_time_us());
+ }
+ }
+}
+
+// TODO(terelius): Notifications could possibly be generated by the same code
+// that produces the graphs. There is some code duplication that could be
+// avoided, but that might be solved anyway when we move functionality from the
+// analyzer to the parser.
+void TriageHelper::AnalyzeLog(const ParsedRtcEventLog& parsed_log) {
+ AnalyzeStreamGaps(parsed_log, kIncomingPacket);
+ AnalyzeStreamGaps(parsed_log, kOutgoingPacket);
+ AnalyzeTransmissionGaps(parsed_log, kIncomingPacket);
+ AnalyzeTransmissionGaps(parsed_log, kOutgoingPacket);
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ int64_t first_occurrence = parsed_log.last_timestamp();
+ constexpr double kMaxLossFraction = 0.05;
+ // Loss feedback
+ int64_t total_lost_packets = 0;
+ int64_t total_expected_packets = 0;
+ for (auto& bwe_update : parsed_log.bwe_loss_updates()) {
+ if (bwe_update.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 *
+ bwe_update.expected_packets;
+ total_lost_packets += lost_packets;
+ total_expected_packets += bwe_update.expected_packets;
+ if (bwe_update.fraction_lost >= 255 * kMaxLossFraction) {
+ first_occurrence = std::min(first_occurrence, bwe_update.log_time_us());
+ }
+ }
+ double avg_outgoing_loss =
+ static_cast<double>(total_lost_packets) / total_expected_packets;
+ if (avg_outgoing_loss > kMaxLossFraction) {
+ Alert(TriageAlertType::kOutgoingHighLoss, first_occurrence,
+ "More than 5% of outgoing packets lost.");
+ }
+}
+
+} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/alerts.h b/rtc_tools/rtc_event_log_visualizer/alerts.h
new file mode 100644
index 0000000000..7bd9f05270
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/alerts.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
+
+#include <stdio.h>
+
+#include <map>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+
+namespace webrtc {
+
+enum class TriageAlertType {
+ kUnknown = 0,
+ kIncomingRtpGap,
+ kOutgoingRtpGap,
+ kIncomingRtcpGap,
+ kOutgoingRtcpGap,
+ kIncomingSeqNumJump,
+ kOutgoingSeqNumJump,
+ kIncomingCaptureTimeJump,
+ kOutgoingCaptureTimeJump,
+ kOutgoingHighLoss,
+ kLast,
+};
+
+struct TriageAlert {
+ TriageAlertType type = TriageAlertType::kUnknown;
+ int count = 0;
+ float first_occurrence = -1;
+ std::string explanation;
+};
+
+class TriageHelper {
+ public:
+ explicit TriageHelper(const AnalyzerConfig& config) : config_(config) {}
+
+ void AnalyzeLog(const ParsedRtcEventLog& parsed_log);
+
+ void AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction);
+ void AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction);
+ void Print(FILE* file);
+
+ private:
+ AnalyzerConfig config_;
+ std::map<TriageAlertType, TriageAlert> triage_alerts_;
+
+ void Alert(TriageAlertType type,
+ float time_seconds,
+ absl::string_view explanation) {
+ std::map<TriageAlertType, TriageAlert>::iterator it =
+ triage_alerts_.find(type);
+
+ if (it == triage_alerts_.end()) {
+ TriageAlert alert;
+ alert.type = type;
+ alert.first_occurrence = time_seconds;
+ alert.count = 1;
+ alert.explanation = std::string(explanation);
+ triage_alerts_.insert(std::make_pair(type, alert));
+ } else {
+ it->second.count += 1;
+ }
+ }
+ RTC_DISALLOW_COPY_AND_ASSIGN(TriageHelper);
+};
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
new file mode 100644
index 0000000000..becc0044ab
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
@@ -0,0 +1,503 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
+
+#include <memory>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "modules/audio_coding/neteq/tools/audio_sink.h"
+#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h"
+#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
+#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h"
+#include "modules/audio_coding/neteq/tools/neteq_test.h"
+#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event)
+ -> absl::optional<float> {
+ if (ana_event.config.bitrate_bps)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.bitrate_bps));
+ return absl::nullopt;
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaBitrateBps,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder target bitrate");
+}
+
+void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder frame length", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaFrameLengthMs =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.frame_length_ms)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.frame_length_ms));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaFrameLengthMs,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder frame length");
+}
+
+void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder uplink packet loss fraction",
+ LineStyle::kLine, PointStyle::kHighlight);
+ auto GetAnaPacketLoss =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.uplink_packet_loss_fraction)
+ return absl::optional<float>(static_cast<float>(
+ *ana_event.config.uplink_packet_loss_fraction));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaPacketLoss,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
+ kTopMargin);
+ plot->SetTitle("Reported audio encoder lost packets");
+}
+
+void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder FEC", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaFecEnabled =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.enable_fec)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.enable_fec));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaFecEnabled,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder FEC");
+}
+
+void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder DTX", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaDtxEnabled =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.enable_dtx)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.enable_dtx));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaDtxEnabled,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder DTX");
+}
+
+void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaNumChannels =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.num_channels)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.num_channels));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaNumChannels,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
+ kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder number of channels");
+}
+
+class NetEqStreamInput : public test::NetEqInput {
+ public:
+ // Does not take any ownership, and all pointers must refer to valid objects
+ // that outlive the one constructed.
+ NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream,
+ const std::vector<LoggedAudioPlayoutEvent>* output_events,
+ absl::optional<int64_t> end_time_ms)
+ : packet_stream_(*packet_stream),
+ packet_stream_it_(packet_stream_.begin()),
+ output_events_it_(output_events->begin()),
+ output_events_end_(output_events->end()),
+ end_time_ms_(end_time_ms) {
+ RTC_DCHECK(packet_stream);
+ RTC_DCHECK(output_events);
+ }
+
+ absl::optional<int64_t> NextPacketTime() const override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return absl::nullopt;
+ }
+ if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) {
+ return absl::nullopt;
+ }
+ return packet_stream_it_->rtp.log_time_ms();
+ }
+
+ absl::optional<int64_t> NextOutputEventTime() const override {
+ if (output_events_it_ == output_events_end_) {
+ return absl::nullopt;
+ }
+ if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) {
+ return absl::nullopt;
+ }
+ return output_events_it_->log_time_ms();
+ }
+
+ std::unique_ptr<PacketData> PopPacket() override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return std::unique_ptr<PacketData>();
+ }
+ std::unique_ptr<PacketData> packet_data(new PacketData());
+ packet_data->header = packet_stream_it_->rtp.header;
+ packet_data->time_ms = packet_stream_it_->rtp.log_time_ms();
+
+ // This is a header-only "dummy" packet. Set the payload to all zeros, with
+ // length according to the virtual length.
+ packet_data->payload.SetSize(packet_stream_it_->rtp.total_length -
+ packet_stream_it_->rtp.header_length);
+ std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
+
+ ++packet_stream_it_;
+ return packet_data;
+ }
+
+ void AdvanceOutputEvent() override {
+ if (output_events_it_ != output_events_end_) {
+ ++output_events_it_;
+ }
+ }
+
+ bool ended() const override { return !NextEventTime(); }
+
+ absl::optional<RTPHeader> NextHeader() const override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return absl::nullopt;
+ }
+ return packet_stream_it_->rtp.header;
+ }
+
+ private:
+ const std::vector<LoggedRtpPacketIncoming>& packet_stream_;
+ std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_;
+ std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_;
+ const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_;
+ const absl::optional<int64_t> end_time_ms_;
+};
+
+namespace {
+
+// Factory to create a "replacement decoder" that produces the decoded audio
+// by reading from a file rather than from the encoded payloads.
+class ReplacementAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name,
+ int file_sample_rate_hz)
+ : replacement_file_name_(replacement_file_name),
+ file_sample_rate_hz_(file_sample_rate_hz) {}
+
+ std::vector<AudioCodecSpec> GetSupportedDecoders() override {
+ RTC_NOTREACHED();
+ return {};
+ }
+
+ bool IsSupportedDecoder(const SdpAudioFormat& format) override {
+ return true;
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
+ auto replacement_file = std::make_unique<test::ResampleInputAudioFile>(
+ replacement_file_name_, file_sample_rate_hz_);
+ replacement_file->set_output_rate_hz(48000);
+ return std::make_unique<test::FakeDecodeFromFile>(
+ std::move(replacement_file), 48000, false);
+ }
+
+ private:
+ const std::string replacement_file_name_;
+ const int file_sample_rate_hz_;
+};
+
+// Creates a NetEq test object and all necessary input and output helpers. Runs
+// the test and returns the NetEqDelayAnalyzer object that was used to
+// instrument the test.
+std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
+ const std::vector<LoggedRtpPacketIncoming>* packet_stream,
+ const std::vector<LoggedAudioPlayoutEvent>* output_events,
+ absl::optional<int64_t> end_time_ms,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz) {
+ std::unique_ptr<test::NetEqInput> input(
+ new NetEqStreamInput(packet_stream, output_events, end_time_ms));
+
+ constexpr int kReplacementPt = 127;
+ std::set<uint8_t> cn_types;
+ std::set<uint8_t> forbidden_types;
+ input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
+ cn_types, forbidden_types));
+
+ NetEq::Config config;
+ config.max_packets_in_buffer = 200;
+ config.enable_fast_accelerate = true;
+
+ std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
+
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
+ new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
+ replacement_file_name, file_sample_rate_hz);
+
+ test::NetEqTest::DecoderMap codecs = {
+ {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
+
+ std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
+ new test::NetEqDelayAnalyzer);
+ std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter(
+ new test::NetEqStatsGetter(std::move(delay_cb)));
+ test::DefaultNetEqTestErrorCallback error_cb;
+ test::NetEqTest::Callbacks callbacks;
+ callbacks.error_callback = &error_cb;
+ callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer();
+ callbacks.get_audio_callback = neteq_stats_getter.get();
+
+ test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr,
+ /*factory=*/nullptr, std::move(input), std::move(output),
+ callbacks);
+ test.Run();
+ return neteq_stats_getter;
+}
+} // namespace
+
+NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz) {
+ NetEqStatsGetterMap neteq_stats;
+
+ for (const auto& stream : parsed_log.incoming_rtp_packets_by_ssrc()) {
+ const uint32_t ssrc = stream.ssrc;
+ if (!IsAudioSsrc(parsed_log, kIncomingPacket, ssrc))
+ continue;
+ const std::vector<LoggedRtpPacketIncoming>* audio_packets =
+ &stream.incoming_packets;
+ if (audio_packets == nullptr) {
+ // No incoming audio stream found.
+ continue;
+ }
+
+ RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end());
+
+ std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator
+ output_events_it = parsed_log.audio_playout_events().find(ssrc);
+ if (output_events_it == parsed_log.audio_playout_events().end()) {
+ // Could not find output events with SSRC matching the input audio stream.
+ // Using the first available stream of output events.
+ output_events_it = parsed_log.audio_playout_events().cbegin();
+ }
+
+ int64_t end_time_ms = parsed_log.first_log_segment().stop_time_ms();
+
+ neteq_stats[ssrc] = CreateNetEqTestAndRun(
+ audio_packets, &output_events_it->second, end_time_ms,
+ replacement_file_name, file_sample_rate_hz);
+ }
+
+ return neteq_stats;
+}
+
+// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created
+// for, this method generates a plot for the jitter buffer delay profile.
+void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ uint32_t ssrc,
+ const test::NetEqStatsGetter* stats_getter,
+ Plot* plot) {
+ test::NetEqDelayAnalyzer::Delays arrival_delay_ms;
+ test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms;
+ test::NetEqDelayAnalyzer::Delays playout_delay_ms;
+ test::NetEqDelayAnalyzer::Delays target_delay_ms;
+
+ stats_getter->delay_analyzer()->CreateGraphs(
+ &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms,
+ &target_delay_ms);
+
+ TimeSeries time_series_packet_arrival("packet arrival delay",
+ LineStyle::kLine);
+ TimeSeries time_series_relative_packet_arrival(
+ "Relative packet arrival delay", LineStyle::kLine);
+ TimeSeries time_series_play_time("Playout delay", LineStyle::kLine);
+ TimeSeries time_series_target_time("Target delay", LineStyle::kLine,
+ PointStyle::kHighlight);
+
+ for (const auto& data : arrival_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : corrected_arrival_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_relative_packet_arrival.points.emplace_back(
+ TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : playout_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : target_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+
+ plot->AppendTimeSeries(std::move(time_series_packet_arrival));
+ plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival));
+ plot->AppendTimeSeries(std::move(time_series_play_time));
+ plot->AppendTimeSeries(std::move(time_series_target_time));
+
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin,
+ kTopMargin);
+ plot->SetTitle("NetEq timing for " +
+ GetStreamName(parsed_log, kIncomingPacket, ssrc));
+}
+
+template <typename NetEqStatsType>
+void CreateNetEqStatsGraphInternal(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
+ const test::NetEqStatsGetter*)> data_extractor,
+ rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ std::map<uint32_t, TimeSeries> time_series;
+
+ for (const auto& st : neteq_stats) {
+ const uint32_t ssrc = st.first;
+ const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector =
+ data_extractor(st.second.get());
+ for (const auto& data : *data_vector) {
+ const float time = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float value = stats_extractor(data.second);
+ time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value));
+ }
+ }
+
+ for (auto& series : time_series) {
+ series.second.label =
+ GetStreamName(parsed_log, kIncomingPacket, series.first);
+ series.second.line_style = LineStyle::kLine;
+ plot->AppendTimeSeries(std::move(series.second));
+ }
+
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin);
+ plot->SetTitle(plot_name);
+}
+
+void CreateNetEqNetworkStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>(
+ parsed_log, config, neteq_stats,
+ [](const test::NetEqStatsGetter* stats_getter) {
+ return stats_getter->stats();
+ },
+ stats_extractor, plot_name, plot);
+}
+
+void CreateNetEqLifetimeStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>(
+ parsed_log, config, neteq_stats,
+ [](const test::NetEqStatsGetter* stats_getter) {
+ return stats_getter->lifetime_stats();
+ },
+ stats_extractor, plot_name, plot);
+}
+
+} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.h b/rtc_tools/rtc_event_log_visualizer/analyze_audio.h
new file mode 100644
index 0000000000..726e84492d
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+
+#include "api/function_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
+
+namespace webrtc {
+
+void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+
+using NetEqStatsGetterMap =
+ std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;
+NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz);
+
+void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ uint32_t ssrc,
+ const test::NetEqStatsGetter* stats_getter,
+ Plot* plot);
+void CreateNetEqNetworkStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats_getters,
+ rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot);
+void CreateNetEqLifetimeStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats_getters,
+ rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot);
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
index 9fcb510adc..6d84b1b5ca 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyzer.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
@@ -31,12 +31,6 @@
#include "logging/rtc_event_log/rtc_event_processor.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
-#include "modules/audio_coding/neteq/tools/audio_sink.h"
-#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h"
-#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
-#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h"
-#include "modules/audio_coding/neteq/tools/neteq_test.h"
-#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h"
#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h"
#include "modules/congestion_controller/goog_cc/bitrate_estimator.h"
#include "modules/congestion_controller/goog_cc/delay_based_bwe.h"
@@ -45,7 +39,6 @@
#include "modules/pacing/paced_sender.h"
#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
@@ -54,6 +47,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "rtc_base/checks.h"
#include "rtc_base/format_macros.h"
@@ -62,17 +56,12 @@
#include "rtc_base/rate_statistics.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_tools/rtc_event_log_visualizer/log_simulation.h"
-
-#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
-#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0
-#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+#include "test/explicit_key_value_config.h"
namespace webrtc {
namespace {
-const int kNumMicrosecsPerSec = 1000000;
-
std::string SsrcToString(uint32_t ssrc) {
rtc::StringBuilder ss;
ss << "SSRC " << ssrc;
@@ -168,11 +157,6 @@ absl::optional<uint32_t> EstimateRtpClockFrequency(
return absl::nullopt;
}
-constexpr float kLeftMargin = 0.01f;
-constexpr float kRightMargin = 0.02f;
-constexpr float kBottomMargin = 0.02f;
-constexpr float kTopMargin = 0.05f;
-
absl::optional<double> NetworkDelayDiff_AbsSendTime(
const LoggedRtpPacketIncoming& old_packet,
const LoggedRtpPacketIncoming& new_packet) {
@@ -222,99 +206,6 @@ absl::optional<double> NetworkDelayDiff_CaptureTime(
return delay_change;
}
-// For each element in data_view, use |f()| to extract a y-coordinate and
-// store the result in a TimeSeries.
-template <typename DataType, typename IterableType>
-void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<float>(const DataType&)> fy,
- const IterableType& data_view,
- TimeSeries* result) {
- for (size_t i = 0; i < data_view.size(); i++) {
- const DataType& elem = data_view[i];
- float x = fx(elem);
- absl::optional<float> y = fy(elem);
- if (y)
- result->points.emplace_back(x, *y);
- }
-}
-
-// For each pair of adjacent elements in |data|, use |f()| to extract a
-// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
-// will be the time of the second element in the pair.
-template <typename DataType, typename ResultType, typename IterableType>
-void ProcessPairs(
- rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<ResultType>(const DataType&,
- const DataType&)> fy,
- const IterableType& data,
- TimeSeries* result) {
- for (size_t i = 1; i < data.size(); i++) {
- float x = fx(data[i]);
- absl::optional<ResultType> y = fy(data[i - 1], data[i]);
- if (y)
- result->points.emplace_back(x, static_cast<float>(*y));
- }
-}
-
-// For each pair of adjacent elements in |data|, use |f()| to extract a
-// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
-// will be the time of the second element in the pair.
-template <typename DataType, typename ResultType, typename IterableType>
-void AccumulatePairs(
- rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<ResultType>(const DataType&,
- const DataType&)> fy,
- const IterableType& data,
- TimeSeries* result) {
- ResultType sum = 0;
- for (size_t i = 1; i < data.size(); i++) {
- float x = fx(data[i]);
- absl::optional<ResultType> y = fy(data[i - 1], data[i]);
- if (y) {
- sum += *y;
- result->points.emplace_back(x, static_cast<float>(sum));
- }
- }
-}
-
-// Calculates a moving average of |data| and stores the result in a TimeSeries.
-// A data point is generated every |step| microseconds from |begin_time|
-// to |end_time|. The value of each data point is the average of the data
-// during the preceding |window_duration_us| microseconds.
-template <typename DataType, typename ResultType, typename IterableType>
-void MovingAverage(
- rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy,
- const IterableType& data_view,
- AnalyzerConfig config,
- TimeSeries* result) {
- size_t window_index_begin = 0;
- size_t window_index_end = 0;
- ResultType sum_in_window = 0;
-
- for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_;
- t += config.step_) {
- while (window_index_end < data_view.size() &&
- data_view[window_index_end].log_time_us() < t) {
- absl::optional<ResultType> value = fy(data_view[window_index_end]);
- if (value)
- sum_in_window += *value;
- ++window_index_end;
- }
- while (window_index_begin < data_view.size() &&
- data_view[window_index_begin].log_time_us() <
- t - config.window_duration_) {
- absl::optional<ResultType> value = fy(data_view[window_index_begin]);
- if (value)
- sum_in_window -= *value;
- ++window_index_begin;
- }
- float window_duration_s =
- static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec;
- float x = config.GetCallTimeSec(t);
- float y = sum_in_window / window_duration_s;
- result->points.emplace_back(x, y);
- }
-}
template <typename T>
TimeSeries CreateRtcpTypeTimeSeries(const std::vector<T>& rtcp_list,
@@ -465,32 +356,21 @@ EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log,
config_.begin_time_ = config_.end_time_ = 0;
}
- const auto& log_start_events = parsed_log_.start_log_events();
- const auto& log_end_events = parsed_log_.stop_log_events();
- auto start_iter = log_start_events.begin();
- auto end_iter = log_end_events.begin();
- while (start_iter != log_start_events.end()) {
- int64_t start = start_iter->log_time_us();
- ++start_iter;
- absl::optional<int64_t> next_start;
- if (start_iter != log_start_events.end())
- next_start.emplace(start_iter->log_time_us());
- if (end_iter != log_end_events.end() &&
- end_iter->log_time_us() <=
- next_start.value_or(std::numeric_limits<int64_t>::max())) {
- int64_t end = end_iter->log_time_us();
- RTC_DCHECK_LE(start, end);
- log_segments_.push_back(std::make_pair(start, end));
- ++end_iter;
- } else {
- // we're missing an end event. Assume that it occurred just before the
- // next start.
- log_segments_.push_back(
- std::make_pair(start, next_start.value_or(config_.end_time_)));
- }
- }
- RTC_LOG(LS_INFO) << "Found " << log_segments_.size()
- << " (LOG_START, LOG_END) segments in log.";
+ RTC_LOG(LS_INFO) << "Log is "
+ << (parsed_log_.last_timestamp() -
+ parsed_log_.first_timestamp()) /
+ 1000000
+ << " seconds long.";
+}
+
+EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log,
+ const AnalyzerConfig& config)
+ : parsed_log_(log), config_(config) {
+ RTC_LOG(LS_INFO) << "Log is "
+ << (parsed_log_.last_timestamp() -
+ parsed_log_.first_timestamp()) /
+ 1000000
+ << " seconds long.";
}
class BitrateObserver : public RemoteBitrateObserver {
@@ -527,7 +407,7 @@ void EventLogAnalyzer::CreatePacketGraph(PacketDirection direction,
continue;
}
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kBar);
auto GetPacketSize = [](const LoggedRtpPacket& packet) {
return absl::optional<float>(packet.total_length);
@@ -597,8 +477,8 @@ void EventLogAnalyzer::CreateAccumulatedPacketsGraph(PacketDirection direction,
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
if (!MatchingSsrc(stream.ssrc, desired_ssrc_))
continue;
- std::string label =
- std::string("RTP ") + GetStreamName(direction, stream.ssrc);
+ std::string label = std::string("RTP ") +
+ GetStreamName(parsed_log_, direction, stream.ssrc);
CreateAccumulatedPacketsTimeSeries(plot, stream.packet_view, label);
}
std::string label =
@@ -627,7 +507,8 @@ void EventLogAnalyzer::CreatePacketRateGraph(PacketDirection direction,
continue;
}
TimeSeries time_series(
- std::string("RTP ") + GetStreamName(direction, stream.ssrc),
+ std::string("RTP ") +
+ GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
MovingAverage<LoggedRtpPacket, double>(CountPackets, stream.packet_view,
config_, &time_series);
@@ -736,9 +617,9 @@ void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) {
void EventLogAnalyzer::CreateAudioLevelGraph(PacketDirection direction,
Plot* plot) {
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- if (!IsAudioSsrc(direction, stream.ssrc))
+ if (!IsAudioSsrc(parsed_log_, direction, stream.ssrc))
continue;
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
for (auto& packet : stream.packet_view) {
if (packet.header.extension.hasAudioLevel) {
@@ -767,8 +648,9 @@ void EventLogAnalyzer::CreateSequenceNumberGraph(Plot* plot) {
continue;
}
- TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc),
- LineStyle::kBar);
+ TimeSeries time_series(
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc),
+ LineStyle::kBar);
auto GetSequenceNumberDiff = [](const LoggedRtpPacketIncoming& old_packet,
const LoggedRtpPacketIncoming& new_packet) {
int64_t diff =
@@ -801,8 +683,9 @@ void EventLogAnalyzer::CreateIncomingPacketLossGraph(Plot* plot) {
continue;
}
- TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc),
- LineStyle::kLine, PointStyle::kHighlight);
+ TimeSeries time_series(
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc),
+ LineStyle::kLine, PointStyle::kHighlight);
// TODO(terelius): Should the window and step size be read from the class
// instead?
const int64_t kWindowUs = 1000000;
@@ -855,7 +738,7 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
// Filter on SSRC.
if (!MatchingSsrc(stream.ssrc, desired_ssrc_) ||
- IsRtxSsrc(kIncomingPacket, stream.ssrc)) {
+ IsRtxSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) {
continue;
}
@@ -866,15 +749,14 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
<< packets.size() << " packets in the stream.";
continue;
}
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
+ int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us();
absl::optional<uint32_t> estimated_frequency =
- EstimateRtpClockFrequency(packets, end_time_us);
+ EstimateRtpClockFrequency(packets, segment_end_us);
if (!estimated_frequency)
continue;
const double frequency_hz = *estimated_frequency;
- if (IsVideoSsrc(kIncomingPacket, stream.ssrc) && frequency_hz != 90000) {
+ if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc) &&
+ frequency_hz != 90000) {
RTC_LOG(LS_WARNING)
<< "Video stream should use a 90 kHz clock but appears to use "
<< frequency_hz / 1000 << ". Discarding.";
@@ -891,14 +773,16 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
};
TimeSeries capture_time_data(
- GetStreamName(kIncomingPacket, stream.ssrc) + " capture-time",
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) +
+ " capture-time",
LineStyle::kLine);
AccumulatePairs<LoggedRtpPacketIncoming, double>(
ToCallTime, ToNetworkDelay, packets, &capture_time_data);
plot->AppendTimeSeries(std::move(capture_time_data));
TimeSeries send_time_data(
- GetStreamName(kIncomingPacket, stream.ssrc) + " abs-send-time",
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) +
+ " abs-send-time",
LineStyle::kLine);
AccumulatePairs<LoggedRtpPacketIncoming, double>(
ToCallTime, NetworkDelayDiff_AbsSendTime, packets, &send_time_data);
@@ -1191,7 +1075,7 @@ void EventLogAnalyzer::CreateStreamBitrateGraph(PacketDirection direction,
continue;
}
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
auto GetPacketSizeKilobits = [](const LoggedRtpPacket& packet) {
return packet.total_length * 8.0 / 1000.0;
@@ -1325,10 +1209,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
TimeSeries time_series("Delay-based estimate", LineStyle::kStep,
PointStyle::kHighlight);
- TimeSeries acked_time_series("Acked bitrate", LineStyle::kLine,
+ TimeSeries acked_time_series("Raw acked bitrate", LineStyle::kLine,
PointStyle::kHighlight);
- TimeSeries acked_estimate_time_series(
- "Acked bitrate estimate", LineStyle::kLine, PointStyle::kHighlight);
+ TimeSeries robust_time_series("Robust throughput estimate", LineStyle::kLine,
+ PointStyle::kHighlight);
+ TimeSeries acked_estimate_time_series("Ackednowledged bitrate estimate",
+ LineStyle::kLine,
+ PointStyle::kHighlight);
auto rtp_iterator = outgoing_rtp.begin();
auto rtcp_iterator = incoming_rtcp.begin();
@@ -1354,20 +1241,18 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
return std::numeric_limits<int64_t>::max();
};
- RateStatistics acked_bitrate(250, 8000);
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
- FieldTrialBasedConfig field_trial_config_;
- // The event_log_visualizer should normally not be compiled with
- // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE since the normal plots won't work.
- // However, compiling with BWE_TEST_LOGGING, running with --plot=sendside_bwe
- // and piping the output to plot_dynamics.py can be used as a hack to get the
- // internal state of various BWE components. In this case, it is important
- // we don't instantiate the AcknowledgedBitrateEstimator both here and in
- // GoogCcNetworkController since that would lead to duplicate outputs.
+ RateStatistics acked_bitrate(750, 8000);
+ test::ExplicitKeyValueConfig throughput_config(
+ "WebRTC-Bwe-RobustThroughputEstimatorSettings/"
+ "enabled:true,reduce_bias:true,assume_shared_link:false,initial_packets:"
+ "10,min_packets:25,window_duration:750ms,unacked_weight:0.5/");
+ std::unique_ptr<AcknowledgedBitrateEstimatorInterface>
+ robust_throughput_estimator(
+ AcknowledgedBitrateEstimatorInterface::Create(&throughput_config));
+ FieldTrialBasedConfig field_trial_config;
std::unique_ptr<AcknowledgedBitrateEstimatorInterface>
acknowledged_bitrate_estimator(
- AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_));
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
+ AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config));
int64_t time_us =
std::min({NextRtpTime(), NextRtcpTime(), NextProcessTime()});
int64_t last_update_us = 0;
@@ -1377,24 +1262,40 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime());
const RtpPacketType& rtp_packet = *rtp_iterator->second;
if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) {
- RTC_DCHECK(rtp_packet.rtp.header.extension.hasTransportSequenceNumber);
RtpPacketSendInfo packet_info;
packet_info.ssrc = rtp_packet.rtp.header.ssrc;
packet_info.transport_sequence_number =
rtp_packet.rtp.header.extension.transportSequenceNumber;
packet_info.rtp_sequence_number = rtp_packet.rtp.header.sequenceNumber;
packet_info.length = rtp_packet.rtp.total_length;
+ if (IsRtxSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ // Don't set the optional media type as we don't know if it is
+ // a retransmission, FEC or padding.
+ } else if (IsVideoSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ packet_info.packet_type = RtpPacketMediaType::kVideo;
+ } else if (IsAudioSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ packet_info.packet_type = RtpPacketMediaType::kAudio;
+ }
transport_feedback.AddPacket(
packet_info,
0u, // Per packet overhead bytes.
Timestamp::Micros(rtp_packet.rtp.log_time_us()));
- rtc::SentPacket sent_packet(
- rtp_packet.rtp.header.extension.transportSequenceNumber,
- rtp_packet.rtp.log_time_us() / 1000);
- auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet);
- if (sent_msg)
- observer.Update(goog_cc->OnSentPacket(*sent_msg));
}
+ rtc::SentPacket sent_packet;
+ sent_packet.send_time_ms = rtp_packet.rtp.log_time_ms();
+ sent_packet.info.included_in_allocation = true;
+ sent_packet.info.packet_size_bytes = rtp_packet.rtp.total_length;
+ if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) {
+ sent_packet.packet_id =
+ rtp_packet.rtp.header.extension.transportSequenceNumber;
+ sent_packet.info.included_in_feedback = true;
+ }
+ auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet);
+ if (sent_msg)
+ observer.Update(goog_cc->OnSentPacket(*sent_msg));
++rtp_iterator;
}
if (clock.TimeInMicroseconds() >= NextRtcpTime()) {
@@ -1409,13 +1310,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
std::vector<PacketResult> feedback =
feedback_msg->SortedByReceiveTime();
if (!feedback.empty()) {
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
acknowledged_bitrate_estimator->IncomingPacketFeedbackVector(
feedback);
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
- for (const PacketResult& packet : feedback)
+ robust_throughput_estimator->IncomingPacketFeedbackVector(feedback);
+ for (const PacketResult& packet : feedback) {
acked_bitrate.Update(packet.sent_packet.size.bytes(),
packet.receive_time.ms());
+ }
bitrate_bps = acked_bitrate.Rate(feedback.back().receive_time.ms());
}
}
@@ -1423,12 +1324,14 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
float x = config_.GetCallTimeSec(clock.TimeInMicroseconds());
float y = bitrate_bps.value_or(0) / 1000;
acked_time_series.points.emplace_back(x, y);
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
+ y = robust_throughput_estimator->bitrate()
+ .value_or(DataRate::Zero())
+ .kbps();
+ robust_time_series.points.emplace_back(x, y);
y = acknowledged_bitrate_estimator->bitrate()
.value_or(DataRate::Zero())
.kbps();
acked_estimate_time_series.points.emplace_back(x, y);
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
++rtcp_iterator;
}
if (clock.TimeInMicroseconds() >= NextProcessTime()) {
@@ -1449,6 +1352,7 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
}
// Add the data set to the plot.
plot->AppendTimeSeries(std::move(time_series));
+ plot->AppendTimeSeries(std::move(robust_time_series));
plot->AppendTimeSeries(std::move(acked_time_series));
plot->AppendTimeSeriesIfNotEmpty(std::move(acked_estimate_time_series));
@@ -1476,14 +1380,16 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) {
}
private:
- uint32_t last_bitrate_bps_;
- bool bitrate_updated_;
+ // We don't know the start bitrate, but assume that it is the default 300
+ // kbps.
+ uint32_t last_bitrate_bps_ = 300000;
+ bool bitrate_updated_ = false;
};
std::multimap<int64_t, const RtpPacketType*> incoming_rtp;
for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
- if (IsVideoSsrc(kIncomingPacket, stream.ssrc)) {
+ if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) {
for (const auto& rtp_packet : stream.incoming_packets)
incoming_rtp.insert(
std::make_pair(rtp_packet.rtp.log_time_us(), &rtp_packet));
@@ -1586,7 +1492,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
const std::vector<LoggedRtpPacketOutgoing>& packets =
stream.outgoing_packets;
- if (IsRtxSsrc(kOutgoingPacket, stream.ssrc)) {
+ if (IsRtxSsrc(parsed_log_, kOutgoingPacket, stream.ssrc)) {
continue;
}
@@ -1596,14 +1502,12 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
"pacer delay with less than 2 packets in the stream";
continue;
}
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
+ int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us();
absl::optional<uint32_t> estimated_frequency =
- EstimateRtpClockFrequency(packets, end_time_us);
+ EstimateRtpClockFrequency(packets, segment_end_us);
if (!estimated_frequency)
continue;
- if (IsVideoSsrc(kOutgoingPacket, stream.ssrc) &&
+ if (IsVideoSsrc(parsed_log_, kOutgoingPacket, stream.ssrc) &&
*estimated_frequency != 90000) {
RTC_LOG(LS_WARNING)
<< "Video stream should use a 90 kHz clock but appears to use "
@@ -1612,7 +1516,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
}
TimeSeries pacer_delay_series(
- GetStreamName(kOutgoingPacket, stream.ssrc) + "(" +
+ GetStreamName(parsed_log_, kOutgoingPacket, stream.ssrc) + "(" +
std::to_string(*estimated_frequency / 1000) + " kHz)",
LineStyle::kLine, PointStyle::kHighlight);
SeqNumUnwrapper<uint32_t> timestamp_unwrapper;
@@ -1645,7 +1549,7 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction,
Plot* plot) {
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
TimeSeries rtp_timestamps(
- GetStreamName(direction, stream.ssrc) + " capture-time",
+ GetStreamName(parsed_log_, direction, stream.ssrc) + " capture-time",
LineStyle::kLine, PointStyle::kHighlight);
for (const auto& packet : stream.packet_view) {
float x = config_.GetCallTimeSec(packet.log_time_us());
@@ -1655,7 +1559,8 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction,
plot->AppendTimeSeries(std::move(rtp_timestamps));
TimeSeries rtcp_timestamps(
- GetStreamName(direction, stream.ssrc) + " rtcp capture-time",
+ GetStreamName(parsed_log_, direction, stream.ssrc) +
+ " rtcp capture-time",
LineStyle::kLine, PointStyle::kHighlight);
// TODO(terelius): Why only sender reports?
const auto& sender_reports = parsed_log_.sender_reports(direction);
@@ -1692,7 +1597,8 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
bool inserted;
if (sr_report_it == sr_reports_by_ssrc.end()) {
std::tie(sr_report_it, inserted) = sr_reports_by_ssrc.emplace(
- ssrc, TimeSeries(GetStreamName(direction, ssrc) + " Sender Reports",
+ ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) +
+ " Sender Reports",
LineStyle::kLine, PointStyle::kHighlight));
}
sr_report_it->second.points.emplace_back(x, y);
@@ -1713,9 +1619,9 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
bool inserted;
if (rr_report_it == rr_reports_by_ssrc.end()) {
std::tie(rr_report_it, inserted) = rr_reports_by_ssrc.emplace(
- ssrc,
- TimeSeries(GetStreamName(direction, ssrc) + " Receiver Reports",
- LineStyle::kLine, PointStyle::kHighlight));
+ ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) +
+ " Receiver Reports",
+ LineStyle::kLine, PointStyle::kHighlight));
}
rr_report_it->second.points.emplace_back(x, y);
}
@@ -1730,463 +1636,6 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
plot->SetTitle(title);
}
-void EventLogAnalyzer::CreateAudioEncoderTargetBitrateGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event)
- -> absl::optional<float> {
- if (ana_event.config.bitrate_bps)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.bitrate_bps));
- return absl::nullopt;
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaBitrateBps,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder target bitrate");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderFrameLengthGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder frame length", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaFrameLengthMs =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.frame_length_ms)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.frame_length_ms));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaFrameLengthMs,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder frame length");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderPacketLossGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder uplink packet loss fraction",
- LineStyle::kLine, PointStyle::kHighlight);
- auto GetAnaPacketLoss =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.uplink_packet_loss_fraction)
- return absl::optional<float>(static_cast<float>(
- *ana_event.config.uplink_packet_loss_fraction));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaPacketLoss,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
- kTopMargin);
- plot->SetTitle("Reported audio encoder lost packets");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderEnableFecGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder FEC", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaFecEnabled =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.enable_fec)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.enable_fec));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaFecEnabled,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder FEC");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderEnableDtxGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder DTX", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaDtxEnabled =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.enable_dtx)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.enable_dtx));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaDtxEnabled,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder DTX");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderNumChannelsGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaNumChannels =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.num_channels)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.num_channels));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaNumChannels,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
- kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder number of channels");
-}
-
-class NetEqStreamInput : public test::NetEqInput {
- public:
- // Does not take any ownership, and all pointers must refer to valid objects
- // that outlive the one constructed.
- NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream,
- const std::vector<LoggedAudioPlayoutEvent>* output_events,
- absl::optional<int64_t> end_time_ms)
- : packet_stream_(*packet_stream),
- packet_stream_it_(packet_stream_.begin()),
- output_events_it_(output_events->begin()),
- output_events_end_(output_events->end()),
- end_time_ms_(end_time_ms) {
- RTC_DCHECK(packet_stream);
- RTC_DCHECK(output_events);
- }
-
- absl::optional<int64_t> NextPacketTime() const override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return absl::nullopt;
- }
- if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) {
- return absl::nullopt;
- }
- return packet_stream_it_->rtp.log_time_ms();
- }
-
- absl::optional<int64_t> NextOutputEventTime() const override {
- if (output_events_it_ == output_events_end_) {
- return absl::nullopt;
- }
- if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) {
- return absl::nullopt;
- }
- return output_events_it_->log_time_ms();
- }
-
- std::unique_ptr<PacketData> PopPacket() override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return std::unique_ptr<PacketData>();
- }
- std::unique_ptr<PacketData> packet_data(new PacketData());
- packet_data->header = packet_stream_it_->rtp.header;
- packet_data->time_ms = packet_stream_it_->rtp.log_time_ms();
-
- // This is a header-only "dummy" packet. Set the payload to all zeros, with
- // length according to the virtual length.
- packet_data->payload.SetSize(packet_stream_it_->rtp.total_length -
- packet_stream_it_->rtp.header_length);
- std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
-
- ++packet_stream_it_;
- return packet_data;
- }
-
- void AdvanceOutputEvent() override {
- if (output_events_it_ != output_events_end_) {
- ++output_events_it_;
- }
- }
-
- bool ended() const override { return !NextEventTime(); }
-
- absl::optional<RTPHeader> NextHeader() const override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return absl::nullopt;
- }
- return packet_stream_it_->rtp.header;
- }
-
- private:
- const std::vector<LoggedRtpPacketIncoming>& packet_stream_;
- std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_;
- std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_;
- const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_;
- const absl::optional<int64_t> end_time_ms_;
-};
-
-namespace {
-
-// Factory to create a "replacement decoder" that produces the decoded audio
-// by reading from a file rather than from the encoded payloads.
-class ReplacementAudioDecoderFactory : public AudioDecoderFactory {
- public:
- ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name,
- int file_sample_rate_hz)
- : replacement_file_name_(replacement_file_name),
- file_sample_rate_hz_(file_sample_rate_hz) {}
-
- std::vector<AudioCodecSpec> GetSupportedDecoders() override {
- RTC_NOTREACHED();
- return {};
- }
-
- bool IsSupportedDecoder(const SdpAudioFormat& format) override {
- return true;
- }
-
- std::unique_ptr<AudioDecoder> MakeAudioDecoder(
- const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) override {
- auto replacement_file = std::make_unique<test::ResampleInputAudioFile>(
- replacement_file_name_, file_sample_rate_hz_);
- replacement_file->set_output_rate_hz(48000);
- return std::make_unique<test::FakeDecodeFromFile>(
- std::move(replacement_file), 48000, false);
- }
-
- private:
- const std::string replacement_file_name_;
- const int file_sample_rate_hz_;
-};
-
-// Creates a NetEq test object and all necessary input and output helpers. Runs
-// the test and returns the NetEqDelayAnalyzer object that was used to
-// instrument the test.
-std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
- const std::vector<LoggedRtpPacketIncoming>* packet_stream,
- const std::vector<LoggedAudioPlayoutEvent>* output_events,
- absl::optional<int64_t> end_time_ms,
- const std::string& replacement_file_name,
- int file_sample_rate_hz) {
- std::unique_ptr<test::NetEqInput> input(
- new NetEqStreamInput(packet_stream, output_events, end_time_ms));
-
- constexpr int kReplacementPt = 127;
- std::set<uint8_t> cn_types;
- std::set<uint8_t> forbidden_types;
- input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
- cn_types, forbidden_types));
-
- NetEq::Config config;
- config.max_packets_in_buffer = 200;
- config.enable_fast_accelerate = true;
-
- std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
-
- rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
- new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
- replacement_file_name, file_sample_rate_hz);
-
- test::NetEqTest::DecoderMap codecs = {
- {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
-
- std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
- new test::NetEqDelayAnalyzer);
- std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter(
- new test::NetEqStatsGetter(std::move(delay_cb)));
- test::DefaultNetEqTestErrorCallback error_cb;
- test::NetEqTest::Callbacks callbacks;
- callbacks.error_callback = &error_cb;
- callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer();
- callbacks.get_audio_callback = neteq_stats_getter.get();
-
- test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr,
- /*factory=*/nullptr, std::move(input), std::move(output),
- callbacks);
- test.Run();
- return neteq_stats_getter;
-}
-} // namespace
-
-EventLogAnalyzer::NetEqStatsGetterMap EventLogAnalyzer::SimulateNetEq(
- const std::string& replacement_file_name,
- int file_sample_rate_hz) const {
- NetEqStatsGetterMap neteq_stats;
-
- for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
- const uint32_t ssrc = stream.ssrc;
- if (!IsAudioSsrc(kIncomingPacket, ssrc))
- continue;
- const std::vector<LoggedRtpPacketIncoming>* audio_packets =
- &stream.incoming_packets;
- if (audio_packets == nullptr) {
- // No incoming audio stream found.
- continue;
- }
-
- RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end());
-
- std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator
- output_events_it = parsed_log_.audio_playout_events().find(ssrc);
- if (output_events_it == parsed_log_.audio_playout_events().end()) {
- // Could not find output events with SSRC matching the input audio stream.
- // Using the first available stream of output events.
- output_events_it = parsed_log_.audio_playout_events().cbegin();
- }
-
- absl::optional<int64_t> end_time_ms =
- log_segments_.empty()
- ? absl::nullopt
- : absl::optional<int64_t>(log_segments_.front().second / 1000);
-
- neteq_stats[ssrc] = CreateNetEqTestAndRun(
- audio_packets, &output_events_it->second, end_time_ms,
- replacement_file_name, file_sample_rate_hz);
- }
-
- return neteq_stats;
-}
-
-// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created
-// for, this method generates a plot for the jitter buffer delay profile.
-void EventLogAnalyzer::CreateAudioJitterBufferGraph(
- uint32_t ssrc,
- const test::NetEqStatsGetter* stats_getter,
- Plot* plot) const {
- test::NetEqDelayAnalyzer::Delays arrival_delay_ms;
- test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms;
- test::NetEqDelayAnalyzer::Delays playout_delay_ms;
- test::NetEqDelayAnalyzer::Delays target_delay_ms;
-
- stats_getter->delay_analyzer()->CreateGraphs(
- &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms,
- &target_delay_ms);
-
- TimeSeries time_series_packet_arrival("packet arrival delay",
- LineStyle::kLine);
- TimeSeries time_series_relative_packet_arrival(
- "Relative packet arrival delay", LineStyle::kLine);
- TimeSeries time_series_play_time("Playout delay", LineStyle::kLine);
- TimeSeries time_series_target_time("Target delay", LineStyle::kLine,
- PointStyle::kHighlight);
-
- for (const auto& data : arrival_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y));
- }
- for (const auto& data : corrected_arrival_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_relative_packet_arrival.points.emplace_back(
- TimeSeriesPoint(x, y));
- }
- for (const auto& data : playout_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y));
- }
- for (const auto& data : target_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y));
- }
-
- plot->AppendTimeSeries(std::move(time_series_packet_arrival));
- plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival));
- plot->AppendTimeSeries(std::move(time_series_play_time));
- plot->AppendTimeSeries(std::move(time_series_target_time));
-
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin,
- kTopMargin);
- plot->SetTitle("NetEq timing for " + GetStreamName(kIncomingPacket, ssrc));
-}
-
-template <typename NetEqStatsType>
-void EventLogAnalyzer::CreateNetEqStatsGraphInternal(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
- const test::NetEqStatsGetter*)> data_extractor,
- rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- std::map<uint32_t, TimeSeries> time_series;
-
- for (const auto& st : neteq_stats) {
- const uint32_t ssrc = st.first;
- const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector =
- data_extractor(st.second.get());
- for (const auto& data : *data_vector) {
- const float time =
- config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float value = stats_extractor(data.second);
- time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value));
- }
- }
-
- for (auto& series : time_series) {
- series.second.label = GetStreamName(kIncomingPacket, series.first);
- series.second.line_style = LineStyle::kLine;
- plot->AppendTimeSeries(std::move(series.second));
- }
-
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin);
- plot->SetTitle(plot_name);
-}
-
-void EventLogAnalyzer::CreateNetEqNetworkStatsGraph(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>(
- neteq_stats,
- [](const test::NetEqStatsGetter* stats_getter) {
- return stats_getter->stats();
- },
- stats_extractor, plot_name, plot);
-}
-
-void EventLogAnalyzer::CreateNetEqLifetimeStatsGraph(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>(
- neteq_stats,
- [](const test::NetEqStatsGetter* stats_getter) {
- return stats_getter->lifetime_stats();
- },
- stats_extractor, plot_name, plot);
-}
-
void EventLogAnalyzer::CreateIceCandidatePairConfigGraph(Plot* plot) {
std::map<uint32_t, TimeSeries> configs_by_cp_id;
for (const auto& config : parsed_log_.ice_candidate_pair_configs()) {
@@ -2326,181 +1775,4 @@ void EventLogAnalyzer::CreateDtlsWritableStateGraph(Plot* plot) {
plot->SetTitle("DTLS Writable State");
}
-void EventLogAnalyzer::PrintNotifications(FILE* file) {
- fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n");
- for (const auto& alert : incoming_rtp_recv_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_rtcp_recv_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_rtp_send_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_rtcp_send_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_seq_num_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_capture_time_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_seq_num_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_capture_time_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_high_loss_alerts_) {
- fprintf(file, " : %s\n", alert.ToString().c_str());
- }
- fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n");
-}
-
-void EventLogAnalyzer::CreateStreamGapAlerts(PacketDirection direction) {
- // With 100 packets/s (~800kbps), false positives would require 10 s without
- // data.
- constexpr int64_t kMaxSeqNumJump = 1000;
- // With a 90 kHz clock, false positives would require 10 s without data.
- constexpr int64_t kMaxCaptureTimeJump = 900000;
-
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- SeqNumUnwrapper<uint16_t> seq_num_unwrapper;
- absl::optional<int64_t> last_seq_num;
- SeqNumUnwrapper<uint32_t> capture_time_unwrapper;
- absl::optional<int64_t> last_capture_time;
- // Check for gaps in sequence numbers and capture timestamps.
- for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- for (const auto& packet : stream.packet_view) {
- if (packet.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
-
- int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber);
- if (last_seq_num.has_value() &&
- std::abs(seq_num - last_seq_num.value()) > kMaxSeqNumJump) {
- Alert_SeqNumJump(direction,
- config_.GetCallTimeSec(packet.log_time_us()),
- packet.header.ssrc);
- }
- last_seq_num.emplace(seq_num);
-
- int64_t capture_time =
- capture_time_unwrapper.Unwrap(packet.header.timestamp);
- if (last_capture_time.has_value() &&
- std::abs(capture_time - last_capture_time.value()) >
- kMaxCaptureTimeJump) {
- Alert_CaptureTimeJump(direction,
- config_.GetCallTimeSec(packet.log_time_us()),
- packet.header.ssrc);
- }
- last_capture_time.emplace(capture_time);
- }
- }
-}
-
-void EventLogAnalyzer::CreateTransmissionGapAlerts(PacketDirection direction) {
- constexpr int64_t kMaxRtpTransmissionGap = 500000;
- constexpr int64_t kMaxRtcpTransmissionGap = 2000000;
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- // TODO(terelius): The parser could provide a list of all packets, ordered
- // by time, for each direction.
- std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction;
- for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- for (const LoggedRtpPacket& rtp_packet : stream.packet_view)
- rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet);
- }
- absl::optional<int64_t> last_rtp_time;
- for (const auto& kv : rtp_in_direction) {
- int64_t timestamp = kv.first;
- if (timestamp > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = timestamp - last_rtp_time.value_or(0);
- if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) {
- // No packet sent/received for more than 500 ms.
- Alert_RtpLogTimeGap(direction, config_.GetCallTimeSec(timestamp),
- duration / 1000);
- }
- last_rtp_time.emplace(timestamp);
- }
-
- absl::optional<int64_t> last_rtcp_time;
- if (direction == kIncomingPacket) {
- for (const auto& rtcp : parsed_log_.incoming_rtcp_packets()) {
- if (rtcp.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
- if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
- // No feedback sent/received for more than 2000 ms.
- Alert_RtcpLogTimeGap(direction,
- config_.GetCallTimeSec(rtcp.log_time_us()),
- duration / 1000);
- }
- last_rtcp_time.emplace(rtcp.log_time_us());
- }
- } else {
- for (const auto& rtcp : parsed_log_.outgoing_rtcp_packets()) {
- if (rtcp.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
- if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
- // No feedback sent/received for more than 2000 ms.
- Alert_RtcpLogTimeGap(direction,
- config_.GetCallTimeSec(rtcp.log_time_us()),
- duration / 1000);
- }
- last_rtcp_time.emplace(rtcp.log_time_us());
- }
- }
-}
-
-// TODO(terelius): Notifications could possibly be generated by the same code
-// that produces the graphs. There is some code duplication that could be
-// avoided, but that might be solved anyway when we move functionality from the
-// analyzer to the parser.
-void EventLogAnalyzer::CreateTriageNotifications() {
- CreateStreamGapAlerts(kIncomingPacket);
- CreateStreamGapAlerts(kOutgoingPacket);
- CreateTransmissionGapAlerts(kIncomingPacket);
- CreateTransmissionGapAlerts(kOutgoingPacket);
-
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- constexpr double kMaxLossFraction = 0.05;
- // Loss feedback
- int64_t total_lost_packets = 0;
- int64_t total_expected_packets = 0;
- for (auto& bwe_update : parsed_log_.bwe_loss_updates()) {
- if (bwe_update.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 *
- bwe_update.expected_packets;
- total_lost_packets += lost_packets;
- total_expected_packets += bwe_update.expected_packets;
- }
- double avg_outgoing_loss =
- static_cast<double>(total_lost_packets) / total_expected_packets;
- if (avg_outgoing_loss > kMaxLossFraction) {
- Alert_OutgoingHighLoss(avg_outgoing_loss);
- }
-}
-
} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.h b/rtc_tools/rtc_event_log_visualizer/analyzer.h
index 1e09109959..4918cf48e1 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyzer.h
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer.h
@@ -21,41 +21,18 @@
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
-#include "rtc_tools/rtc_event_log_visualizer/triage_notifications.h"
namespace webrtc {
-class AnalyzerConfig {
- public:
- float GetCallTimeSec(int64_t timestamp_us) const {
- int64_t offset = normalize_time_ ? begin_time_ : 0;
- return static_cast<float>(timestamp_us - offset) / 1000000;
- }
-
- float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
-
- float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }
-
- // Window and step size used for calculating moving averages, e.g. bitrate.
- // The generated data points will be |step_| microseconds apart.
- // Only events occurring at most |window_duration_| microseconds before the
- // current data point will be part of the average.
- int64_t window_duration_;
- int64_t step_;
-
- // First and last events of the log.
- int64_t begin_time_;
- int64_t end_time_;
- bool normalize_time_;
-};
-
class EventLogAnalyzer {
public:
// The EventLogAnalyzer keeps a reference to the ParsedRtcEventLogNew for the
// duration of its lifetime. The ParsedRtcEventLogNew must not be destroyed or
// modified while the EventLogAnalyzer is being used.
EventLogAnalyzer(const ParsedRtcEventLog& log, bool normalize_time);
+ EventLogAnalyzer(const ParsedRtcEventLog& log, const AnalyzerConfig& config);
void CreatePacketGraph(PacketDirection direction, Plot* plot);
@@ -102,32 +79,6 @@ class EventLogAnalyzer {
std::string yaxis_label,
Plot* plot);
- void CreateAudioEncoderTargetBitrateGraph(Plot* plot);
- void CreateAudioEncoderFrameLengthGraph(Plot* plot);
- void CreateAudioEncoderPacketLossGraph(Plot* plot);
- void CreateAudioEncoderEnableFecGraph(Plot* plot);
- void CreateAudioEncoderEnableDtxGraph(Plot* plot);
- void CreateAudioEncoderNumChannelsGraph(Plot* plot);
-
- using NetEqStatsGetterMap =
- std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;
- NetEqStatsGetterMap SimulateNetEq(const std::string& replacement_file_name,
- int file_sample_rate_hz) const;
-
- void CreateAudioJitterBufferGraph(uint32_t ssrc,
- const test::NetEqStatsGetter* stats_getter,
- Plot* plot) const;
- void CreateNetEqNetworkStatsGraph(
- const NetEqStatsGetterMap& neteq_stats_getters,
- rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
- void CreateNetEqLifetimeStatsGraph(
- const NetEqStatsGetterMap& neteq_stats_getters,
- rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
-
void CreateIceCandidatePairConfigGraph(Plot* plot);
void CreateIceConnectivityCheckGraph(Plot* plot);
@@ -138,145 +89,11 @@ class EventLogAnalyzer {
void PrintNotifications(FILE* file);
private:
- struct LayerDescription {
- LayerDescription(uint32_t ssrc,
- uint8_t spatial_layer,
- uint8_t temporal_layer)
- : ssrc(ssrc),
- spatial_layer(spatial_layer),
- temporal_layer(temporal_layer) {}
- bool operator<(const LayerDescription& other) const {
- if (ssrc != other.ssrc)
- return ssrc < other.ssrc;
- if (spatial_layer != other.spatial_layer)
- return spatial_layer < other.spatial_layer;
- return temporal_layer < other.temporal_layer;
- }
- uint32_t ssrc;
- uint8_t spatial_layer;
- uint8_t temporal_layer;
- };
-
- bool IsRtxSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_rtx_ssrcs().find(ssrc) !=
- parsed_log_.incoming_rtx_ssrcs().end();
- } else {
- return parsed_log_.outgoing_rtx_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_rtx_ssrcs().end();
- }
- }
-
- bool IsVideoSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_video_ssrcs().find(ssrc) !=
- parsed_log_.incoming_video_ssrcs().end();
- } else {
- return parsed_log_.outgoing_video_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_video_ssrcs().end();
- }
- }
-
- bool IsAudioSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_audio_ssrcs().find(ssrc) !=
- parsed_log_.incoming_audio_ssrcs().end();
- } else {
- return parsed_log_.outgoing_audio_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_audio_ssrcs().end();
- }
- }
-
- template <typename NetEqStatsType>
- void CreateNetEqStatsGraphInternal(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
- const test::NetEqStatsGetter*)> data_extractor,
- rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
-
template <typename IterableType>
void CreateAccumulatedPacketsTimeSeries(Plot* plot,
const IterableType& packets,
const std::string& label);
- void CreateStreamGapAlerts(PacketDirection direction);
- void CreateTransmissionGapAlerts(PacketDirection direction);
-
- std::string GetStreamName(PacketDirection direction, uint32_t ssrc) const {
- char buffer[200];
- rtc::SimpleStringBuilder name(buffer);
- if (IsAudioSsrc(direction, ssrc)) {
- name << "Audio ";
- } else if (IsVideoSsrc(direction, ssrc)) {
- name << "Video ";
- } else {
- name << "Unknown ";
- }
- if (IsRtxSsrc(direction, ssrc)) {
- name << "RTX ";
- }
- if (direction == kIncomingPacket)
- name << "(In) ";
- else
- name << "(Out) ";
- name << "SSRC " << ssrc;
- return name.str();
- }
-
- std::string GetLayerName(LayerDescription layer) const {
- char buffer[100];
- rtc::SimpleStringBuilder name(buffer);
- name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
- << layer.temporal_layer;
- return name.str();
- }
-
- void Alert_RtpLogTimeGap(PacketDirection direction,
- float time_seconds,
- int64_t duration) {
- if (direction == kIncomingPacket) {
- incoming_rtp_recv_time_gaps_.emplace_back(time_seconds, duration);
- } else {
- outgoing_rtp_send_time_gaps_.emplace_back(time_seconds, duration);
- }
- }
-
- void Alert_RtcpLogTimeGap(PacketDirection direction,
- float time_seconds,
- int64_t duration) {
- if (direction == kIncomingPacket) {
- incoming_rtcp_recv_time_gaps_.emplace_back(time_seconds, duration);
- } else {
- outgoing_rtcp_send_time_gaps_.emplace_back(time_seconds, duration);
- }
- }
-
- void Alert_SeqNumJump(PacketDirection direction,
- float time_seconds,
- uint32_t ssrc) {
- if (direction == kIncomingPacket) {
- incoming_seq_num_jumps_.emplace_back(time_seconds, ssrc);
- } else {
- outgoing_seq_num_jumps_.emplace_back(time_seconds, ssrc);
- }
- }
-
- void Alert_CaptureTimeJump(PacketDirection direction,
- float time_seconds,
- uint32_t ssrc) {
- if (direction == kIncomingPacket) {
- incoming_capture_time_jumps_.emplace_back(time_seconds, ssrc);
- } else {
- outgoing_capture_time_jumps_.emplace_back(time_seconds, ssrc);
- }
- }
-
- void Alert_OutgoingHighLoss(double avg_loss_fraction) {
- outgoing_high_loss_alerts_.emplace_back(avg_loss_fraction);
- }
-
std::string GetCandidatePairLogDescriptionFromId(uint32_t candidate_pair_id);
const ParsedRtcEventLog& parsed_log_;
@@ -285,20 +102,6 @@ class EventLogAnalyzer {
// If left empty, all SSRCs will be considered relevant.
std::vector<uint32_t> desired_ssrc_;
- // Stores the timestamps for all log segments, in the form of associated start
- // and end events.
- std::vector<std::pair<int64_t, int64_t>> log_segments_;
-
- std::vector<IncomingRtpReceiveTimeGap> incoming_rtp_recv_time_gaps_;
- std::vector<IncomingRtcpReceiveTimeGap> incoming_rtcp_recv_time_gaps_;
- std::vector<OutgoingRtpSendTimeGap> outgoing_rtp_send_time_gaps_;
- std::vector<OutgoingRtcpSendTimeGap> outgoing_rtcp_send_time_gaps_;
- std::vector<IncomingSeqNumJump> incoming_seq_num_jumps_;
- std::vector<IncomingCaptureTimeJump> incoming_capture_time_jumps_;
- std::vector<OutgoingSeqNoJump> outgoing_seq_num_jumps_;
- std::vector<OutgoingCaptureTimeJump> outgoing_capture_time_jumps_;
- std::vector<OutgoingHighLoss> outgoing_high_loss_alerts_;
-
std::map<uint32_t, std::string> candidate_pair_desc_by_id_;
AnalyzerConfig config_;
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc b/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc
new file mode 100644
index 0000000000..3d3ce5a4ac
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc
@@ -0,0 +1,83 @@
+
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+
+namespace webrtc {
+
+bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_rtx_ssrcs().find(ssrc) !=
+ parsed_log.incoming_rtx_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_rtx_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_rtx_ssrcs().end();
+ }
+}
+
+bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_video_ssrcs().find(ssrc) !=
+ parsed_log.incoming_video_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_video_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_video_ssrcs().end();
+ }
+}
+
+bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_audio_ssrcs().find(ssrc) !=
+ parsed_log.incoming_audio_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_audio_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_audio_ssrcs().end();
+ }
+}
+
+std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ char buffer[200];
+ rtc::SimpleStringBuilder name(buffer);
+ if (IsAudioSsrc(parsed_log, direction, ssrc)) {
+ name << "Audio ";
+ } else if (IsVideoSsrc(parsed_log, direction, ssrc)) {
+ name << "Video ";
+ } else {
+ name << "Unknown ";
+ }
+ if (IsRtxSsrc(parsed_log, direction, ssrc)) {
+ name << "RTX ";
+ }
+ if (direction == kIncomingPacket)
+ name << "(In) ";
+ else
+ name << "(Out) ";
+ name << "SSRC " << ssrc;
+ return name.str();
+}
+
+std::string GetLayerName(LayerDescription layer) {
+ char buffer[100];
+ rtc::SimpleStringBuilder name(buffer);
+ name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
+ << layer.temporal_layer;
+ return name.str();
+}
+
+} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer_common.h b/rtc_tools/rtc_event_log_visualizer/analyzer_common.h
new file mode 100644
index 0000000000..d5776acf62
--- /dev/null
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer_common.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
+
+#include <cstdint>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/function_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
+
+namespace webrtc {
+
+constexpr int kNumMicrosecsPerSec = 1000000;
+constexpr float kLeftMargin = 0.01f;
+constexpr float kRightMargin = 0.02f;
+constexpr float kBottomMargin = 0.02f;
+constexpr float kTopMargin = 0.05f;
+
+class AnalyzerConfig {
+ public:
+ float GetCallTimeSec(int64_t timestamp_us) const {
+ int64_t offset = normalize_time_ ? begin_time_ : 0;
+ return static_cast<float>(timestamp_us - offset) / 1000000;
+ }
+
+ float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
+
+ float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }
+
+ // Window and step size used for calculating moving averages, e.g. bitrate.
+ // The generated data points will be |step_| microseconds apart.
+ // Only events occurring at most |window_duration_| microseconds before the
+ // current data point will be part of the average.
+ int64_t window_duration_;
+ int64_t step_;
+
+ // First and last events of the log.
+ int64_t begin_time_;
+ int64_t end_time_;
+ bool normalize_time_;
+};
+
+struct LayerDescription {
+ LayerDescription(uint32_t ssrc, uint8_t spatial_layer, uint8_t temporal_layer)
+ : ssrc(ssrc),
+ spatial_layer(spatial_layer),
+ temporal_layer(temporal_layer) {}
+ bool operator<(const LayerDescription& other) const {
+ if (ssrc != other.ssrc)
+ return ssrc < other.ssrc;
+ if (spatial_layer != other.spatial_layer)
+ return spatial_layer < other.spatial_layer;
+ return temporal_layer < other.temporal_layer;
+ }
+ uint32_t ssrc;
+ uint8_t spatial_layer;
+ uint8_t temporal_layer;
+};
+
+bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+
+std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+std::string GetLayerName(LayerDescription layer);
+
+// For each element in data_view, use |f()| to extract a y-coordinate and
+// store the result in a TimeSeries.
+template <typename DataType, typename IterableType>
+void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<float>(const DataType&)> fy,
+ const IterableType& data_view,
+ TimeSeries* result) {
+ for (size_t i = 0; i < data_view.size(); i++) {
+ const DataType& elem = data_view[i];
+ float x = fx(elem);
+ absl::optional<float> y = fy(elem);
+ if (y)
+ result->points.emplace_back(x, *y);
+ }
+}
+
+// For each pair of adjacent elements in |data|, use |f()| to extract a
+// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
+// will be the time of the second element in the pair.
+template <typename DataType, typename ResultType, typename IterableType>
+void ProcessPairs(
+ rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&,
+ const DataType&)> fy,
+ const IterableType& data,
+ TimeSeries* result) {
+ for (size_t i = 1; i < data.size(); i++) {
+ float x = fx(data[i]);
+ absl::optional<ResultType> y = fy(data[i - 1], data[i]);
+ if (y)
+ result->points.emplace_back(x, static_cast<float>(*y));
+ }
+}
+
+// For each pair of adjacent elements in |data|, use |f()| to extract a
+// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
+// will be the time of the second element in the pair.
+template <typename DataType, typename ResultType, typename IterableType>
+void AccumulatePairs(
+ rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&,
+ const DataType&)> fy,
+ const IterableType& data,
+ TimeSeries* result) {
+ ResultType sum = 0;
+ for (size_t i = 1; i < data.size(); i++) {
+ float x = fx(data[i]);
+ absl::optional<ResultType> y = fy(data[i - 1], data[i]);
+ if (y) {
+ sum += *y;
+ result->points.emplace_back(x, static_cast<float>(sum));
+ }
+ }
+}
+
+// Calculates a moving average of |data| and stores the result in a TimeSeries.
+// A data point is generated every |step| microseconds from |begin_time|
+// to |end_time|. The value of each data point is the average of the data
+// during the preceding |window_duration_us| microseconds.
+template <typename DataType, typename ResultType, typename IterableType>
+void MovingAverage(
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy,
+ const IterableType& data_view,
+ AnalyzerConfig config,
+ TimeSeries* result) {
+ size_t window_index_begin = 0;
+ size_t window_index_end = 0;
+ ResultType sum_in_window = 0;
+
+ for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_;
+ t += config.step_) {
+ while (window_index_end < data_view.size() &&
+ data_view[window_index_end].log_time_us() < t) {
+ absl::optional<ResultType> value = fy(data_view[window_index_end]);
+ if (value)
+ sum_in_window += *value;
+ ++window_index_end;
+ }
+ while (window_index_begin < data_view.size() &&
+ data_view[window_index_begin].log_time_us() <
+ t - config.window_duration_) {
+ absl::optional<ResultType> value = fy(data_view[window_index_begin]);
+ if (value)
+ sum_in_window -= *value;
+ ++window_index_begin;
+ }
+ float window_duration_s =
+ static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec;
+ float x = config.GetCallTimeSec(t);
+ float y = sum_in_window / window_duration_s;
+ result->points.emplace_back(x, y);
+ }
+}
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
diff --git a/rtc_tools/rtc_event_log_visualizer/main.cc b/rtc_tools/rtc_event_log_visualizer/main.cc
index eb36b2679e..2563338e1a 100644
--- a/rtc_tools/rtc_event_log_visualizer/main.cc
+++ b/rtc_tools/rtc_event_log_visualizer/main.cc
@@ -30,10 +30,10 @@
#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
-#include "rtc_tools/rtc_event_log_visualizer/plot_protobuf.h"
-#include "rtc_tools/rtc_event_log_visualizer/plot_python.h"
#include "system_wrappers/include/field_trial.h"
#include "test/field_trial.h"
#include "test/testsupport/file_utils.h"
@@ -77,7 +77,7 @@ ABSL_FLAG(bool,
ABSL_FLAG(bool,
print_triage_alerts,
- false,
+ true,
"Print triage alerts, i.e. a list of potential problems.");
ABSL_FLAG(bool,
@@ -194,9 +194,9 @@ int main(int argc, char* argv[]) {
"A tool for visualizing WebRTC event logs.\n"
"Example usage:\n"
"./event_log_visualizer <logfile> | python\n");
- absl::FlagsUsageConfig config;
- config.contains_help_flags = &ContainsHelppackageFlags;
- absl::SetFlagsUsageConfig(config);
+ absl::FlagsUsageConfig flag_config;
+ flag_config.contains_help_flags = &ContainsHelppackageFlags;
+ absl::SetFlagsUsageConfig(flag_config);
std::vector<char*> args = absl::ParseCommandLine(argc, argv);
// Print RTC_LOG warnings and errors even in release builds.
@@ -261,16 +261,22 @@ int main(int argc, char* argv[]) {
}
}
- webrtc::EventLogAnalyzer analyzer(parsed_log,
- absl::GetFlag(FLAGS_normalize_time));
- std::unique_ptr<webrtc::PlotCollection> collection;
- if (absl::GetFlag(FLAGS_protobuf_output)) {
- collection.reset(new webrtc::ProtobufPlotCollection());
- } else {
- collection.reset(
- new webrtc::PythonPlotCollection(absl::GetFlag(FLAGS_shared_xaxis)));
+ webrtc::AnalyzerConfig config;
+ config.window_duration_ = 250000;
+ config.step_ = 10000;
+ config.normalize_time_ = absl::GetFlag(FLAGS_normalize_time);
+ config.begin_time_ = parsed_log.first_timestamp();
+ config.end_time_ = parsed_log.last_timestamp();
+ if (config.end_time_ < config.begin_time_) {
+ RTC_LOG(LS_WARNING) << "Log end time " << config.end_time_
+ << " not after begin time " << config.begin_time_
+ << ". Nothing to analyze. Is the log broken?";
+ return -1;
}
+ webrtc::EventLogAnalyzer analyzer(parsed_log, config);
+ webrtc::PlotCollection collection;
+
PlotMap plots;
plots.RegisterPlot("incoming_packet_sizes", [&](Plot* plot) {
analyzer.CreatePacketGraph(webrtc::kIncomingPacket, plot);
@@ -423,22 +429,22 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("pacer_delay",
[&](Plot* plot) { analyzer.CreatePacerDelayGraph(plot); });
plots.RegisterPlot("audio_encoder_bitrate", [&](Plot* plot) {
- analyzer.CreateAudioEncoderTargetBitrateGraph(plot);
+ CreateAudioEncoderTargetBitrateGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_frame_length", [&](Plot* plot) {
- analyzer.CreateAudioEncoderFrameLengthGraph(plot);
+ CreateAudioEncoderFrameLengthGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_packet_loss", [&](Plot* plot) {
- analyzer.CreateAudioEncoderPacketLossGraph(plot);
+ CreateAudioEncoderPacketLossGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_fec", [&](Plot* plot) {
- analyzer.CreateAudioEncoderEnableFecGraph(plot);
+ CreateAudioEncoderEnableFecGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_dtx", [&](Plot* plot) {
- analyzer.CreateAudioEncoderEnableDtxGraph(plot);
+ CreateAudioEncoderEnableDtxGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_num_channels", [&](Plot* plot) {
- analyzer.CreateAudioEncoderNumChannelsGraph(plot);
+ CreateAudioEncoderNumChannelsGraph(parsed_log, config, plot);
});
plots.RegisterPlot("ice_candidate_pair_config", [&](Plot* plot) {
@@ -461,14 +467,14 @@ int main(int argc, char* argv[]) {
wav_path = webrtc::test::ResourcePath(
"audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav");
}
- absl::optional<webrtc::EventLogAnalyzer::NetEqStatsGetterMap> neteq_stats;
+ absl::optional<webrtc::NetEqStatsGetterMap> neteq_stats;
plots.RegisterPlot("simulated_neteq_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.expand_rate / 16384.f;
},
@@ -477,10 +483,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_speech_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.speech_expand_rate / 16384.f;
},
@@ -489,10 +495,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_accelerate_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.accelerate_rate / 16384.f;
},
@@ -501,10 +507,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preemptive_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preemptive_rate / 16384.f;
},
@@ -513,10 +519,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_packet_loss_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.packet_loss_rate / 16384.f;
},
@@ -525,10 +531,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_concealment_events", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqLifetimeStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqLifetimeStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqLifetimeStatistics& stats) {
return static_cast<float>(stats.concealment_events);
},
@@ -537,10 +543,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preferred_buffer_size", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preferred_buffer_size_ms;
},
@@ -587,7 +593,7 @@ int main(int argc, char* argv[]) {
for (const auto& plot : plots) {
if (plot.enabled) {
- Plot* output = collection->AppendNewPlot();
+ Plot* output = collection.AppendNewPlot();
plot.plot_func(output);
output->SetId(plot.label);
}
@@ -601,21 +607,28 @@ int main(int argc, char* argv[]) {
if (absl::c_find(plot_flags, "simulated_neteq_jitter_buffer_delay") !=
plot_flags.end()) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- for (webrtc::EventLogAnalyzer::NetEqStatsGetterMap::const_iterator it =
- neteq_stats->cbegin();
+ for (webrtc::NetEqStatsGetterMap::const_iterator it = neteq_stats->cbegin();
it != neteq_stats->cend(); ++it) {
- analyzer.CreateAudioJitterBufferGraph(it->first, it->second.get(),
- collection->AppendNewPlot());
+ webrtc::CreateAudioJitterBufferGraph(parsed_log, config, it->first,
+ it->second.get(),
+ collection.AppendNewPlot());
}
}
- collection->Draw();
+ if (absl::GetFlag(FLAGS_protobuf_output)) {
+ webrtc::analytics::ChartCollection proto_charts;
+ collection.ExportProtobuf(&proto_charts);
+ std::cout << proto_charts.SerializeAsString();
+ } else {
+ collection.PrintPythonCode(absl::GetFlag(FLAGS_shared_xaxis));
+ }
if (absl::GetFlag(FLAGS_print_triage_alerts)) {
- analyzer.CreateTriageNotifications();
- analyzer.PrintNotifications(stderr);
+ webrtc::TriageHelper triage_alerts(config);
+ triage_alerts.AnalyzeLog(parsed_log);
+ triage_alerts.Print(stderr);
}
return 0;
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_base.cc b/rtc_tools/rtc_event_log_visualizer/plot_base.cc
index dfcd26fed5..dce601a832 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_base.cc
+++ b/rtc_tools/rtc_event_log_visualizer/plot_base.cc
@@ -11,6 +11,7 @@
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
#include <algorithm>
+#include <memory>
#include "rtc_base/checks.h"
@@ -93,4 +94,232 @@ void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) {
}
}
+void Plot::PrintPythonCode() const {
+ // Write python commands to stdout. Intended program usage is
+ // ./event_log_visualizer event_log160330.dump | python
+
+ if (!series_list_.empty()) {
+ printf("color_count = %zu\n", series_list_.size());
+ printf(
+ "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
+ "in range(color_count)]\n");
+ printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
+
+ for (size_t i = 0; i < series_list_.size(); i++) {
+ printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
+ // List x coordinates
+ printf("x%zu = [", i);
+ if (!series_list_[i].points.empty())
+ printf("%.3f", series_list_[i].points[0].x);
+ for (size_t j = 1; j < series_list_[i].points.size(); j++)
+ printf(", %.3f", series_list_[i].points[j].x);
+ printf("]\n");
+
+ // List y coordinates
+ printf("y%zu = [", i);
+ if (!series_list_[i].points.empty())
+ printf("%G", series_list_[i].points[0].y);
+ for (size_t j = 1; j < series_list_[i].points.size(); j++)
+ printf(", %G", series_list_[i].points[j].y);
+ printf("]\n");
+
+ if (series_list_[i].line_style == LineStyle::kBar) {
+ // There is a plt.bar function that draws bar plots,
+ // but it is *way* too slow to be useful.
+ printf(
+ "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
+ "max(t,0), y%zu), color=colors[%zu], "
+ "label=\'%s\')\n",
+ i, i, i, i, series_list_[i].label.c_str());
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], "
+ "marker='.', ls=' ')\n",
+ i, i, i);
+ }
+ } else if (series_list_[i].line_style == LineStyle::kLine) {
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
+ "marker='.')\n",
+ i, i, i, series_list_[i].label.c_str());
+ } else {
+ printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
+ i, i, series_list_[i].label.c_str());
+ }
+ } else if (series_list_[i].line_style == LineStyle::kStep) {
+ // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
+ // to illustrate the "steps". This can be expressed by duplicating all
+ // elements except the first in x and the last in y.
+ printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
+ printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
+ printf(
+ "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
+ "label=\'%s\')\n",
+ i, i, i, series_list_[i].label.c_str());
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], "
+ "marker='.', ls=' ')\n",
+ i, i, i);
+ }
+ } else if (series_list_[i].line_style == LineStyle::kNone) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
+ "marker='o', ls=' ')\n",
+ i, i, i, series_list_[i].label.c_str());
+ } else {
+ printf("raise Exception(\"Unknown graph type\")\n");
+ }
+ }
+
+ // IntervalSeries
+ printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
+ RTC_CHECK_LE(interval_list_.size(), 4);
+ // To get the intervals to show up in the legend we have to create patches
+ // for them.
+ printf("legend_patches = []\n");
+ for (size_t i = 0; i < interval_list_.size(); i++) {
+ // List intervals
+ printf("\n# === IntervalSeries: %s ===\n",
+ interval_list_[i].label.c_str());
+ printf("ival%zu = [", i);
+ if (!interval_list_[i].intervals.empty()) {
+ printf("(%G, %G)", interval_list_[i].intervals[0].begin,
+ interval_list_[i].intervals[0].end);
+ }
+ for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
+ printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
+ interval_list_[i].intervals[j].end);
+ }
+ printf("]\n");
+
+ printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
+ if (interval_list_[i].orientation == IntervalSeries::kVertical) {
+ printf(
+ " plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
+ "facecolor=interval_colors[%zu], "
+ "alpha=0.3)\n",
+ i, i, i);
+ } else {
+ printf(
+ " plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
+ "facecolor=interval_colors[%zu], "
+ "alpha=0.3)\n",
+ i, i, i);
+ }
+ printf(
+ "legend_patches.append(mpatches.Patch(ec=\'black\', "
+ "fc=interval_colors[%zu], label='%s'))\n",
+ i, interval_list_[i].label.c_str());
+ }
+ }
+
+ printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
+ printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
+ printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
+ printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
+ printf("plt.title(\'%s\')\n", title_.c_str());
+ printf("fig = plt.gcf()\n");
+ printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
+ if (!yaxis_tick_labels_.empty()) {
+ printf("yaxis_tick_labels = [");
+ for (const auto& kv : yaxis_tick_labels_) {
+ printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
+ }
+ printf("]\n");
+ printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
+ printf("plt.yticks(*yaxis_tick_labels)\n");
+ }
+ if (!series_list_.empty() || !interval_list_.empty()) {
+ printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
+ printf("for lp in legend_patches:\n");
+ printf(" handles.append(lp)\n");
+ printf(" labels.append(lp.get_label())\n");
+ printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
+ }
+}
+
+void Plot::ExportProtobuf(webrtc::analytics::Chart* chart) const {
+ for (size_t i = 0; i < series_list_.size(); i++) {
+ webrtc::analytics::DataSet* data_set = chart->add_data_sets();
+ for (const auto& point : series_list_[i].points) {
+ data_set->add_x_values(point.x);
+ }
+ for (const auto& point : series_list_[i].points) {
+ data_set->add_y_values(point.y);
+ }
+
+ if (series_list_[i].line_style == LineStyle::kBar) {
+ data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kLine) {
+ data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kStep) {
+ data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kNone) {
+ data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
+ } else {
+ data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
+ }
+
+ if (series_list_[i].point_style == PointStyle::kHighlight)
+ data_set->set_highlight_points(true);
+
+ data_set->set_label(series_list_[i].label);
+ }
+
+ chart->set_xaxis_min(xaxis_min_);
+ chart->set_xaxis_max(xaxis_max_);
+ chart->set_yaxis_min(yaxis_min_);
+ chart->set_yaxis_max(yaxis_max_);
+ chart->set_xaxis_label(xaxis_label_);
+ chart->set_yaxis_label(yaxis_label_);
+ chart->set_title(title_);
+ chart->set_id(id_);
+
+ for (const auto& kv : yaxis_tick_labels_) {
+ webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
+ tick->set_value(kv.first);
+ tick->set_label(kv.second);
+ }
+}
+
+void PlotCollection::PrintPythonCode(bool shared_xaxis) const {
+ printf("import matplotlib.pyplot as plt\n");
+ printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
+ printf("import matplotlib.patches as mpatches\n");
+ printf("import matplotlib.patheffects as pe\n");
+ printf("import colorsys\n");
+ for (size_t i = 0; i < plots_.size(); i++) {
+ printf("plt.figure(%zu)\n", i);
+ if (shared_xaxis) {
+ // Link x-axes across all figures for synchronized zooming.
+ if (i == 0) {
+ printf("axis0 = plt.subplot(111)\n");
+ } else {
+ printf("plt.subplot(111, sharex=axis0)\n");
+ }
+ }
+ plots_[i]->PrintPythonCode();
+ }
+ printf("plt.show()\n");
+}
+
+void PlotCollection::ExportProtobuf(
+ webrtc::analytics::ChartCollection* collection) const {
+ for (const auto& plot : plots_) {
+ // TODO(terelius): Ensure that there is no way to insert plots other than
+ // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast
+ // here.
+ webrtc::analytics::Chart* protobuf_representation =
+ collection->add_charts();
+ plot->ExportProtobuf(protobuf_representation);
+ }
+}
+
+Plot* PlotCollection::AppendNewPlot() {
+ plots_.push_back(std::make_unique<Plot>());
+ return plots_.back().get();
+}
+
} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_base.h b/rtc_tools/rtc_event_log_visualizer/plot_base.h
index 5e4ebfa522..06a206f031 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_base.h
+++ b/rtc_tools/rtc_event_log_visualizer/plot_base.h
@@ -15,6 +15,13 @@
#include <utility>
#include <vector>
+#include "rtc_base/deprecation.h"
+#include "rtc_base/ignore_wundef.h"
+
+RTC_PUSH_IGNORING_WUNDEF()
+#include "rtc_tools/rtc_event_log_visualizer/proto/chart.pb.h"
+RTC_POP_IGNORING_WUNDEF()
+
namespace webrtc {
enum class LineStyle {
@@ -94,8 +101,8 @@ class Plot {
public:
virtual ~Plot() {}
- // Overloaded to draw the plot.
- virtual void Draw() = 0;
+ // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
+ RTC_DEPRECATED virtual void Draw() {}
// Sets the lower x-axis limit to min_value (if left_margin == 0).
// Sets the upper x-axis limit to max_value (if right_margin == 0).
@@ -158,6 +165,12 @@ class Plot {
// Otherwise, the call has no effect and the timeseries is destroyed.
void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series);
+ // Replaces PythonPlot::Draw()
+ void PrintPythonCode() const;
+
+ // Replaces ProtobufPlot::Draw()
+ void ExportProtobuf(webrtc::analytics::Chart* chart) const;
+
protected:
float xaxis_min_;
float xaxis_max_;
@@ -175,8 +188,17 @@ class Plot {
class PlotCollection {
public:
virtual ~PlotCollection() {}
- virtual void Draw() = 0;
- virtual Plot* AppendNewPlot() = 0;
+
+ // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
+ RTC_DEPRECATED virtual void Draw() {}
+
+ virtual Plot* AppendNewPlot();
+
+ // Replaces PythonPlotCollection::Draw()
+ void PrintPythonCode(bool shared_xaxis) const;
+
+ // Replaces ProtobufPlotCollections::Draw()
+ void ExportProtobuf(webrtc::analytics::ChartCollection* collection) const;
protected:
std::vector<std::unique_ptr<Plot>> plots_;
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
index 9e82c01ba6..0f43191e8b 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
+++ b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
@@ -24,49 +24,7 @@ ProtobufPlot::~ProtobufPlot() {}
void ProtobufPlot::Draw() {}
-void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) {
- for (size_t i = 0; i < series_list_.size(); i++) {
- webrtc::analytics::DataSet* data_set = chart->add_data_sets();
- for (const auto& point : series_list_[i].points) {
- data_set->add_x_values(point.x);
- }
- for (const auto& point : series_list_[i].points) {
- data_set->add_y_values(point.y);
- }
- if (series_list_[i].line_style == LineStyle::kBar) {
- data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
- } else if (series_list_[i].line_style == LineStyle::kLine) {
- data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
- } else if (series_list_[i].line_style == LineStyle::kStep) {
- data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
- } else if (series_list_[i].line_style == LineStyle::kNone) {
- data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
- } else {
- data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
- }
-
- if (series_list_[i].point_style == PointStyle::kHighlight)
- data_set->set_highlight_points(true);
-
- data_set->set_label(series_list_[i].label);
- }
-
- chart->set_xaxis_min(xaxis_min_);
- chart->set_xaxis_max(xaxis_max_);
- chart->set_yaxis_min(yaxis_min_);
- chart->set_yaxis_max(yaxis_max_);
- chart->set_xaxis_label(xaxis_label_);
- chart->set_yaxis_label(yaxis_label_);
- chart->set_title(title_);
- chart->set_id(id_);
-
- for (const auto& kv : yaxis_tick_labels_) {
- webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
- tick->set_value(kv.first);
- tick->set_label(kv.second);
- }
-}
ProtobufPlotCollection::ProtobufPlotCollection() {}
@@ -78,19 +36,6 @@ void ProtobufPlotCollection::Draw() {
std::cout << collection.SerializeAsString();
}
-void ProtobufPlotCollection::ExportProtobuf(
- webrtc::analytics::ChartCollection* collection) {
- for (const auto& plot : plots_) {
- // TODO(terelius): Ensure that there is no way to insert plots other than
- // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast
- // here.
- webrtc::analytics::Chart* protobuf_representation =
- collection->add_charts();
- static_cast<ProtobufPlot*>(plot.get())
- ->ExportProtobuf(protobuf_representation);
- }
-}
-
Plot* ProtobufPlotCollection::AppendNewPlot() {
Plot* plot = new ProtobufPlot();
plots_.push_back(std::unique_ptr<Plot>(plot));
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
index 738247a309..0773b58d20 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
+++ b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
@@ -23,16 +23,15 @@ class ProtobufPlot final : public Plot {
ProtobufPlot();
~ProtobufPlot() override;
void Draw() override;
- void ExportProtobuf(webrtc::analytics::Chart* chart);
};
class ProtobufPlotCollection final : public PlotCollection {
public:
- ProtobufPlotCollection();
+ // This class is deprecated. Use PlotCollection and ExportProtobuf() instead.
+ RTC_DEPRECATED ProtobufPlotCollection();
~ProtobufPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
- void ExportProtobuf(webrtc::analytics::ChartCollection* collection);
};
} // namespace webrtc
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_python.cc b/rtc_tools/rtc_event_log_visualizer/plot_python.cc
index e7cde45f30..b3708110df 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_python.cc
+++ b/rtc_tools/rtc_event_log_visualizer/plot_python.cc
@@ -25,149 +25,7 @@ PythonPlot::PythonPlot() {}
PythonPlot::~PythonPlot() {}
void PythonPlot::Draw() {
- // Write python commands to stdout. Intended program usage is
- // ./event_log_visualizer event_log160330.dump | python
-
- if (!series_list_.empty()) {
- printf("color_count = %zu\n", series_list_.size());
- printf(
- "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
- "in range(color_count)]\n");
- printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
-
- for (size_t i = 0; i < series_list_.size(); i++) {
- printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
- // List x coordinates
- printf("x%zu = [", i);
- if (!series_list_[i].points.empty())
- printf("%.3f", series_list_[i].points[0].x);
- for (size_t j = 1; j < series_list_[i].points.size(); j++)
- printf(", %.3f", series_list_[i].points[j].x);
- printf("]\n");
-
- // List y coordinates
- printf("y%zu = [", i);
- if (!series_list_[i].points.empty())
- printf("%G", series_list_[i].points[0].y);
- for (size_t j = 1; j < series_list_[i].points.size(); j++)
- printf(", %G", series_list_[i].points[j].y);
- printf("]\n");
-
- if (series_list_[i].line_style == LineStyle::kBar) {
- // There is a plt.bar function that draws bar plots,
- // but it is *way* too slow to be useful.
- printf(
- "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
- "max(t,0), y%zu), color=colors[%zu], "
- "label=\'%s\')\n",
- i, i, i, i, series_list_[i].label.c_str());
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], "
- "marker='.', ls=' ')\n",
- i, i, i);
- }
- } else if (series_list_[i].line_style == LineStyle::kLine) {
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
- "marker='.')\n",
- i, i, i, series_list_[i].label.c_str());
- } else {
- printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
- i, i, series_list_[i].label.c_str());
- }
- } else if (series_list_[i].line_style == LineStyle::kStep) {
- // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
- // to illustrate the "steps". This can be expressed by duplicating all
- // elements except the first in x and the last in y.
- printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
- printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
- printf(
- "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
- "label=\'%s\')\n",
- i, i, i, series_list_[i].label.c_str());
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], "
- "marker='.', ls=' ')\n",
- i, i, i);
- }
- } else if (series_list_[i].line_style == LineStyle::kNone) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
- "marker='o', ls=' ')\n",
- i, i, i, series_list_[i].label.c_str());
- } else {
- printf("raise Exception(\"Unknown graph type\")\n");
- }
- }
-
- // IntervalSeries
- printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
- RTC_CHECK_LE(interval_list_.size(), 4);
- // To get the intervals to show up in the legend we have to create patches
- // for them.
- printf("legend_patches = []\n");
- for (size_t i = 0; i < interval_list_.size(); i++) {
- // List intervals
- printf("\n# === IntervalSeries: %s ===\n",
- interval_list_[i].label.c_str());
- printf("ival%zu = [", i);
- if (!interval_list_[i].intervals.empty()) {
- printf("(%G, %G)", interval_list_[i].intervals[0].begin,
- interval_list_[i].intervals[0].end);
- }
- for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
- printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
- interval_list_[i].intervals[j].end);
- }
- printf("]\n");
-
- printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
- if (interval_list_[i].orientation == IntervalSeries::kVertical) {
- printf(
- " plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
- "facecolor=interval_colors[%zu], "
- "alpha=0.3)\n",
- i, i, i);
- } else {
- printf(
- " plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
- "facecolor=interval_colors[%zu], "
- "alpha=0.3)\n",
- i, i, i);
- }
- printf(
- "legend_patches.append(mpatches.Patch(ec=\'black\', "
- "fc=interval_colors[%zu], label='%s'))\n",
- i, interval_list_[i].label.c_str());
- }
- }
-
- printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
- printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
- printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
- printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
- printf("plt.title(\'%s\')\n", title_.c_str());
- printf("fig = plt.gcf()\n");
- printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
- if (!yaxis_tick_labels_.empty()) {
- printf("yaxis_tick_labels = [");
- for (const auto& kv : yaxis_tick_labels_) {
- printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
- }
- printf("]\n");
- printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
- printf("plt.yticks(*yaxis_tick_labels)\n");
- }
- if (!series_list_.empty() || !interval_list_.empty()) {
- printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
- printf("for lp in legend_patches:\n");
- printf(" handles.append(lp)\n");
- printf(" labels.append(lp.get_label())\n");
- printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
- }
+ PrintPythonCode();
}
PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
@@ -176,24 +34,7 @@ PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
PythonPlotCollection::~PythonPlotCollection() {}
void PythonPlotCollection::Draw() {
- printf("import matplotlib.pyplot as plt\n");
- printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
- printf("import matplotlib.patches as mpatches\n");
- printf("import matplotlib.patheffects as pe\n");
- printf("import colorsys\n");
- for (size_t i = 0; i < plots_.size(); i++) {
- printf("plt.figure(%zu)\n", i);
- if (shared_xaxis_) {
- // Link x-axes across all figures for synchronized zooming.
- if (i == 0) {
- printf("axis0 = plt.subplot(111)\n");
- } else {
- printf("plt.subplot(111, sharex=axis0)\n");
- }
- }
- plots_[i]->Draw();
- }
- printf("plt.show()\n");
+ PrintPythonCode(shared_xaxis_);
}
Plot* PythonPlotCollection::AppendNewPlot() {
diff --git a/rtc_tools/rtc_event_log_visualizer/plot_python.h b/rtc_tools/rtc_event_log_visualizer/plot_python.h
index dcdcf23fcf..998ed7b221 100644
--- a/rtc_tools/rtc_event_log_visualizer/plot_python.h
+++ b/rtc_tools/rtc_event_log_visualizer/plot_python.h
@@ -23,7 +23,8 @@ class PythonPlot final : public Plot {
class PythonPlotCollection final : public PlotCollection {
public:
- explicit PythonPlotCollection(bool shared_xaxis = false);
+ // This class is deprecated. Use PlotCollection and PrintPythonCode() instead.
+ RTC_DEPRECATED explicit PythonPlotCollection(bool shared_xaxis = false);
~PythonPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
diff --git a/rtc_tools/rtc_event_log_visualizer/triage_notifications.h b/rtc_tools/rtc_event_log_visualizer/triage_notifications.h
deleted file mode 100644
index 23b31ece42..0000000000
--- a/rtc_tools/rtc_event_log_visualizer/triage_notifications.h
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
-#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
-
-#include <string>
-
-namespace webrtc {
-
-class IncomingRtpReceiveTimeGap {
- public:
- IncomingRtpReceiveTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTP packets received for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class IncomingRtcpReceiveTimeGap {
- public:
- IncomingRtcpReceiveTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTCP packets received for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class OutgoingRtpSendTimeGap {
- public:
- OutgoingRtpSendTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTP packets sent for ") + std::to_string(duration_) +
- std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class OutgoingRtcpSendTimeGap {
- public:
- OutgoingRtcpSendTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTCP packets sent for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class IncomingSeqNumJump {
- public:
- IncomingSeqNumJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Sequence number jumps on incoming SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class IncomingCaptureTimeJump {
- public:
- IncomingCaptureTimeJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Capture timestamp jumps on incoming SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingSeqNoJump {
- public:
- OutgoingSeqNoJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Sequence number jumps on outgoing SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingCaptureTimeJump {
- public:
- OutgoingCaptureTimeJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Capture timestamp jumps on outgoing SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingHighLoss {
- public:
- explicit OutgoingHighLoss(double avg_loss_fraction)
- : avg_loss_fraction_(avg_loss_fraction) {}
- std::string ToString() const {
- return std::string("High average loss (") +
- std::to_string(avg_loss_fraction_ * 100) +
- std::string("%) across the call.");
- }
-
- private:
- double avg_loss_fraction_;
-};
-
-} // namespace webrtc
-
-#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 1b313b3d87..6092d96e02 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -35,8 +35,8 @@ rtc_library("media_constraints") {
deps = [
"../api:audio_options_api",
"../api:libjingle_peerconnection_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("sdk_tests") {
@@ -154,7 +154,7 @@ if (is_ios || is_mac) {
"../rtc_base:checks",
]
- libs = [
+ frameworks = [
"AVFoundation.framework",
"CoreMedia.framework",
]
@@ -272,10 +272,10 @@ if (is_ios || is_mac) {
"../rtc_base:checks",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/base:core_headers",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
- libs = [ "AudioToolbox.framework" ]
+ frameworks = [ "AudioToolbox.framework" ]
}
# This target exists to expose :audio_session_objc and
@@ -327,7 +327,7 @@ if (is_ios || is_mac) {
public_configs = [ ":common_config_objc" ]
- libs = [ "AVFoundation.framework" ]
+ frameworks = [ "AVFoundation.framework" ]
deps = [
":base_objc",
@@ -394,7 +394,7 @@ if (is_ios || is_mac) {
"..:common_objc",
":used_from_extension",
]
- libs = [
+ frameworks = [
"VideoToolbox.framework",
"CoreGraphics.framework",
"CoreVideo.framework",
@@ -412,19 +412,19 @@ if (is_ios || is_mac) {
"objc/components/renderer/opengl/RTCShader.mm",
"objc/components/renderer/opengl/RTCVideoViewShading.h",
]
- libs = [ "CoreVideo.framework" ]
+ frameworks = [ "CoreVideo.framework" ]
if (is_ios) {
sources += [
"objc/components/renderer/opengl/RTCNV12TextureCache.h",
"objc/components/renderer/opengl/RTCNV12TextureCache.m",
]
- libs += [
+ frameworks += [
"GLKit.framework",
"OpenGLES.framework",
"QuartzCore.framework",
]
} else if (is_mac) {
- libs += [
+ frameworks += [
"CoreMedia.framework",
"OpenGL.framework",
]
@@ -444,8 +444,8 @@ if (is_ios || is_mac) {
"../media:rtc_media_base",
"../rtc_base",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
configs += [
"..:common_objc",
@@ -504,7 +504,7 @@ if (is_ios || is_mac) {
"objc/components/renderer/metal/RTCMTLVideoView.m",
]
}
- libs = [
+ frameworks = [
"CoreVideo.framework",
"Metal.framework",
"MetalKit.framework",
@@ -514,7 +514,7 @@ if (is_ios || is_mac) {
"objc/components/renderer/metal/RTCMTLNSVideoView.h",
"objc/components/renderer/metal/RTCMTLNSVideoView.m",
]
- libs += [ "AppKit.framework" ]
+ frameworks += [ "AppKit.framework" ]
}
deps = [
":base_objc",
@@ -559,7 +559,7 @@ if (is_ios || is_mac) {
"objc/components/capturer/RTCFileVideoCapturer.m",
]
}
- libs = [
+ frameworks = [
"AVFoundation.framework",
"CoreVideo.framework",
]
@@ -946,7 +946,6 @@ if (is_ios || is_mac) {
"../api/crypto:frame_encryptor_interface",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../api/video_codecs:video_codecs_api",
@@ -1217,7 +1216,6 @@ if (is_ios || is_mac) {
"../api/audio_codecs:audio_codecs_api",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
- "../api/transport/media:media_transport_interface",
"../api/video_codecs:video_codecs_api",
"../media:rtc_media_base",
"../modules:module_api",
@@ -1366,7 +1364,7 @@ if (is_ios || is_mac) {
]
}
- libs = [
+ frameworks = [
"AVFoundation.framework",
"CoreGraphics.framework",
"CoreMedia.framework",
@@ -1499,7 +1497,7 @@ if (is_ios || is_mac) {
]
}
- libs = [
+ frameworks = [
"AVFoundation.framework",
"CoreGraphics.framework",
"CoreMedia.framework",
@@ -1572,8 +1570,8 @@ if (is_ios || is_mac) {
"../api/video_codecs:video_codecs_api",
"../common_video",
"../rtc_base",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("native_video") {
@@ -1676,7 +1674,7 @@ if (is_ios || is_mac) {
"//third_party/libyuv",
]
- libs = [
+ frameworks = [
"CoreFoundation.framework",
"CoreMedia.framework",
"CoreVideo.framework",
diff --git a/sdk/android/AndroidManifest.xml b/sdk/android/AndroidManifest.xml
index 5afbd7bfe3..417f45fc5e 100644
--- a/sdk/android/AndroidManifest.xml
+++ b/sdk/android/AndroidManifest.xml
@@ -10,5 +10,5 @@
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.webrtc">
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="23" />
</manifest>
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 9ecf0afb34..4506cbdbf3 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -272,7 +272,6 @@ if (is_android) {
"api/org/webrtc/MediaSource.java",
"api/org/webrtc/MediaStream.java",
"api/org/webrtc/MediaStreamTrack.java",
- "api/org/webrtc/MediaTransportFactoryFactory.java",
"api/org/webrtc/NativeLibraryLoader.java",
"api/org/webrtc/NativePeerConnectionFactory.java",
"api/org/webrtc/NetEqFactoryFactory.java",
@@ -322,10 +321,11 @@ if (is_android) {
":video_java",
"//modules/audio_device:audio_device_java",
"//rtc_base:base_java",
+ "//third_party/android_deps:androidx_annotation_annotation_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
srcjar_deps = [
- "//api:rtp_parameters_enums",
+ "//api:priority_enums",
"//api/video:video_frame_enums",
]
}
@@ -468,6 +468,7 @@ if (is_android) {
]
deps = [
+ ":base_java",
":video_api_java",
":video_java",
"//rtc_base:base_java",
@@ -557,8 +558,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_jni") {
@@ -653,10 +654,11 @@ if (current_os == "linux" || is_android) {
"../../rtc_base",
"../../rtc_base:checks",
"../../rtc_base:rtc_task_queue",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("peerconnection_jni") {
@@ -744,6 +746,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_task_queue",
"../../rtc_base/system:thread_registry",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -836,8 +840,8 @@ if (current_os == "linux" || is_android) {
"//api:array_view",
"//rtc_base:checks",
"//rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("native_api_base") {
@@ -931,8 +935,9 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:criticalsection",
"../../rtc_base:logging",
"../../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/base:core_headers",
+ "../../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
# API for creating C++ wrapper implementations of api/mediastreaminterface.h
@@ -1041,8 +1046,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("java_audio_device_module") {
@@ -1064,8 +1069,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_enable_android_aaudio) {
@@ -1091,8 +1096,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -1117,8 +1122,8 @@ if (current_os == "linux" || is_android) {
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
#########################
@@ -1304,7 +1309,7 @@ if (is_android) {
rtc_instrumentation_test_apk("android_instrumentation_test_apk") {
apk_name = "android_instrumentation_test_apk"
android_manifest = "instrumentationtests/AndroidManifest.xml"
- min_sdk_version = 16
+ min_sdk_version = 21
target_sdk_version = 21
sources = [
@@ -1342,17 +1347,29 @@ if (is_android) {
data = [ "../../sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m" ]
deps = [
+ ":audio_api_java",
":base_java",
+ ":builtin_audio_codecs_java",
+ ":camera_java",
":default_video_codec_factory_java",
+ ":filevideo_java",
+ ":hwcodecs_java",
":libjingle_peerconnection_java",
":libjingle_peerconnection_metrics_default_java",
+ ":peerconnection_java",
+ ":surfaceviewrenderer_java",
+ ":swcodecs_java",
":video_api_java",
":video_java",
"//base:base_java_test_support",
"//rtc_base:base_java",
+ "//third_party/android_deps:com_android_support_support_annotations_java",
"//third_party/android_support_test_runner:rules_java",
"//third_party/android_support_test_runner:runner_java",
"//third_party/google-truth:google_truth_java",
+ "//third_party/guava:guava_android_java",
+ "//third_party/hamcrest:hamcrest_java",
+ "//third_party/hamcrest:hamcrest_library_java",
"//third_party/junit",
"//third_party/mockito:mockito_java",
]
@@ -1442,6 +1459,7 @@ if (is_android) {
"../../pc:libjingle_peerconnection",
"../../rtc_base:checks",
"../../rtc_base:rtc_base",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/system:inline",
"../../system_wrappers",
"../../system_wrappers:field_trial",
@@ -1449,8 +1467,8 @@ if (is_android) {
"../../test:fileutils",
"../../test:test_support",
"../../testing/gtest",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_android_library("native_unittests_java") {
@@ -1507,9 +1525,17 @@ if (is_android) {
]
deps = [
+ ":base_java",
+ ":camera_java",
+ ":hwcodecs_java",
":libjingle_peerconnection_java",
+ ":peerconnection_java",
+ ":video_api_java",
+ ":video_java",
"//base:base_java_test_support",
+ "//third_party/android_deps:com_android_support_support_annotations_java",
"//third_party/google-truth:google_truth_java",
+ "//third_party/guava:guava_android_java",
]
additional_jar_files = [ [
diff --git a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
index ac3b1b97c1..2876df028e 100644
--- a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
@@ -18,18 +18,9 @@ import java.util.Arrays;
public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
private final static Predicate<MediaCodecInfo> defaultAllowedPredicate =
new Predicate<MediaCodecInfo>() {
- private String[] prefixBlacklist =
- Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES,
- MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length);
@Override
public boolean test(MediaCodecInfo arg) {
- final String name = arg.getName();
- for (String prefix : prefixBlacklist) {
- if (name.startsWith(prefix)) {
- return false;
- }
- }
- return true;
+ return MediaCodecUtils.isHardwareAccelerated(arg);
}
};
diff --git a/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java b/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java
deleted file mode 100644
index c16a37a6d7..0000000000
--- a/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/**
- * Factory for creating webrtc::MediaTransportFactory instances.
- */
-public interface MediaTransportFactoryFactory {
- /**
- * Dynamically allocates a webrtc::MediaTransportFactory instance and returns a pointer to it.
- * The caller takes ownership of the object.
- */
- public long createNativeMediaTransportFactory();
-}
diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java
index bf0a2e9441..920e2f12c9 100644
--- a/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/sdk/android/api/org/webrtc/PeerConnection.java
@@ -536,18 +536,6 @@ public class PeerConnection {
// Null indicates no change to currently configured value.
@Nullable public Boolean allowCodecSwitching;
- /*
- * Experimental flag that enables a use of media transport. If this is true, the media transport
- * factory MUST be provided to the PeerConnectionFactory.
- */
- public boolean useMediaTransport;
-
- /*
- * Experimental flag that enables a use of media transport for data channels. If this is true,
- * the media transport factory MUST be provided to the PeerConnectionFactory.
- */
- public boolean useMediaTransportForDataChannels;
-
/**
* Defines advanced optional cryptographic settings related to SRTP and
* frame encryption for native WebRTC. Setting this will overwrite any
@@ -602,8 +590,6 @@ public class PeerConnection {
networkPreference = AdapterType.UNKNOWN;
sdpSemantics = SdpSemantics.PLAN_B;
activeResetSrtpParams = false;
- useMediaTransport = false;
- useMediaTransportForDataChannels = false;
cryptoOptions = null;
turnLoggingId = null;
allowCodecSwitching = null;
@@ -816,16 +802,6 @@ public class PeerConnection {
return allowCodecSwitching;
}
- @CalledByNative("RTCConfiguration")
- boolean getUseMediaTransport() {
- return useMediaTransport;
- }
-
- @CalledByNative("RTCConfiguration")
- boolean getUseMediaTransportForDataChannels() {
- return useMediaTransportForDataChannels;
- }
-
@Nullable
@CalledByNative("RTCConfiguration")
CryptoOptions getCryptoOptions() {
diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index decdc0cc42..c87e639f23 100644
--- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -175,7 +175,6 @@ public class PeerConnectionFactory {
@Nullable private FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
@Nullable private NetworkControllerFactoryFactory networkControllerFactoryFactory;
@Nullable private NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory;
- @Nullable private MediaTransportFactoryFactory mediaTransportFactoryFactory;
@Nullable private NetEqFactoryFactory neteqFactoryFactory;
private Builder() {}
@@ -247,13 +246,6 @@ public class PeerConnectionFactory {
return this;
}
- /** Sets a MediaTransportFactoryFactory for a PeerConnectionFactory. */
- public Builder setMediaTransportFactoryFactory(
- MediaTransportFactoryFactory mediaTransportFactoryFactory) {
- this.mediaTransportFactoryFactory = mediaTransportFactoryFactory;
- return this;
- }
-
/**
* Sets a NetEqFactoryFactory for the PeerConnectionFactory. When using a
* custom NetEqFactoryFactory, the AudioDecoderFactoryFactory will be set
@@ -284,9 +276,6 @@ public class PeerConnectionFactory {
networkStatePredictorFactoryFactory == null
? 0
: networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(),
- mediaTransportFactoryFactory == null
- ? 0
- : mediaTransportFactoryFactory.createNativeMediaTransportFactory(),
neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory());
}
}
@@ -607,7 +596,7 @@ public class PeerConnectionFactory {
long audioDecoderFactory, VideoEncoderFactory encoderFactory,
VideoDecoderFactory decoderFactory, long nativeAudioProcessor,
long nativeFecControllerFactory, long nativeNetworkControllerFactory,
- long nativeNetworkStatePredictorFactory, long mediaTransportFactory, long neteqFactory);
+ long nativeNetworkStatePredictorFactory, long neteqFactory);
private static native long nativeCreatePeerConnection(long factory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver,
diff --git a/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
index 82417fd980..d334dfab4e 100644
--- a/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
@@ -21,19 +21,9 @@ public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderF
*/
private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
new Predicate<MediaCodecInfo>() {
- private String[] prefixWhitelist =
- Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES,
- MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length);
-
@Override
public boolean test(MediaCodecInfo arg) {
- final String name = arg.getName();
- for (String prefix : prefixWhitelist) {
- if (name.startsWith(prefix)) {
- return true;
- }
- }
- return false;
+ return MediaCodecUtils.isSoftwareOnly(arg);
}
};
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 3522a87487..b7b028f5ae 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -263,6 +263,17 @@ public class SurfaceTextureHelper {
});
}
+ /**
+ * Forces a frame to be produced. If no new frame is available, the last frame is sent to the
+ * listener again.
+ */
+ public void forceFrame() {
+ handler.post(() -> {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ });
+ }
+
/** Set the rotation of the delivered frames. */
public void setFrameRotation(int rotation) {
handler.post(() -> this.frameRotation = rotation);
diff --git a/sdk/android/instrumentationtests/AndroidManifest.xml b/sdk/android/instrumentationtests/AndroidManifest.xml
index 75df968f13..55028da703 100644
--- a/sdk/android/instrumentationtests/AndroidManifest.xml
+++ b/sdk/android/instrumentationtests/AndroidManifest.xml
@@ -16,7 +16,7 @@
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc
index a97c81f1f2..af02c10f4c 100644
--- a/sdk/android/native_api/jni/java_types.cc
+++ b/sdk/android/native_api/jni/java_types.cc
@@ -10,6 +10,7 @@
#include "sdk/android/native_api/jni/java_types.h"
+#include <memory>
#include <string>
#include <utility>
@@ -51,14 +52,15 @@ Iterable::Iterator::Iterator(JNIEnv* jni, const JavaRef<jobject>& iterable)
Iterable::Iterator::Iterator(Iterator&& other)
: jni_(std::move(other.jni_)),
iterator_(std::move(other.iterator_)),
- value_(std::move(other.value_)),
- thread_checker_(std::move(other.thread_checker_)) {}
+ value_(std::move(other.value_)) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+}
Iterable::Iterator::~Iterator() = default;
// Advances the iterator one step.
Iterable::Iterator& Iterable::Iterator::operator++() {
- RTC_CHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
if (AtEnd()) {
// Can't move past the end.
return *this;
@@ -93,7 +95,7 @@ ScopedJavaLocalRef<jobject>& Iterable::Iterator::operator*() {
}
bool Iterable::Iterator::AtEnd() const {
- RTC_CHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
return jni_ == nullptr || IsNull(jni_, iterator_);
}
diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h
index 955911c186..26fdd5a0b8 100644
--- a/sdk/android/native_api/jni/java_types.h
+++ b/sdk/android/native_api/jni/java_types.h
@@ -18,7 +18,9 @@
#define SDK_ANDROID_NATIVE_API_JNI_JAVA_TYPES_H_
#include <jni.h>
+
#include <map>
+#include <memory>
#include <string>
#include <vector>
diff --git a/sdk/android/native_api/stacktrace/stacktrace.cc b/sdk/android/native_api/stacktrace/stacktrace.cc
index 6350acaacf..cea3490091 100644
--- a/sdk/android/native_api/stacktrace/stacktrace.cc
+++ b/sdk/android/native_api/stacktrace/stacktrace.cc
@@ -27,9 +27,9 @@
#endif
#include "absl/base/attributes.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -92,7 +92,7 @@ struct SignalHandlerOutputState {
};
// Global lock to ensure only one thread gets interrupted at a time.
-ABSL_CONST_INIT rtc::GlobalLock g_signal_handler_lock;
+ABSL_CONST_INIT GlobalMutex g_signal_handler_lock(absl::kConstInit);
// Argument passed to the ThreadSignalHandler() from the sampling thread to the
// sampled (stopped) thread. This value is set just before sending signal to the
// thread and reset when handler is done.
@@ -153,7 +153,7 @@ const char* CaptureRawStacktrace(int pid,
act.sa_flags = SA_RESTART | SA_SIGINFO;
sigemptyset(&act.sa_mask);
- rtc::GlobalLockScope ls(&g_signal_handler_lock);
+ GlobalMutexLock ls(&g_signal_handler_lock);
g_signal_handler_output_state = params;
if (sigaction(kSignal, &act, &old_act) != 0)
diff --git a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
index 88dfad4a88..31da60cbc3 100644
--- a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
+++ b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
@@ -16,9 +16,9 @@
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_device/include/mock_audio_transport.h"
#include "rtc_base/arraysize.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/format_macros.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
#include "sdk/android/generated_native_unittests_jni/BuildInfo_jni.h"
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
@@ -179,7 +179,7 @@ class FifoAudioStream : public AudioStreamInterface {
}
int16_t* memory = new int16_t[frames_per_buffer_];
memcpy(static_cast<int16_t*>(&memory[0]), source, bytes_per_buffer_);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
fifo_->push_back(memory);
const size_t size = fifo_->size();
if (size > largest_size_) {
@@ -195,7 +195,7 @@ class FifoAudioStream : public AudioStreamInterface {
void Read(void* destination, size_t num_frames) override {
ASSERT_EQ(num_frames, frames_per_buffer_);
PRINTD("-");
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (fifo_->empty()) {
memset(destination, 0, bytes_per_buffer_);
} else {
@@ -226,7 +226,7 @@ class FifoAudioStream : public AudioStreamInterface {
}
using AudioBufferList = std::list<int16_t*>;
- rtc::CriticalSection lock_;
+ Mutex lock_;
const size_t frames_per_buffer_;
const size_t bytes_per_buffer_;
std::unique_ptr<AudioBufferList> fifo_;
diff --git a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
index e3b5e78cdd..fcd9c9b8f1 100644
--- a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
+++ b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
@@ -16,12 +16,12 @@
#include <memory>
#include <vector>
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/string_utils.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/inline.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
@@ -118,15 +118,15 @@ class RtcEventDeadlock : public DeadlockInterface {
class RtcCriticalSectionDeadlock : public DeadlockInterface {
public:
RtcCriticalSectionDeadlock()
- : critscope_(std::make_unique<rtc::CritScope>(&crit_)) {}
+ : mutex_lock_(std::make_unique<MutexLock>(&mutex_)) {}
private:
- void Deadlock() override { rtc::CritScope lock(&crit_); }
+ void Deadlock() override { MutexLock lock(&mutex_); }
- void Release() override { critscope_.reset(); }
+ void Release() override { mutex_lock_.reset(); }
- rtc::CriticalSection crit_;
- std::unique_ptr<rtc::CritScope> critscope_;
+ Mutex mutex_;
+ std::unique_ptr<MutexLock> mutex_lock_;
};
class SpinDeadlock : public DeadlockInterface {
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
index 9028cc3ae4..cd43098015 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -10,6 +10,7 @@
package org.webrtc;
+import android.annotation.TargetApi;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.os.Build;
@@ -28,7 +29,8 @@ class MediaCodecUtils {
static final String INTEL_PREFIX = "OMX.Intel.";
static final String NVIDIA_PREFIX = "OMX.Nvidia.";
static final String QCOM_PREFIX = "OMX.qcom.";
- static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {"OMX.google.", "OMX.SEC."};
+ static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
+ "OMX.google.", "OMX.SEC.", "c2.android"};
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
@@ -97,6 +99,36 @@ class MediaCodecUtils {
}
}
+ static boolean isHardwareAccelerated(MediaCodecInfo info) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isHardwareAcceleratedQOrHigher(info);
+ }
+ return !isSoftwareOnly(info);
+ }
+
+ @TargetApi(29)
+ private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isHardwareAccelerated();
+ }
+
+ static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isSoftwareOnlyQOrHigher(codecInfo);
+ }
+ String name = codecInfo.getName();
+ for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
+ if (name.startsWith(prefix)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @TargetApi(29)
+ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isSoftwareOnly();
+ }
+
private MediaCodecUtils() {
// This class should not be instantiated.
}
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
index 6f575be2ce..6b69b264ea 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -219,14 +219,14 @@ class WebRtcAudioEffects {
// Returns true if an effect of the specified type is available. Functionally
// equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
// faster as it avoids the expensive OS call to enumerate effects.
- private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) {
+ private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) {
Descriptor[] effects = getAvailableEffects();
if (effects == null) {
return false;
}
for (Descriptor d : effects) {
if (d.type.equals(effectType)) {
- return !d.uuid.equals(blackListedUuid);
+ return !d.uuid.equals(blockListedUuid);
}
}
return false;
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index edc9dd179d..94eb2a4357 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -183,7 +183,7 @@ class WebRtcAudioTrack {
}
@CalledByNative
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -212,14 +212,14 @@ class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
// Ensure that prevision audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -241,7 +241,7 @@ class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -250,11 +250,11 @@ class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
@CalledByNative
@@ -423,6 +423,14 @@ class WebRtcAudioTrack {
}
}
+ @CalledByNative
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
private void logBufferCapacityInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
Logging.d(TAG,
diff --git a/sdk/android/src/jni/android_network_monitor.cc b/sdk/android/src/jni/android_network_monitor.cc
index 0f02d179e7..69e89564e3 100644
--- a/sdk/android/src/jni/android_network_monitor.cc
+++ b/sdk/android/src/jni/android_network_monitor.cc
@@ -76,7 +76,9 @@ static NetworkType GetNetworkTypeFromJava(
return NetworkType::NETWORK_UNKNOWN;
}
-static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
+static rtc::AdapterType AdapterTypeFromNetworkType(
+ NetworkType network_type,
+ bool surface_cellular_types) {
switch (network_type) {
case NETWORK_UNKNOWN:
return rtc::ADAPTER_TYPE_UNKNOWN;
@@ -85,9 +87,17 @@ static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
case NETWORK_WIFI:
return rtc::ADAPTER_TYPE_WIFI;
case NETWORK_5G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_5G
+ : rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_4G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_4G
+ : rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_3G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_3G
+ : rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_2G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_2G
+ : rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_UNKNOWN_CELLULAR:
return rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_VPN:
@@ -196,6 +206,8 @@ void AndroidNetworkMonitor::Start() {
return;
}
started_ = true;
+ surface_cellular_types_ =
+ webrtc::field_trial::IsEnabled("WebRTC-SurfaceCellularTypes");
find_network_handle_without_ipv6_temporary_part_ =
webrtc::field_trial::IsEnabled(
"WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart");
@@ -347,10 +359,11 @@ void AndroidNetworkMonitor::OnNetworkConnected_w(
const NetworkInformation& network_info) {
RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString();
adapter_type_by_name_[network_info.interface_name] =
- AdapterTypeFromNetworkType(network_info.type);
+ AdapterTypeFromNetworkType(network_info.type, surface_cellular_types_);
if (network_info.type == NETWORK_VPN) {
vpn_underlying_adapter_type_by_name_[network_info.interface_name] =
- AdapterTypeFromNetworkType(network_info.underlying_type_for_vpn);
+ AdapterTypeFromNetworkType(network_info.underlying_type_for_vpn,
+ surface_cellular_types_);
}
network_info_by_handle_[network_info.handle] = network_info;
for (const rtc::IPAddress& address : network_info.ip_addresses) {
diff --git a/sdk/android/src/jni/android_network_monitor.h b/sdk/android/src/jni/android_network_monitor.h
index ee84588d7e..1d795df991 100644
--- a/sdk/android/src/jni/android_network_monitor.h
+++ b/sdk/android/src/jni/android_network_monitor.h
@@ -114,6 +114,7 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
bool find_network_handle_without_ipv6_temporary_part_;
+ bool surface_cellular_types_;
};
class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc
index b4cb184177..d77488ff4a 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.cc
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -516,7 +516,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
}
// Returns true if the device both supports built in AEC and the device
- // is not blacklisted.
+ // is not blocklisted.
// Currently, if OpenSL ES is used in both directions, this method will still
// report the correct value and it has the correct effect. As an example:
// a device supports built in AEC and this method returns true. Libjingle
@@ -544,7 +544,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
}
// Returns true if the device both supports built in NS and the device
- // is not blacklisted.
+ // is not blocklisted.
// TODO(henrika): add implementation for OpenSL ES based audio as well.
// In addition, see comments for BuiltInAECIsAvailable().
bool BuiltInNSIsAvailable() const override {
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 8f0a041711..d5b880b1b0 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -20,6 +20,7 @@
#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -89,12 +90,33 @@ int32_t AudioTrackJni::InitPlayout() {
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
- if (!Java_WebRtcAudioTrack_initPlayout(
- env_, j_audio_track_, audio_parameters_.sample_rate(),
- static_cast<int>(audio_parameters_.channels()), buffer_size_factor)) {
+ int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+ env_, j_audio_track_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+ if (requested_buffer_size_bytes < 0) {
RTC_LOG(LS_ERROR) << "InitPlayout failed";
return -1;
}
+ // Update UMA histograms for both the requested and actual buffer size.
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ const int sample_rate = audio_parameters_.sample_rate() <= 0
+ ? 48000
+ : audio_parameters_.sample_rate();
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+
initialized_ = true;
return 0;
}
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 0ae39fbf66..9cebda3813 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -264,11 +264,6 @@ void JavaToNativeRTCConfiguration(
rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics);
rtc_config->active_reset_srtp_params =
Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config);
- rtc_config->use_media_transport =
- Java_RTCConfiguration_getUseMediaTransport(jni, j_rtc_config);
- rtc_config->use_media_transport_for_data_channels =
- Java_RTCConfiguration_getUseMediaTransportForDataChannels(jni,
- j_rtc_config);
rtc_config->crypto_options =
JavaToNativeOptionalCryptoOptions(jni, j_crypto_options);
@@ -763,9 +758,9 @@ static jboolean JNI_PeerConnection_SetBitrate(
const JavaParamRef<jobject>& j_min,
const JavaParamRef<jobject>& j_current,
const JavaParamRef<jobject>& j_max) {
- PeerConnectionInterface::BitrateParameters params;
+ BitrateSettings params;
params.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min);
- params.current_bitrate_bps = JavaToNativeOptionalInt(jni, j_current);
+ params.start_bitrate_bps = JavaToNativeOptionalInt(jni, j_current);
params.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max);
return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok();
}
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc
index 48dd6e41d8..9a42a80ef8 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -246,7 +246,7 @@ static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
// Following parameters are optional:
// |audio_device_module|, |jencoder_factory|, |jdecoder_factory|,
-// |audio_processor|, |media_transport_factory|, |fec_controller_factory|,
+// |audio_processor|, |fec_controller_factory|,
// |network_state_predictor_factory|, |neteq_factory|.
ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
JNIEnv* jni,
@@ -263,7 +263,6 @@ ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
network_controller_factory,
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory,
- std::unique_ptr<MediaTransportFactory> media_transport_factory,
std::unique_ptr<NetEqFactory> neteq_factory) {
// talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
// ThreadManager only WrapCurrentThread()s the thread where it is first
@@ -310,7 +309,6 @@ ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
std::move(network_controller_factory);
dependencies.network_state_predictor_factory =
std::move(network_state_predictor_factory);
- dependencies.media_transport_factory = std::move(media_transport_factory);
dependencies.neteq_factory = std::move(neteq_factory);
cricket::MediaEngineDependencies media_dependencies;
@@ -355,7 +353,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
jlong native_fec_controller_factory,
jlong native_network_controller_factory,
jlong native_network_state_predictor_factory,
- jlong native_media_transport_factory,
jlong native_neteq_factory) {
rtc::scoped_refptr<AudioProcessing> audio_processor =
reinterpret_cast<AudioProcessing*>(native_audio_processor);
@@ -372,8 +369,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
native_network_controller_factory),
TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>(
native_network_state_predictor_factory),
- TakeOwnershipOfUniquePtr<MediaTransportFactory>(
- native_media_transport_factory),
TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory));
}
diff --git a/sdk/android/src/jni/video_decoder_wrapper.cc b/sdk/android/src/jni/video_decoder_wrapper.cc
index 54c6e1574c..3aa18abbd7 100644
--- a/sdk/android/src/jni/video_decoder_wrapper.cc
+++ b/sdk/android/src/jni/video_decoder_wrapper.cc
@@ -109,7 +109,7 @@ int32_t VideoDecoderWrapper::Decode(
frame_extra_info.qp =
qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
{
- rtc::CritScope cs(&frame_extra_infos_lock_);
+ MutexLock lock(&frame_extra_infos_lock_);
frame_extra_infos_.push_back(frame_extra_info);
}
@@ -135,7 +135,7 @@ int32_t VideoDecoderWrapper::Release() {
jni, Java_VideoDecoder_release(jni, decoder_));
RTC_LOG(LS_INFO) << "release: " << status;
{
- rtc::CritScope cs(&frame_extra_infos_lock_);
+ MutexLock lock(&frame_extra_infos_lock_);
frame_extra_infos_.clear();
}
initialized_ = false;
@@ -163,7 +163,7 @@ void VideoDecoderWrapper::OnDecodedFrame(
FrameExtraInfo frame_extra_info;
{
- rtc::CritScope cs(&frame_extra_infos_lock_);
+ MutexLock lock(&frame_extra_infos_lock_);
do {
if (frame_extra_infos_.empty()) {
diff --git a/sdk/android/src/jni/video_decoder_wrapper.h b/sdk/android/src/jni/video_decoder_wrapper.h
index a7f686872c..f5c4787a6e 100644
--- a/sdk/android/src/jni/video_decoder_wrapper.h
+++ b/sdk/android/src/jni/video_decoder_wrapper.h
@@ -12,12 +12,14 @@
#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
#include <jni.h>
+
#include <atomic>
#include <deque>
#include "api/video_codecs/video_decoder.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/jni_helpers.h"
@@ -103,7 +105,7 @@ class VideoDecoderWrapper : public VideoDecoder {
// Accessed both on the decoder thread and the callback thread.
std::atomic<bool> qp_parsing_enabled_;
- rtc::CriticalSection frame_extra_infos_lock_;
+ Mutex frame_extra_infos_lock_;
std::deque<FrameExtraInfo> frame_extra_infos_
RTC_GUARDED_BY(frame_extra_infos_lock_);
};
diff --git a/sdk/android/tests/resources/robolectric.properties b/sdk/android/tests/resources/robolectric.properties
index 3acb7e5d59..a9bc625b18 100644
--- a/sdk/android/tests/resources/robolectric.properties
+++ b/sdk/android/tests/resources/robolectric.properties
@@ -1 +1 @@
-sdk=19,21,25,26
+sdk=21,25,26
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h
index 4e9c674ef8..86eaa6cee5 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration.h
+++ b/sdk/objc/api/peerconnection/RTCConfiguration.h
@@ -198,18 +198,6 @@ RTC_OBJC_EXPORT
@property(nonatomic, assign) BOOL allowCodecSwitching;
/**
- * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection
- * that it should use the MediaTransportInterface.
- */
-@property(nonatomic, assign) BOOL useMediaTransport;
-
-/**
- * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection
- * that it should use the MediaTransportInterface for data channels.
- */
-@property(nonatomic, assign) BOOL useMediaTransportForDataChannels;
-
-/**
* Defines advanced optional cryptographic settings related to SRTP and
* frame encryption for native WebRTC. Setting this will overwrite any
* options set through the PeerConnectionFactory (which is deprecated).
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm
index 52c1450505..55abbcdb18 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration.mm
+++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm
@@ -52,8 +52,6 @@
@synthesize turnCustomizer = _turnCustomizer;
@synthesize activeResetSrtpParams = _activeResetSrtpParams;
@synthesize allowCodecSwitching = _allowCodecSwitching;
-@synthesize useMediaTransport = _useMediaTransport;
-@synthesize useMediaTransportForDataChannels = _useMediaTransportForDataChannels;
@synthesize cryptoOptions = _cryptoOptions;
@synthesize rtcpAudioReportIntervalMs = _rtcpAudioReportIntervalMs;
@synthesize rtcpVideoReportIntervalMs = _rtcpVideoReportIntervalMs;
@@ -106,8 +104,6 @@
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
_iceBackupCandidatePairPingInterval =
config.ice_backup_candidate_pair_ping_interval;
- _useMediaTransport = config.use_media_transport;
- _useMediaTransportForDataChannels = config.use_media_transport_for_data_channels;
_keyType = RTCEncryptionKeyTypeECDSA;
_iceCandidatePoolSize = config.ice_candidate_pool_size;
_shouldPruneTurnPorts = config.prune_turn_ports;
@@ -143,7 +139,7 @@
- (NSString *)description {
static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
@"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
- @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n%d\n}\n";
+ @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n}\n";
return [NSString
stringWithFormat:formatString,
@@ -169,7 +165,6 @@
_disableIPV6OnWiFi,
_maxIPv6Networks,
_activeResetSrtpParams,
- _useMediaTransport,
_enableDscp];
}
@@ -208,8 +203,6 @@
_iceConnectionReceivingTimeout;
nativeConfig->ice_backup_candidate_pair_ping_interval =
_iceBackupCandidatePairPingInterval;
- nativeConfig->use_media_transport = _useMediaTransport;
- nativeConfig->use_media_transport_for_data_channels = _useMediaTransportForDataChannels;
rtc::KeyType keyType =
[[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
if (_certificate != nullptr) {
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
index 46a6e3c780..8ded55200e 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -28,7 +28,8 @@ class StatsCollectorCallbackAdapter : public RTCStatsCollectorCallback {
void OnStatsDelivered(const rtc::scoped_refptr<const RTCStatsReport> &report) override {
RTC_DCHECK(completion_handler_);
- RTCStatisticsReport *statisticsReport = [[RTCStatisticsReport alloc] initWithReport:*report];
+ RTC_OBJC_TYPE(RTCStatisticsReport) *statisticsReport =
+ [[RTC_OBJC_TYPE(RTCStatisticsReport) alloc] initWithReport:*report];
completion_handler_(statisticsReport);
completion_handler_ = nil;
}
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h
index cfc0a3d824..bb8d87bc2d 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h
@@ -25,7 +25,7 @@
@class RTC_OBJC_TYPE(RTCRtpTransceiver);
@class RTC_OBJC_TYPE(RTCRtpTransceiverInit);
@class RTC_OBJC_TYPE(RTCSessionDescription);
-@class RTCStatisticsReport;
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
typedef NS_ENUM(NSInteger, RTCRtpMediaType);
@@ -341,7 +341,7 @@ RTC_OBJC_EXPORT
@end
-typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *);
+typedef void (^RTCStatisticsCompletionHandler)(RTC_OBJC_TYPE(RTCStatisticsReport) *);
@interface RTC_OBJC_TYPE (RTCPeerConnection)
(Stats)
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index fa68d08e74..42a43a79cd 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -29,7 +29,6 @@
#include "api/jsep_ice_candidate.h"
#include "api/rtc_event_log_output_file.h"
-#include "api/transport/media/media_transport_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_conversions.h"
@@ -559,12 +558,12 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
currentBitrateBps:(nullable NSNumber *)currentBitrateBps
maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
- webrtc::PeerConnectionInterface::BitrateParameters params;
+ webrtc::BitrateSettings params;
if (minBitrateBps != nil) {
params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
}
if (currentBitrateBps != nil) {
- params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
+ params.start_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
}
if (maxBitrateBps != nil) {
params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
index c2aab0be56..1d3b82550a 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
@@ -17,7 +17,6 @@ namespace webrtc {
class AudioDeviceModule;
class AudioEncoderFactory;
class AudioDecoderFactory;
-class MediaTransportFactory;
class NetworkControllerFactoryInterface;
class VideoEncoderFactory;
class VideoDecoderFactory;
@@ -65,30 +64,12 @@ NS_ASSUME_NONNULL_BEGIN
audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
audioProcessingModule:
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
- mediaTransportFactory:
- (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
-
-- (instancetype)
- initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
networkControllerFactory:(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
- networkControllerFactory
- mediaTransportFactory:
- (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
+ networkControllerFactory;
- (instancetype)
initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
/** Initialize an RTCPeerConnection with a configuration, constraints, and
* dependencies.
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
index 2e34b05fed..4ce38dbd7f 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
@@ -52,7 +52,6 @@
// C++ target.
// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++
// API layer.
-#include "api/transport/media/media_transport_interface.h"
#include "media/engine/webrtc_media_engine.h" // nogncheck
@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) {
@@ -84,15 +83,13 @@
nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(
RTCVideoDecoderFactoryH264) alloc] init])
audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr];
+ audioProcessingModule:nullptr];
#endif
}
- (instancetype)
initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory {
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
#ifdef HAVE_NO_MEDIA
return [self initWithNoMedia];
#else
@@ -109,18 +106,9 @@
nativeVideoEncoderFactory:std::move(native_encoder_factory)
nativeVideoDecoderFactory:std::move(native_decoder_factory)
audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr
- mediaTransportFactory:std::move(mediaTransportFactory)];
+ audioProcessingModule:nullptr];
#endif
}
-- (instancetype)
- initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
- return [self initWithEncoderFactory:encoderFactory
- decoderFactory:decoderFactory
- mediaTransportFactory:nullptr];
-}
-
- (instancetype)initNative {
if (self = [super init]) {
_networkThread = rtc::Thread::CreateWithSocketServer();
@@ -170,30 +158,7 @@
nativeVideoDecoderFactory:std::move(videoDecoderFactory)
audioDeviceModule:audioDeviceModule
audioProcessingModule:audioProcessingModule
- mediaTransportFactory:nullptr];
-}
-
-- (instancetype)initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)
- mediaTransportFactory {
- return [self initWithNativeAudioEncoderFactory:audioEncoderFactory
- nativeAudioDecoderFactory:audioDecoderFactory
- nativeVideoEncoderFactory:std::move(videoEncoderFactory)
- nativeVideoDecoderFactory:std::move(videoDecoderFactory)
- audioDeviceModule:audioDeviceModule
- audioProcessingModule:audioProcessingModule
- networkControllerFactory:nullptr
- mediaTransportFactory:std::move(mediaTransportFactory)];
+ networkControllerFactory:nullptr];
}
- (instancetype)initWithNativeAudioEncoderFactory:
(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
@@ -208,9 +173,7 @@
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
networkControllerFactory:
(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
- networkControllerFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)
- mediaTransportFactory {
+ networkControllerFactory {
if (self = [self initNative]) {
webrtc::PeerConnectionFactoryDependencies dependencies;
dependencies.network_thread = _networkThread.get();
@@ -235,7 +198,6 @@
dependencies.event_log_factory =
std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
dependencies.network_controller_factory = std::move(networkControllerFactory);
- dependencies.media_transport_factory = std::move(mediaTransportFactory);
#endif
_nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
index 8f52bea8e3..991ec5a41c 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
@@ -13,7 +13,6 @@
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_encoder_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
@@ -26,7 +25,6 @@
rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
- std::unique_ptr<webrtc::MediaTransportFactory> _mediaTransportFactory;
}
+ (RTCPeerConnectionFactoryBuilder *)builder {
@@ -41,8 +39,7 @@
nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
audioDeviceModule:_audioDeviceModule
- audioProcessingModule:_audioProcessingModule
- mediaTransportFactory:std::move(_mediaTransportFactory)];
+ audioProcessingModule:_audioProcessingModule];
}
- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.h b/sdk/objc/api/peerconnection/RTCRtpSender.h
index 41bb083d2e..fcdf199869 100644
--- a/sdk/objc/api/peerconnection/RTCRtpSender.h
+++ b/sdk/objc/api/peerconnection/RTCRtpSender.h
@@ -21,8 +21,8 @@ RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCRtpSender)<NSObject>
- /** A unique identifier for this sender. */
- @property(nonatomic, readonly) NSString *senderId;
+/** A unique identifier for this sender. */
+@property(nonatomic, readonly) NSString *senderId;
/** The currently active RTCRtpParameters, as defined in
* https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h b/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
index 0220d186b7..47c5241d51 100644
--- a/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
+++ b/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
@@ -12,8 +12,8 @@
#include "api/stats/rtc_stats_report.h"
-@interface RTCStatisticsReport (Private)
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
-- (instancetype)initWithReport:(const webrtc::RTCStatsReport &)report;
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report;
@end
diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.h b/sdk/objc/api/peerconnection/RTCStatisticsReport.h
index 6fbd59b112..38d93e8771 100644
--- a/sdk/objc/api/peerconnection/RTCStatisticsReport.h
+++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.h
@@ -10,25 +10,29 @@
#import <Foundation/Foundation.h>
-@class RTCStatistics;
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatistics);
NS_ASSUME_NONNULL_BEGIN
/** A statistics report. Encapsulates a number of RTCStatistics objects. */
-@interface RTCStatisticsReport : NSObject
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) : NSObject
/** The timestamp of the report in microseconds since 1970-01-01T00:00:00Z. */
@property(nonatomic, readonly) CFTimeInterval timestamp_us;
/** RTCStatistics objects by id. */
-@property(nonatomic, readonly) NSDictionary<NSString *, RTCStatistics *> *statistics;
+@property(nonatomic, readonly) NSDictionary<NSString *, RTC_OBJC_TYPE(RTCStatistics) *> *statistics;
- (instancetype)init NS_UNAVAILABLE;
@end
/** A part of a report (a subreport) covering a certain area. */
-@interface RTCStatistics : NSObject
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatistics) : NSObject
/** The id of this subreport, e.g. "RTCMediaStreamTrack_receiver_2". */
@property(nonatomic, readonly) NSString *id;
diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
index 526976707d..ab8006d9bd 100644
--- a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
+++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
@@ -100,7 +100,7 @@ NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
}
} // namespace webrtc
-@implementation RTCStatistics
+@implementation RTC_OBJC_TYPE (RTCStatistics)
@synthesize id = _id;
@synthesize timestamp_us = _timestamp_us;
@@ -139,7 +139,7 @@ NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
@end
-@implementation RTCStatisticsReport
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport)
@synthesize timestamp_us = _timestamp_us;
@synthesize statistics = _statistics;
@@ -151,16 +151,17 @@ NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
@end
-@implementation RTCStatisticsReport (Private)
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
-- (instancetype)initWithReport:(const webrtc::RTCStatsReport &)report {
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report {
if (self = [super init]) {
_timestamp_us = report.timestamp_us();
NSMutableDictionary *statisticsById =
[NSMutableDictionary dictionaryWithCapacity:report.size()];
for (const auto &stat : report) {
- RTCStatistics *statistics = [[RTCStatistics alloc] initWithStatistics:stat];
+ RTC_OBJC_TYPE(RTCStatistics) *statistics =
+ [[RTC_OBJC_TYPE(RTCStatistics) alloc] initWithStatistics:stat];
statisticsById[statistics.id] = statistics;
}
_statistics = [statisticsById copy];
diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm
index 74b57acd61..520b2d1d37 100644
--- a/sdk/objc/components/audio/RTCAudioSession.mm
+++ b/sdk/objc/components/audio/RTCAudioSession.mm
@@ -16,7 +16,7 @@
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecated/recursive_critical_section.h"
#import "RTCAudioSessionConfiguration.h"
#import "base/RTCLogging.h"
@@ -35,7 +35,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTC_OBJC_TYPE (RTCAudioSession) {
- rtc::CriticalSection _crit;
+ rtc::RecursiveCriticalSection _crit;
AVAudioSession *_session;
volatile int _activationCount;
volatile int _lockRecursionCount;
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm
index b70c4d0e50..9dd4a9da1c 100644
--- a/sdk/objc/native/src/audio/audio_device_ios.mm
+++ b/sdk/objc/native/src/audio/audio_device_ios.mm
@@ -21,7 +21,6 @@
#include "rtc_base/atomic_ops.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h
index 625eec284e..fcd3bd7bc1 100644
--- a/sdk/objc/native/src/audio/audio_device_module_ios.h
+++ b/sdk/objc/native/src/audio/audio_device_module_ios.h
@@ -19,7 +19,6 @@
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
namespace webrtc {
diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
index 7d19d4095d..14131dc38d 100644
--- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
+++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
@@ -22,7 +22,6 @@ extern "C" {
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
@@ -50,8 +49,7 @@ extern "C" {
nativeVideoEncoderFactory:nullptr
nativeVideoDecoderFactory:nullptr
audioDeviceModule:nullptr
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr]);
+ audioProcessingModule:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
@@ -72,8 +70,7 @@ extern "C" {
nativeVideoEncoderFactory:nullptr
nativeVideoDecoderFactory:nullptr
audioDeviceModule:nullptr
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr]);
+ audioProcessingModule:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
diff --git a/stats/rtcstats_objects.cc b/stats/rtcstats_objects.cc
index 453acce925..2fe85468c8 100644
--- a/stats/rtcstats_objects.cc
+++ b/stats/rtcstats_objects.cc
@@ -124,7 +124,7 @@ RTCCodecStats::~RTCCodecStats() {}
WEBRTC_RTCSTATS_IMPL(RTCDataChannelStats, RTCStats, "data-channel",
&label,
&protocol,
- &datachannelid,
+ &data_channel_identifier,
&state,
&messages_sent,
&bytes_sent,
@@ -140,7 +140,7 @@ RTCDataChannelStats::RTCDataChannelStats(std::string&& id, int64_t timestamp_us)
: RTCStats(std::move(id), timestamp_us),
label("label"),
protocol("protocol"),
- datachannelid("datachannelid"),
+ data_channel_identifier("dataChannelIdentifier"),
state("state"),
messages_sent("messagesSent"),
bytes_sent("bytesSent"),
@@ -151,7 +151,7 @@ RTCDataChannelStats::RTCDataChannelStats(const RTCDataChannelStats& other)
: RTCStats(other.id(), other.timestamp_us()),
label(other.label),
protocol(other.protocol),
- datachannelid(other.datachannelid),
+ data_channel_identifier(other.data_channel_identifier),
state(other.state),
messages_sent(other.messages_sent),
bytes_sent(other.bytes_sent),
@@ -604,6 +604,18 @@ WEBRTC_RTCSTATS_IMPL(
&packets_lost,
&last_packet_received_timestamp,
&jitter,
+ &jitter_buffer_delay,
+ &jitter_buffer_emitted_count,
+ &total_samples_received,
+ &concealed_samples,
+ &silent_concealed_samples,
+ &concealment_events,
+ &inserted_samples_for_deceleration,
+ &removed_samples_for_acceleration,
+ &audio_level,
+ &total_audio_energy,
+ &total_samples_duration,
+ &frames_received,
&round_trip_time,
&packets_discarded,
&packets_repaired,
@@ -615,8 +627,13 @@ WEBRTC_RTCSTATS_IMPL(
&burst_discard_rate,
&gap_loss_rate,
&gap_discard_rate,
+ &frame_width,
+ &frame_height,
+ &frame_bit_depth,
+ &frames_per_second,
&frames_decoded,
&key_frames_decoded,
+ &frames_dropped,
&total_decode_time,
&total_inter_frame_delay,
&total_squared_inter_frame_delay,
@@ -640,6 +657,18 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id,
packets_lost("packetsLost"),
last_packet_received_timestamp("lastPacketReceivedTimestamp"),
jitter("jitter"),
+ jitter_buffer_delay("jitterBufferDelay"),
+ jitter_buffer_emitted_count("jitterBufferEmittedCount"),
+ total_samples_received("totalSamplesReceived"),
+ concealed_samples("concealedSamples"),
+ silent_concealed_samples("silentConcealedSamples"),
+ concealment_events("concealmentEvents"),
+ inserted_samples_for_deceleration("insertedSamplesForDeceleration"),
+ removed_samples_for_acceleration("removedSamplesForAcceleration"),
+ audio_level("audioLevel"),
+ total_audio_energy("totalAudioEnergy"),
+ total_samples_duration("totalSamplesDuration"),
+ frames_received("framesReceived"),
round_trip_time("roundTripTime"),
packets_discarded("packetsDiscarded"),
packets_repaired("packetsRepaired"),
@@ -651,8 +680,13 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id,
burst_discard_rate("burstDiscardRate"),
gap_loss_rate("gapLossRate"),
gap_discard_rate("gapDiscardRate"),
+ frame_width("frameWidth"),
+ frame_height("frameHeight"),
+ frame_bit_depth("frameBitDepth"),
+ frames_per_second("framesPerSecond"),
frames_decoded("framesDecoded"),
key_frames_decoded("keyFramesDecoded"),
+ frames_dropped("framesDropped"),
total_decode_time("totalDecodeTime"),
total_inter_frame_delay("totalInterFrameDelay"),
total_squared_inter_frame_delay("totalSquaredInterFrameDelay"),
@@ -671,6 +705,19 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(
packets_lost(other.packets_lost),
last_packet_received_timestamp(other.last_packet_received_timestamp),
jitter(other.jitter),
+ jitter_buffer_delay(other.jitter_buffer_delay),
+ jitter_buffer_emitted_count(other.jitter_buffer_emitted_count),
+ total_samples_received(other.total_samples_received),
+ concealed_samples(other.concealed_samples),
+ silent_concealed_samples(other.silent_concealed_samples),
+ concealment_events(other.concealment_events),
+ inserted_samples_for_deceleration(
+ other.inserted_samples_for_deceleration),
+ removed_samples_for_acceleration(other.removed_samples_for_acceleration),
+ audio_level(other.audio_level),
+ total_audio_energy(other.total_audio_energy),
+ total_samples_duration(other.total_samples_duration),
+ frames_received(other.frames_received),
round_trip_time(other.round_trip_time),
packets_discarded(other.packets_discarded),
packets_repaired(other.packets_repaired),
@@ -682,8 +729,13 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(
burst_discard_rate(other.burst_discard_rate),
gap_loss_rate(other.gap_loss_rate),
gap_discard_rate(other.gap_discard_rate),
+ frame_width(other.frame_width),
+ frame_height(other.frame_height),
+ frame_bit_depth(other.frame_bit_depth),
+ frames_per_second(other.frames_per_second),
frames_decoded(other.frames_decoded),
key_frames_decoded(other.key_frames_decoded),
+ frames_dropped(other.frames_dropped),
total_decode_time(other.total_decode_time),
total_inter_frame_delay(other.total_inter_frame_delay),
total_squared_inter_frame_delay(other.total_squared_inter_frame_delay),
@@ -906,7 +958,9 @@ RTCVideoSourceStats::~RTCVideoSourceStats() {}
// clang-format off
WEBRTC_RTCSTATS_IMPL(RTCTransportStats, RTCStats, "transport",
&bytes_sent,
+ &packets_sent,
&bytes_received,
+ &packets_received,
&rtcp_transport_stats_id,
&dtls_state,
&selected_candidate_pair_id,
@@ -925,7 +979,9 @@ RTCTransportStats::RTCTransportStats(const std::string& id,
RTCTransportStats::RTCTransportStats(std::string&& id, int64_t timestamp_us)
: RTCStats(std::move(id), timestamp_us),
bytes_sent("bytesSent"),
+ packets_sent("packetsSent"),
bytes_received("bytesReceived"),
+ packets_received("packetsReceived"),
rtcp_transport_stats_id("rtcpTransportStatsId"),
dtls_state("dtlsState"),
selected_candidate_pair_id("selectedCandidatePairId"),
@@ -939,7 +995,9 @@ RTCTransportStats::RTCTransportStats(std::string&& id, int64_t timestamp_us)
RTCTransportStats::RTCTransportStats(const RTCTransportStats& other)
: RTCStats(other.id(), other.timestamp_us()),
bytes_sent(other.bytes_sent),
+ packets_sent(other.packets_sent),
bytes_received(other.bytes_received),
+ packets_received(other.packets_received),
rtcp_transport_stats_id(other.rtcp_transport_stats_id),
dtls_state(other.dtls_state),
selected_candidate_pair_id(other.selected_candidate_pair_id),
diff --git a/style-guide.md b/style-guide.md
index 901217a86d..80c3302156 100644
--- a/style-guide.md
+++ b/style-guide.md
@@ -31,6 +31,10 @@ WebRTC is written in C++14, but with some restrictions:
[chromium-cpp]: https://chromium-cpp.appspot.com/
+Unlike the Chromium and Google C++ style guides, we do not allow C++20-style
+designated initializers, because we want to stay compatible with compilers that
+do not yet support them.
+
### Abseil
You may use a subset of the utilities provided by the [Abseil][abseil]
diff --git a/system_wrappers/BUILD.gn b/system_wrappers/BUILD.gn
index 1ff2ddd4fd..2d50111be8 100644
--- a/system_wrappers/BUILD.gn
+++ b/system_wrappers/BUILD.gn
@@ -35,11 +35,12 @@ rtc_library("system_wrappers") {
"../api/units:timestamp",
"../modules:module_api_public",
"../rtc_base:checks",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:rw_lock_wrapper",
"../rtc_base/system:arch",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_android) {
if (build_with_mozilla) {
@@ -92,8 +93,8 @@ rtc_library("field_trial") {
"../rtc_base:checks",
"../rtc_base:logging",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("metrics") {
@@ -106,6 +107,7 @@ rtc_library("metrics") {
deps = [
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ "../rtc_base/synchronization:mutex",
]
}
diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc
index b0356fc40c..e0f4b401e8 100644
--- a/system_wrappers/source/clock.cc
+++ b/system_wrappers/source/clock.cc
@@ -17,7 +17,6 @@
#include <mmsystem.h>
-#include "rtc_base/critical_section.h"
#elif defined(WEBRTC_POSIX)
@@ -26,6 +25,7 @@
#endif // defined(WEBRTC_POSIX)
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/rw_lock_wrapper.h"
#include "rtc_base/time_utils.h"
@@ -150,7 +150,7 @@ class WindowsRealTimeClock : public RealTimeClock {
DWORD t;
LARGE_INTEGER elapsed_ms;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// time MUST be fetched inside the critical section to avoid non-monotonic
// last_time_ms_ values that'll register as incorrect wraparounds due to
// concurrent calls to GetTime.
@@ -200,7 +200,7 @@ class WindowsRealTimeClock : public RealTimeClock {
return ref;
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
DWORD last_time_ms_;
LONG num_timer_wraps_;
const ReferencePoint ref_point_;
diff --git a/system_wrappers/source/field_trial_unittest.cc b/system_wrappers/source/field_trial_unittest.cc
index fdabe1b7e6..ada6313e67 100644
--- a/system_wrappers/source/field_trial_unittest.cc
+++ b/system_wrappers/source/field_trial_unittest.cc
@@ -32,7 +32,7 @@ TEST(FieldTrialValidationTest, AcceptsValidInputs) {
EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/B/C/Audio/Enabled/"));
}
-TEST(FieldTrialValidationTest, RejectsBadInputs) {
+TEST(FieldTrialValidationDeathTest, RejectsBadInputs) {
// Bad delimiters
RTC_EXPECT_DEATH(InitFieldTrialsFromString("Audio/EnabledVideo/Disabled/"),
"Invalid field trials string:");
@@ -90,7 +90,7 @@ TEST(FieldTrialMergingTest, MergesValidInput) {
"Audio/Enabled/Video/Enabled/");
}
-TEST(FieldTrialMergingTest, DchecksBadInput) {
+TEST(FieldTrialMergingDeathTest, DchecksBadInput) {
RTC_EXPECT_DEATH(MergeFieldTrialsStrings("Audio/Enabled/", "garbage"),
"Invalid field trials string:");
}
diff --git a/system_wrappers/source/metrics.cc b/system_wrappers/source/metrics.cc
index 2383272887..d42833643d 100644
--- a/system_wrappers/source/metrics.cc
+++ b/system_wrappers/source/metrics.cc
@@ -11,7 +11,8 @@
#include <algorithm>
-#include "rtc_base/critical_section.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
// Default implementation of histogram methods for WebRTC clients that do not
@@ -38,7 +39,7 @@ class RtcHistogram {
sample = std::min(sample, max_);
sample = std::max(sample, min_ - 1); // Underflow bucket.
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
if (info_.samples.size() == kMaxSampleMapSize &&
info_.samples.find(sample) == info_.samples.end()) {
return;
@@ -48,7 +49,7 @@ class RtcHistogram {
// Returns a copy (or nullptr if there are no samples) and clears samples.
std::unique_ptr<SampleInfo> GetAndReset() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
if (info_.samples.empty())
return nullptr;
@@ -64,19 +65,19 @@ class RtcHistogram {
// Functions only for testing.
void Reset() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
info_.samples.clear();
}
int NumEvents(int sample) const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto it = info_.samples.find(sample);
return (it == info_.samples.end()) ? 0 : it->second;
}
int NumSamples() const {
int num_samples = 0;
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
for (const auto& sample : info_.samples) {
num_samples += sample.second;
}
@@ -84,20 +85,20 @@ class RtcHistogram {
}
int MinSample() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return (info_.samples.empty()) ? -1 : info_.samples.begin()->first;
}
std::map<int, int> Samples() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return info_.samples;
}
private:
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
const int min_;
const int max_;
- SampleInfo info_ RTC_GUARDED_BY(crit_);
+ SampleInfo info_ RTC_GUARDED_BY(mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogram);
};
@@ -111,7 +112,7 @@ class RtcHistogramMap {
int min,
int max,
int bucket_count) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
if (it != map_.end())
return reinterpret_cast<Histogram*>(it->second.get());
@@ -122,7 +123,7 @@ class RtcHistogramMap {
}
Histogram* GetEnumerationHistogram(const std::string& name, int boundary) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
if (it != map_.end())
return reinterpret_cast<Histogram*>(it->second.get());
@@ -134,7 +135,7 @@ class RtcHistogramMap {
void GetAndReset(
std::map<std::string, std::unique_ptr<SampleInfo>>* histograms) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
for (const auto& kv : map_) {
std::unique_ptr<SampleInfo> info = kv.second->GetAndReset();
if (info)
@@ -144,39 +145,39 @@ class RtcHistogramMap {
// Functions only for testing.
void Reset() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
for (const auto& kv : map_)
kv.second->Reset();
}
int NumEvents(const std::string& name, int sample) const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? 0 : it->second->NumEvents(sample);
}
int NumSamples(const std::string& name) const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? 0 : it->second->NumSamples();
}
int MinSample(const std::string& name) const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? -1 : it->second->MinSample();
}
std::map<int, int> Samples(const std::string& name) const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? std::map<int, int>() : it->second->Samples();
}
private:
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
std::map<std::string, std::unique_ptr<RtcHistogram>> map_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogramMap);
};
diff --git a/system_wrappers/source/metrics_unittest.cc b/system_wrappers/source/metrics_unittest.cc
index 9e5bc86ba9..7532b2ad83 100644
--- a/system_wrappers/source/metrics_unittest.cc
+++ b/system_wrappers/source/metrics_unittest.cc
@@ -114,7 +114,8 @@ TEST_F(MetricsTest, RtcHistogramsCounts_AddSample) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(MetricsTest, RtcHistogramsCounts_InvalidIndex) {
+using MetricsDeathTest = MetricsTest;
+TEST_F(MetricsDeathTest, RtcHistogramsCounts_InvalidIndex) {
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(-1, "Name", kSample), "");
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3, "Name", kSample), "");
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3u, "Name", kSample), "");
diff --git a/test/BUILD.gn b/test/BUILD.gn
index 34da8894f7..1cad688951 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -22,13 +22,13 @@ group("test") {
":test_renderer",
":test_support",
":video_test_common",
- "pc/e2e",
]
if (rtc_include_tests) {
deps += [
":test_main",
":test_support_unittests",
+ "pc/e2e",
]
}
}
@@ -67,11 +67,12 @@ rtc_library("frame_generator_impl") {
"../rtc_base:criticalsection",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_event",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:file_wrapper",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("frame_utils") {
@@ -125,10 +126,11 @@ rtc_library("video_test_common") {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/task_utils:repeating_task",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (!build_with_chromium) {
@@ -160,10 +162,8 @@ if (!build_with_chromium) {
"platform_video_capturer.cc",
"platform_video_capturer.h",
]
- deps = [
- ":video_test_common",
- "//third_party/abseil-cpp/absl/memory",
- ]
+ deps = [ ":video_test_common" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (is_mac || is_ios) {
deps += [ ":video_test_mac" ]
} else {
@@ -203,6 +203,7 @@ rtc_library("rtp_test_utils") {
"../rtc_base:criticalsection",
"../rtc_base:logging",
"../rtc_base:macromagic",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:arch",
]
}
@@ -218,6 +219,20 @@ rtc_library("field_trial") {
deps = [ "../system_wrappers:field_trial" ]
}
+rtc_library("explicit_key_value_config") {
+ sources = [
+ "explicit_key_value_config.cc",
+ "explicit_key_value_config.h",
+ ]
+
+ deps = [
+ "../api/transport:webrtc_key_value_config",
+ "../rtc_base:checks",
+ "../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
rtc_library("perf_test") {
visibility = [ "*" ]
testonly = true
@@ -235,9 +250,9 @@ rtc_library("perf_test") {
"../rtc_base:criticalsection",
"../rtc_base:logging",
"../rtc_base:rtc_numerics",
- "//third_party/abseil-cpp/absl/flags:flag",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (rtc_enable_protobuf) {
sources += [ "testsupport/perf_test_histogram_writer.cc" ]
deps += [
@@ -262,8 +277,8 @@ if (is_ios) {
deps = [
":perf_test",
"../sdk:helpers_objc",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
configs += [ ":test_support_objc_config" ]
}
@@ -359,8 +374,8 @@ rtc_library("video_test_support") {
"../rtc_base:rtc_event",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:file_wrapper",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!is_ios) {
deps += [ "//third_party:jpeg" ]
@@ -375,6 +390,16 @@ rtc_library("video_test_support") {
}
if (rtc_include_tests) {
+ rtc_library("resources_dir_flag") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "testsupport/resources_dir_flag.cc",
+ "testsupport/resources_dir_flag.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
+ }
+
rtc_library("test_main_lib") {
visibility = [ "*" ]
testonly = true
@@ -394,9 +419,12 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -406,13 +434,20 @@ if (rtc_include_tests) {
testonly = true
sources = [ "test_main.cc" ]
- deps = [
- ":test_main_lib",
+ deps = [ ":test_main_lib" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/debugging:failure_signal_handler",
"//third_party/abseil-cpp/absl/debugging:symbolize",
]
}
+ rtc_library("benchmark_main") {
+ testonly = true
+ sources = [ "benchmark_main.cc" ]
+
+ deps = [ "//third_party/google_benchmark" ]
+ }
+
rtc_library("test_support_test_artifacts") {
testonly = true
sources = [
@@ -423,6 +458,8 @@ if (rtc_include_tests) {
":fileutils",
"../rtc_base:logging",
"../rtc_base/system:file_wrapper",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
]
@@ -489,6 +526,7 @@ if (rtc_include_tests) {
"../rtc_base:criticalsection",
"../rtc_base:rtc_event",
"../rtc_base:rtc_task_queue",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
"../rtc_base/task_utils:to_queued_task",
"pc/e2e:e2e_unittests",
@@ -586,8 +624,8 @@ rtc_library("fileutils") {
":fileutils_override_impl",
"../rtc_base:checks",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_ios) {
deps += [ ":fileutils_ios_objc" ]
}
@@ -599,16 +637,6 @@ rtc_library("fileutils") {
}
}
-rtc_library("resources_dir_flag") {
- testonly = true
- visibility = [ "*" ]
- sources = [
- "testsupport/resources_dir_flag.cc",
- "testsupport/resources_dir_flag.h",
- ]
- deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
-}
-
# We separate header into own target to make it possible for downstream
# projects to override implementation.
rtc_source_set("fileutils_override_api") {
@@ -625,8 +653,8 @@ rtc_library("fileutils_override_impl") {
"../rtc_base:checks",
"../rtc_base:macromagic",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_ios) {
deps += [ ":fileutils_ios_objc" ]
}
@@ -676,8 +704,8 @@ rtc_library("fileutils_unittests") {
":fileutils",
":test_support",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("direct_transport") {
@@ -697,10 +725,11 @@ rtc_library("direct_transport") {
"../call:simulated_packet_receiver",
"../rtc_base:macromagic",
"../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:repeating_task",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
public_deps = # no-presubmit-check TODO(webrtc:8603)
[ "../call:fake_network" ]
}
@@ -740,10 +769,11 @@ rtc_library("fake_video_codecs") {
"../rtc_base:macromagic",
"../rtc_base:rtc_task_queue",
"../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("null_transport") {
@@ -839,8 +869,8 @@ rtc_library("test_common") {
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers",
"../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!is_android && !build_with_chromium) {
deps += [ "../modules/video_capture:video_capture_internal_impl" ]
}
@@ -902,7 +932,7 @@ if (is_mac) {
":test_renderer_generic",
"../rtc_base:rtc_base_approved",
]
- libs = [
+ frameworks = [
"Cocoa.framework",
"OpenGL.framework",
"CoreVideo.framework",
@@ -986,8 +1016,8 @@ rtc_library("audio_codec_mocks") {
"../api/audio_codecs:builtin_audio_decoder_factory",
"../rtc_base:checks",
"../rtc_base:refcount",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("copy_to_file_audio_capturer") {
@@ -1001,8 +1031,8 @@ rtc_library("copy_to_file_audio_capturer") {
"../common_audio",
"../modules/audio_device:audio_device_impl",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("copy_to_file_audio_capturer_unittest") {
diff --git a/test/DEPS b/test/DEPS
index 62fd6d3ff7..170c4086d7 100644
--- a/test/DEPS
+++ b/test/DEPS
@@ -72,5 +72,8 @@ specific_include_rules = {
],
".*test_video_capturer_video_track_source.h": [
"+pc",
+ ],
+ "benchmark_main\.cc": [
+ "+benchmark",
]
}
diff --git a/test/android/AndroidManifest.xml b/test/android/AndroidManifest.xml
index c1ddfd4a02..ee2fec8716 100644
--- a/test/android/AndroidManifest.xml
+++ b/test/android/AndroidManifest.xml
@@ -14,7 +14,7 @@ be found in the AUTHORS file in the root of the source tree.
android:versionCode="1"
android:versionName="1.0">
- <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="23" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.BLUETOOTH"/>
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN"/>
diff --git a/test/benchmark_main.cc b/test/benchmark_main.cc
new file mode 100644
index 0000000000..1a79c24913
--- /dev/null
+++ b/test/benchmark_main.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark/benchmark.h"
+
+int main(int argc, char* argv[]) {
+ benchmark::Initialize(&argc, argv);
+ benchmark::RunSpecifiedBenchmarks();
+ return 0;
+}
diff --git a/test/direct_transport.cc b/test/direct_transport.cc
index 818480b18b..9c7a8f88d0 100644
--- a/test/direct_transport.cc
+++ b/test/direct_transport.cc
@@ -55,7 +55,6 @@ DirectTransport::~DirectTransport() {
}
void DirectTransport::SetReceiver(PacketReceiver* receiver) {
- rtc::CritScope cs(&process_lock_);
fake_network_->SetReceiver(receiver);
}
@@ -84,7 +83,7 @@ void DirectTransport::SendPacket(const uint8_t* data, size_t length) {
int64_t send_time_us = rtc::TimeMicros();
fake_network_->DeliverPacket(media_type, rtc::CopyOnWriteBuffer(data, length),
send_time_us);
- rtc::CritScope cs(&process_lock_);
+ MutexLock lock(&process_lock_);
if (!next_process_task_.Running())
ProcessPackets();
}
@@ -113,7 +112,7 @@ void DirectTransport::ProcessPackets() {
if (auto delay_ms = fake_network_->TimeUntilNextProcess())
return TimeDelta::Millis(*delay_ms);
// Otherwise stop the task.
- rtc::CritScope cs(&process_lock_);
+ MutexLock lock(&process_lock_);
next_process_task_.Stop();
// Since this task is stopped, return value doesn't matter.
return TimeDelta::Zero();
diff --git a/test/direct_transport.h b/test/direct_transport.h
index e0b2251eea..2fc3b7f76b 100644
--- a/test/direct_transport.h
+++ b/test/direct_transport.h
@@ -17,6 +17,7 @@
#include "api/test/simulated_network.h"
#include "call/call.h"
#include "call/simulated_packet_receiver.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
@@ -66,7 +67,7 @@ class DirectTransport : public Transport {
TaskQueueBase* const task_queue_;
- rtc::CriticalSection process_lock_;
+ Mutex process_lock_;
RepeatingTaskHandle next_process_task_ RTC_GUARDED_BY(&process_lock_);
const Demuxer demuxer_;
diff --git a/test/explicit_key_value_config.cc b/test/explicit_key_value_config.cc
new file mode 100644
index 0000000000..69f725a9e2
--- /dev/null
+++ b/test/explicit_key_value_config.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/explicit_key_value_config.h"
+
+#include "api/transport/webrtc_key_value_config.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace test {
+
+ExplicitKeyValueConfig::ExplicitKeyValueConfig(const std::string& s) {
+ std::string::size_type field_start = 0;
+ while (field_start < s.size()) {
+ std::string::size_type separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing separator '/' after field trial key.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial key cannot be empty.";
+ std::string key = s.substr(field_start, separator_pos - field_start);
+ field_start = separator_pos + 1;
+
+ RTC_CHECK_LT(field_start, s.size())
+ << "Missing value after field trial key. String ended.";
+ separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing terminating '/' in field trial string.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial value cannot be empty.";
+ std::string value = s.substr(field_start, separator_pos - field_start);
+ field_start = separator_pos + 1;
+
+ key_value_map_[key] = value;
+ }
+ // This check is technically redundant due to earlier checks.
+ // We nevertheless keep the check to make it clear that the entire
+ // string has been processed, and without indexing past the end.
+ RTC_CHECK_EQ(field_start, s.size());
+}
+
+std::string ExplicitKeyValueConfig::Lookup(absl::string_view key) const {
+ auto it = key_value_map_.find(std::string(key));
+ if (it != key_value_map_.end())
+ return it->second;
+ return "";
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/test/explicit_key_value_config.h b/test/explicit_key_value_config.h
new file mode 100644
index 0000000000..9a3bc84f60
--- /dev/null
+++ b/test/explicit_key_value_config.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+#define TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/transport/webrtc_key_value_config.h"
+
+namespace webrtc {
+namespace test {
+
+class ExplicitKeyValueConfig : public WebRtcKeyValueConfig {
+ public:
+ explicit ExplicitKeyValueConfig(const std::string& s);
+ std::string Lookup(absl::string_view key) const override;
+
+ private:
+ std::map<std::string, std::string> key_value_map_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index 64b4a4e9ff..219dafcf16 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -67,19 +67,19 @@ void FakeEncoder::SetFecControllerOverride(
void FakeEncoder::SetMaxBitrate(int max_kbps) {
RTC_DCHECK_GE(max_kbps, -1); // max_kbps == -1 disables it.
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
max_target_bitrate_kbps_ = max_kbps;
- SetRates(current_rate_settings_);
+ SetRatesLocked(current_rate_settings_);
}
void FakeEncoder::SetQp(int qp) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
qp_ = qp;
}
int32_t FakeEncoder::InitEncode(const VideoCodec* config,
const Settings& settings) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
config_ = *config;
current_rate_settings_.bitrate.SetBitrate(0, 0, config_.startBitrate * 1000);
current_rate_settings_.framerate_fps = config_.maxFramerate;
@@ -100,7 +100,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
uint32_t counter;
absl::optional<int> qp;
{
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
max_framerate = config_.maxFramerate;
num_simulcast_streams = config_.numberOfSimulcastStreams;
for (int i = 0; i < num_simulcast_streams; ++i) {
@@ -182,7 +182,7 @@ FakeEncoder::FrameInfo FakeEncoder::NextFrame(
}
}
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
for (uint8_t i = 0; i < num_simulcast_streams; ++i) {
if (target_bitrate.GetBitrate(i, 0) > 0) {
int temporal_id = last_frame_info_.layers.size() > i
@@ -232,7 +232,7 @@ FakeEncoder::FrameInfo FakeEncoder::NextFrame(
int32_t FakeEncoder::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
callback_ = callback;
return 0;
}
@@ -242,7 +242,11 @@ int32_t FakeEncoder::Release() {
}
void FakeEncoder::SetRates(const RateControlParameters& parameters) {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
+ SetRatesLocked(parameters);
+}
+
+void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) {
current_rate_settings_ = parameters;
int allocated_bitrate_kbps = parameters.bitrate.get_sum_kbps();
@@ -276,7 +280,7 @@ VideoEncoder::EncoderInfo FakeEncoder::GetEncoderInfo() const {
}
int FakeEncoder::GetConfiguredInputFramerate() const {
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
return static_cast<int>(current_rate_settings_.framerate_fps + 0.5);
}
@@ -286,55 +290,56 @@ FakeH264Encoder::FakeH264Encoder(Clock* clock)
std::unique_ptr<RTPFragmentationHeader> FakeH264Encoder::EncodeHook(
EncodedImage* encoded_image,
CodecSpecificInfo* codec_specific) {
+ static constexpr std::array<uint8_t, 3> kStartCode = {0, 0, 1};
const size_t kSpsSize = 8;
const size_t kPpsSize = 11;
const int kIdrFrequency = 10;
int current_idr_counter;
{
- rtc::CritScope cs(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
current_idr_counter = idr_counter_;
++idr_counter_;
}
+ for (size_t i = 0; i < encoded_image->size(); ++i) {
+ encoded_image->data()[i] = static_cast<uint8_t>(i);
+ }
+
auto fragmentation = std::make_unique<RTPFragmentationHeader>();
if (current_idr_counter % kIdrFrequency == 0 &&
- encoded_image->size() > kSpsSize + kPpsSize + 1) {
+ encoded_image->size() > kSpsSize + kPpsSize + 1 + 3 * kStartCode.size()) {
const size_t kNumSlices = 3;
fragmentation->VerifyAndAllocateFragmentationHeader(kNumSlices);
- fragmentation->fragmentationOffset[0] = 0;
+ fragmentation->fragmentationOffset[0] = kStartCode.size();
fragmentation->fragmentationLength[0] = kSpsSize;
- fragmentation->fragmentationOffset[1] = kSpsSize;
+ fragmentation->fragmentationOffset[1] = 2 * kStartCode.size() + kSpsSize;
fragmentation->fragmentationLength[1] = kPpsSize;
- fragmentation->fragmentationOffset[2] = kSpsSize + kPpsSize;
+ fragmentation->fragmentationOffset[2] =
+ 3 * kStartCode.size() + kSpsSize + kPpsSize;
fragmentation->fragmentationLength[2] =
- encoded_image->size() - (kSpsSize + kPpsSize);
+ encoded_image->size() - (3 * kStartCode.size() + kSpsSize + kPpsSize);
const size_t kSpsNalHeader = 0x67;
const size_t kPpsNalHeader = 0x68;
const size_t kIdrNalHeader = 0x65;
- encoded_image->data()[fragmentation->fragmentationOffset[0]] =
- kSpsNalHeader;
- encoded_image->data()[fragmentation->fragmentationOffset[1]] =
- kPpsNalHeader;
- encoded_image->data()[fragmentation->fragmentationOffset[2]] =
- kIdrNalHeader;
+ memcpy(encoded_image->data(), kStartCode.data(), kStartCode.size());
+ encoded_image->data()[fragmentation->Offset(0)] = kSpsNalHeader;
+ memcpy(encoded_image->data() + fragmentation->Offset(1) - kStartCode.size(),
+ kStartCode.data(), kStartCode.size());
+ encoded_image->data()[fragmentation->Offset(1)] = kPpsNalHeader;
+ memcpy(encoded_image->data() + fragmentation->Offset(2) - kStartCode.size(),
+ kStartCode.data(), kStartCode.size());
+ encoded_image->data()[fragmentation->Offset(2)] = kIdrNalHeader;
} else {
const size_t kNumSlices = 1;
fragmentation->VerifyAndAllocateFragmentationHeader(kNumSlices);
- fragmentation->fragmentationOffset[0] = 0;
- fragmentation->fragmentationLength[0] = encoded_image->size();
+ fragmentation->fragmentationOffset[0] = kStartCode.size();
+ fragmentation->fragmentationLength[0] =
+ encoded_image->size() - kStartCode.size();
+ memcpy(encoded_image->data(), kStartCode.data(), kStartCode.size());
const size_t kNalHeader = 0x41;
encoded_image->data()[fragmentation->fragmentationOffset[0]] = kNalHeader;
}
- uint8_t value = 0;
- int fragment_counter = 0;
- for (size_t i = 0; i < encoded_image->size(); ++i) {
- if (fragment_counter == fragmentation->fragmentationVectorSize ||
- i != fragmentation->fragmentationOffset[fragment_counter]) {
- encoded_image->data()[i] = value++;
- } else {
- ++fragment_counter;
- }
- }
+
codec_specific->codecType = kVideoCodecH264;
codec_specific->codecSpecific.H264.packetization_mode =
H264PacketizationMode::NonInterleaved;
diff --git a/test/fake_encoder.h b/test/fake_encoder.h
index 39838d16f1..22c772311c 100644
--- a/test/fake_encoder.h
+++ b/test/fake_encoder.h
@@ -26,7 +26,7 @@
#include "api/video_codecs/video_encoder.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_codec_interface.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -40,21 +40,23 @@ class FakeEncoder : public VideoEncoder {
virtual ~FakeEncoder() = default;
// Sets max bitrate. Not thread-safe, call before registering the encoder.
- void SetMaxBitrate(int max_kbps);
- void SetQp(int qp);
+ void SetMaxBitrate(int max_kbps) RTC_LOCKS_EXCLUDED(mutex_);
+ void SetQp(int qp) RTC_LOCKS_EXCLUDED(mutex_);
void SetFecControllerOverride(
FecControllerOverride* fec_controller_override) override;
- int32_t InitEncode(const VideoCodec* config,
- const Settings& settings) override;
+ int32_t InitEncode(const VideoCodec* config, const Settings& settings)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
int32_t Encode(const VideoFrame& input_image,
- const std::vector<VideoFrameType>* frame_types) override;
- int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override;
+ const std::vector<VideoFrameType>* frame_types)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
int32_t Release() override;
- void SetRates(const RateControlParameters& parameters) override;
- int GetConfiguredInputFramerate() const;
+ void SetRates(const RateControlParameters& parameters)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int GetConfiguredInputFramerate() const RTC_LOCKS_EXCLUDED(mutex_);
EncoderInfo GetEncoderInfo() const override;
static const char* kImplementationName;
@@ -79,7 +81,7 @@ class FakeEncoder : public VideoEncoder {
uint8_t num_simulcast_streams,
const VideoBitrateAllocation& target_bitrate,
SimulcastStream simulcast_streams[kMaxSimulcastStreams],
- int framerate);
+ int framerate) RTC_LOCKS_EXCLUDED(mutex_);
// Called before the frame is passed to callback_->OnEncodedImage, to let
// subclasses fill out codec_specific, possibly modify encodedImage.
@@ -88,18 +90,21 @@ class FakeEncoder : public VideoEncoder {
EncodedImage* encoded_image,
CodecSpecificInfo* codec_specific);
- FrameInfo last_frame_info_ RTC_GUARDED_BY(crit_sect_);
+ void SetRatesLocked(const RateControlParameters& parameters)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ FrameInfo last_frame_info_ RTC_GUARDED_BY(mutex_);
Clock* const clock_;
- VideoCodec config_ RTC_GUARDED_BY(crit_sect_);
- EncodedImageCallback* callback_ RTC_GUARDED_BY(crit_sect_);
- RateControlParameters current_rate_settings_ RTC_GUARDED_BY(crit_sect_);
- int max_target_bitrate_kbps_ RTC_GUARDED_BY(crit_sect_);
- bool pending_keyframe_ RTC_GUARDED_BY(crit_sect_);
- uint32_t counter_ RTC_GUARDED_BY(crit_sect_);
- rtc::CriticalSection crit_sect_;
+ VideoCodec config_ RTC_GUARDED_BY(mutex_);
+ EncodedImageCallback* callback_ RTC_GUARDED_BY(mutex_);
+ RateControlParameters current_rate_settings_ RTC_GUARDED_BY(mutex_);
+ int max_target_bitrate_kbps_ RTC_GUARDED_BY(mutex_);
+ bool pending_keyframe_ RTC_GUARDED_BY(mutex_);
+ uint32_t counter_ RTC_GUARDED_BY(mutex_);
+ mutable Mutex mutex_;
bool used_layers_[kMaxSimulcastStreams];
- absl::optional<int> qp_ RTC_GUARDED_BY(crit_sect_);
+ absl::optional<int> qp_ RTC_GUARDED_BY(mutex_);
// Current byte debt to be payed over a number of frames.
// The debt is acquired by keyframes overshooting the bitrate target.
@@ -116,8 +121,8 @@ class FakeH264Encoder : public FakeEncoder {
EncodedImage* encoded_image,
CodecSpecificInfo* codec_specific) override;
- int idr_counter_ RTC_GUARDED_BY(local_crit_sect_);
- rtc::CriticalSection local_crit_sect_;
+ int idr_counter_ RTC_GUARDED_BY(local_mutex_);
+ Mutex local_mutex_;
};
class DelayedEncoder : public test::FakeEncoder {
diff --git a/test/frame_forwarder.cc b/test/frame_forwarder.cc
index d1a2ddb1c2..e89f753bd3 100644
--- a/test/frame_forwarder.cc
+++ b/test/frame_forwarder.cc
@@ -18,32 +18,42 @@ FrameForwarder::FrameForwarder() : sink_(nullptr) {}
FrameForwarder::~FrameForwarder() {}
void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (sink_)
sink_->OnFrame(video_frame);
}
void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
+ AddOrUpdateSinkLocked(sink, wants);
+}
+
+void FrameForwarder::AddOrUpdateSinkLocked(
+ rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
RTC_DCHECK(!sink_ || sink_ == sink);
sink_ = sink;
sink_wants_ = wants;
}
void FrameForwarder::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RTC_DCHECK_EQ(sink, sink_);
sink_ = nullptr;
}
rtc::VideoSinkWants FrameForwarder::sink_wants() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
+ return sink_wants_;
+}
+
+rtc::VideoSinkWants FrameForwarder::sink_wants_locked() const {
return sink_wants_;
}
bool FrameForwarder::has_sinks() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return sink_ != nullptr;
}
diff --git a/test/frame_forwarder.h b/test/frame_forwarder.h
index cf29f5f074..bbf11cc939 100644
--- a/test/frame_forwarder.h
+++ b/test/frame_forwarder.h
@@ -12,7 +12,7 @@
#include "api/video/video_frame.h"
#include "api/video/video_source_interface.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace test {
@@ -26,18 +26,26 @@ class FrameForwarder : public rtc::VideoSourceInterface<VideoFrame> {
FrameForwarder();
~FrameForwarder() override;
// Forwards |video_frame| to the registered |sink_|.
- virtual void IncomingCapturedFrame(const VideoFrame& video_frame);
- rtc::VideoSinkWants sink_wants() const;
- bool has_sinks() const;
+ virtual void IncomingCapturedFrame(const VideoFrame& video_frame)
+ RTC_LOCKS_EXCLUDED(mutex_);
+ rtc::VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(mutex_);
+ bool has_sinks() const RTC_LOCKS_EXCLUDED(mutex_);
protected:
+ rtc::VideoSinkWants sink_wants_locked() const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
- const rtc::VideoSinkWants& wants) override;
- void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+ const rtc::VideoSinkWants& wants)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ void AddOrUpdateSinkLocked(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
- rtc::CriticalSection crit_;
- rtc::VideoSinkInterface<VideoFrame>* sink_ RTC_GUARDED_BY(crit_);
- rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ rtc::VideoSinkInterface<VideoFrame>* sink_ RTC_GUARDED_BY(mutex_);
+ rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(mutex_);
};
} // namespace test
diff --git a/test/frame_generator.cc b/test/frame_generator.cc
index 1f998427ac..caaa5e6321 100644
--- a/test/frame_generator.cc
+++ b/test/frame_generator.cc
@@ -46,7 +46,7 @@ SquareGenerator::SquareGenerator(int width,
}
void SquareGenerator::ChangeResolution(size_t width, size_t height) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
width_ = static_cast<int>(width);
height_ = static_cast<int>(height);
RTC_CHECK(width_ > 0);
@@ -65,7 +65,7 @@ rtc::scoped_refptr<I420Buffer> SquareGenerator::CreateI420Buffer(int width,
}
FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
switch (type_) {
diff --git a/test/frame_generator.h b/test/frame_generator.h
index 6f59c1ed0b..94e15cb0de 100644
--- a/test/frame_generator.h
+++ b/test/frame_generator.h
@@ -20,8 +20,8 @@
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_source_interface.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -57,11 +57,11 @@ class SquareGenerator : public FrameGeneratorInterface {
const uint8_t yuv_a_;
};
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const OutputType type_;
- int width_ RTC_GUARDED_BY(&crit_);
- int height_ RTC_GUARDED_BY(&crit_);
- std::vector<std::unique_ptr<Square>> squares_ RTC_GUARDED_BY(&crit_);
+ int width_ RTC_GUARDED_BY(&mutex_);
+ int height_ RTC_GUARDED_BY(&mutex_);
+ std::vector<std::unique_ptr<Square>> squares_ RTC_GUARDED_BY(&mutex_);
};
class YuvFileGenerator : public FrameGeneratorInterface {
diff --git a/test/frame_generator_capturer.cc b/test/frame_generator_capturer.cc
index 9806c83d83..266cff8734 100644
--- a/test/frame_generator_capturer.cc
+++ b/test/frame_generator_capturer.cc
@@ -20,7 +20,6 @@
#include "absl/strings/match.h"
#include "api/test/create_frame_generator.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/time_utils.h"
@@ -150,13 +149,13 @@ std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
}
void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
fake_rotation_ = rotation;
}
void FrameGeneratorCapturer::SetFakeColorSpace(
absl::optional<ColorSpace> color_space) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
fake_color_space_ = color_space;
}
@@ -176,7 +175,7 @@ bool FrameGeneratorCapturer::Init() {
}
void FrameGeneratorCapturer::InsertFrame() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (sending_) {
FrameGeneratorInterface::VideoFrameData frame_data =
frame_generator_->NextFrame();
@@ -205,7 +204,7 @@ void FrameGeneratorCapturer::InsertFrame() {
void FrameGeneratorCapturer::Start() {
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
sending_ = true;
}
if (!frame_task_.Running()) {
@@ -217,17 +216,17 @@ void FrameGeneratorCapturer::Start() {
}
void FrameGeneratorCapturer::Stop() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
sending_ = false;
}
void FrameGeneratorCapturer::ChangeResolution(size_t width, size_t height) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
frame_generator_->ChangeResolution(width, height);
}
void FrameGeneratorCapturer::ChangeFramerate(int target_framerate) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_CHECK(target_capture_fps_ > 0);
if (target_framerate > source_fps_)
RTC_LOG(LS_WARNING) << "Target framerate clamped from " << target_framerate
@@ -245,7 +244,7 @@ void FrameGeneratorCapturer::ChangeFramerate(int target_framerate) {
}
void FrameGeneratorCapturer::SetSinkWantsObserver(SinkWantsObserver* observer) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!sink_wants_observer_);
sink_wants_observer_ = observer;
}
@@ -254,7 +253,7 @@ void FrameGeneratorCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
TestVideoCapturer::AddOrUpdateSink(sink, wants);
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (sink_wants_observer_) {
// Tests need to observe unmodified sink wants.
sink_wants_observer_->OnSinkWantsChanged(sink, wants);
@@ -266,7 +265,7 @@ void FrameGeneratorCapturer::RemoveSink(
rtc::VideoSinkInterface<VideoFrame>* sink) {
TestVideoCapturer::RemoveSink(sink);
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
UpdateFps(GetSinkWants().max_framerate_fps);
}
@@ -284,7 +283,7 @@ void FrameGeneratorCapturer::ForceFrame() {
}
int FrameGeneratorCapturer::GetCurrentConfiguredFramerate() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (wanted_fps_ && *wanted_fps_ < target_capture_fps_)
return *wanted_fps_;
return target_capture_fps_;
diff --git a/test/frame_generator_capturer.h b/test/frame_generator_capturer.h
index fd376e2d6f..1e915fca21 100644
--- a/test/frame_generator_capturer.h
+++ b/test/frame_generator_capturer.h
@@ -16,7 +16,7 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/test/frame_generator_interface.h"
#include "api/video/video_frame.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/clock.h"
@@ -157,7 +157,7 @@ class FrameGeneratorCapturer : public TestVideoCapturer {
bool sending_;
SinkWantsObserver* sink_wants_observer_ RTC_GUARDED_BY(&lock_);
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::unique_ptr<FrameGeneratorInterface> frame_generator_;
int source_fps_ RTC_GUARDED_BY(&lock_);
diff --git a/test/frame_generator_capturer_unittest.cc b/test/frame_generator_capturer_unittest.cc
index 7400bbb79b..a76cb95d44 100644
--- a/test/frame_generator_capturer_unittest.cc
+++ b/test/frame_generator_capturer_unittest.cc
@@ -22,8 +22,8 @@ using ::testing::Property;
class MockVideoSinkInterfaceVideoFrame
: public rtc::VideoSinkInterface<VideoFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const VideoFrame& frame));
- MOCK_METHOD0(OnDiscardedFrame, void());
+ MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override));
+ MOCK_METHOD(void, OnDiscardedFrame, (), (override));
};
} // namespace
TEST(FrameGeneratorCapturerTest, CreateFromConfig) {
diff --git a/test/fuzzers/BUILD.gn b/test/fuzzers/BUILD.gn
index a7aa058ecb..4975f42a98 100644
--- a/test/fuzzers/BUILD.gn
+++ b/test/fuzzers/BUILD.gn
@@ -40,6 +40,10 @@ rtc_library("fuzz_data_helper") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
}
+set_defaults("webrtc_fuzzer_test") {
+ absl_deps = []
+}
+
template("webrtc_fuzzer_test") {
fuzzer_test(target_name) {
forward_variables_from(invoker, "*")
@@ -47,6 +51,21 @@ template("webrtc_fuzzer_test") {
":fuzz_data_helper",
":webrtc_fuzzer_main",
]
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
+
if (!build_with_chromium && is_clang) {
suppressed_configs = [ "//build/config/clang:find_bad_constructs" ]
}
@@ -194,10 +213,8 @@ webrtc_fuzzer_test("rtcp_receiver_fuzzer") {
webrtc_fuzzer_test("rtp_packet_fuzzer") {
sources = [ "rtp_packet_fuzzer.cc" ]
- deps = [
- "../../modules/rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../modules/rtp_rtcp:rtp_rtcp_format" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
seed_corpus = "corpora/rtp-corpus"
}
@@ -240,8 +257,8 @@ rtc_library("audio_decoder_fuzzer") {
"../../modules/rtp_rtcp:rtp_rtcp_format",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") {
@@ -318,7 +335,7 @@ webrtc_fuzzer_test("audio_encoder_opus_fuzzer") {
}
webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") {
- sources = [ "audio_encoder_isax_fixed_fuzzer.cc" ]
+ sources = [ "audio_encoder_isac_fixed_fuzzer.cc" ]
deps = [
":audio_encoder_fuzzer",
"../../api/audio_codecs/isac:audio_encoder_isac_fix",
@@ -327,7 +344,7 @@ webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") {
}
webrtc_fuzzer_test("audio_encoder_isac_float_fuzzer") {
- sources = [ "audio_encoder_isax_float_fuzzer.cc" ]
+ sources = [ "audio_encoder_isac_float_fuzzer.cc" ]
deps = [
":audio_encoder_fuzzer",
"../../api/audio_codecs/isac:audio_encoder_isac_float",
@@ -373,7 +390,7 @@ webrtc_fuzzer_test("neteq_signal_fuzzer") {
webrtc_fuzzer_test("residual_echo_detector_fuzzer") {
sources = [ "residual_echo_detector_fuzzer.cc" ]
deps = [
- "../../modules/audio_processing",
+ "../../api/audio:echo_detector_creator",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
]
@@ -439,8 +456,8 @@ rtc_library("audio_processing_fuzzer_helper") {
"../../modules/audio_processing:audio_frame_proxies",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
webrtc_fuzzer_test("audio_processing_fuzzer") {
@@ -461,8 +478,8 @@ webrtc_fuzzer_test("audio_processing_fuzzer") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:safe_minmax",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
seed_corpus = "corpora/audio_processing-corpus"
}
@@ -567,15 +584,6 @@ webrtc_fuzzer_test("string_to_number_fuzzer") {
seed_corpus = "corpora/string_to_number-corpus"
}
-webrtc_fuzzer_test("rtp_rtcp_demuxer_helper_fuzzer") {
- sources = [ "rtp_rtcp_demuxer_helper_fuzzer.cc" ]
- deps = [
- "../../api:array_view",
- "../../call:rtp_receiver",
- ]
- seed_corpus = "corpora/rtcp-corpus"
-}
-
webrtc_fuzzer_test("sctp_utils_fuzzer") {
sources = [ "sctp_utils_fuzzer.cc" ]
deps = [
diff --git a/test/fuzzers/audio_encoder_isax_fixed_fuzzer.cc b/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc
index 5357dc1b3e..5357dc1b3e 100644
--- a/test/fuzzers/audio_encoder_isax_fixed_fuzzer.cc
+++ b/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc
diff --git a/test/fuzzers/audio_encoder_isax_float_fuzzer.cc b/test/fuzzers/audio_encoder_isac_float_fuzzer.cc
index f9e2e0206d..f9e2e0206d 100644
--- a/test/fuzzers/audio_encoder_isax_float_fuzzer.cc
+++ b/test/fuzzers/audio_encoder_isac_float_fuzzer.cc
diff --git a/test/fuzzers/residual_echo_detector_fuzzer.cc b/test/fuzzers/residual_echo_detector_fuzzer.cc
index 99ea06a08e..da4b6ededf 100644
--- a/test/fuzzers/residual_echo_detector_fuzzer.cc
+++ b/test/fuzzers/residual_echo_detector_fuzzer.cc
@@ -15,7 +15,7 @@
#include <bitset>
#include <vector>
-#include "modules/audio_processing/residual_echo_detector.h"
+#include "api/audio/echo_detector_creator.h"
#include "rtc_base/checks.h"
#include "rtc_base/ref_counted_object.h"
@@ -43,8 +43,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
read_idx += 2;
std::bitset<16> call_order(call_order_int);
- rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ rtc::scoped_refptr<EchoDetector> echo_detector = CreateEchoDetector();
std::vector<float> input(1);
// Call AnalyzeCaptureAudio once to prevent the flushing of the buffer.
echo_detector->AnalyzeCaptureAudio(input);
diff --git a/test/fuzzers/rtcp_receiver_fuzzer.cc b/test/fuzzers/rtcp_receiver_fuzzer.cc
index 38213c3a6e..8bad9e456a 100644
--- a/test/fuzzers/rtcp_receiver_fuzzer.cc
+++ b/test/fuzzers/rtcp_receiver_fuzzer.cc
@@ -7,9 +7,9 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/checks.h"
#include "system_wrappers/include/clock.h"
@@ -40,7 +40,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
NullModuleRtpRtcp rtp_rtcp_module;
SimulatedClock clock(1234);
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock;
config.rtcp_report_interval_ms = kRtcpIntervalMs;
config.local_media_ssrc = 1;
diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
index a9f33feed6..8b19a088de 100644
--- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
+++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
@@ -132,7 +132,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
break;
}
- reader.CopyTo(&video_header.frame_marking);
video_header.generic = GenerateGenericFrameDependencies(&reader);
// clang-format off
diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc
index 6a4f5e7893..a22c643a44 100644
--- a/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/test/fuzzers/rtp_packet_fuzzer.cc
@@ -112,10 +112,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
VideoSendTiming timing;
packet.GetExtension<VideoTimingExtension>(&timing);
break;
- case kRtpExtensionFrameMarking:
- FrameMarking frame_marking;
- packet.GetExtension<FrameMarkingExtension>(&frame_marking);
- break;
case kRtpExtensionRtpStreamId: {
std::string rsid;
packet.GetExtension<RtpStreamId>(&rsid);
diff --git a/test/logging/BUILD.gn b/test/logging/BUILD.gn
index db2a5447ac..1af2ecfdac 100644
--- a/test/logging/BUILD.gn
+++ b/test/logging/BUILD.gn
@@ -27,6 +27,6 @@ rtc_library("log_writer") {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:stringutils",
"../../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/test/mock_audio_decoder.h b/test/mock_audio_decoder.h
index 7c6db5acc2..8f44bf891d 100644
--- a/test/mock_audio_decoder.h
+++ b/test/mock_audio_decoder.h
@@ -20,16 +20,18 @@ class MockAudioDecoder : public AudioDecoder {
public:
MockAudioDecoder();
~MockAudioDecoder();
- MOCK_METHOD0(Die, void());
- MOCK_METHOD5(DecodeInternal,
- int(const uint8_t*, size_t, int, int16_t*, SpeechType*));
- MOCK_CONST_METHOD0(HasDecodePlc, bool());
- MOCK_METHOD2(DecodePlc, size_t(size_t, int16_t*));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD0(ErrorCode, int());
- MOCK_CONST_METHOD2(PacketDuration, int(const uint8_t*, size_t));
- MOCK_CONST_METHOD0(Channels, size_t());
- MOCK_CONST_METHOD0(SampleRateHz, int());
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(int,
+ DecodeInternal,
+ (const uint8_t*, size_t, int, int16_t*, SpeechType*),
+ (override));
+ MOCK_METHOD(bool, HasDecodePlc, (), (const, override));
+ MOCK_METHOD(size_t, DecodePlc, (size_t, int16_t*), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, ErrorCode, (), (override));
+ MOCK_METHOD(int, PacketDuration, (const uint8_t*, size_t), (const, override));
+ MOCK_METHOD(size_t, Channels, (), (const, override));
+ MOCK_METHOD(int, SampleRateHz, (), (const, override));
};
} // namespace webrtc
diff --git a/test/mock_audio_decoder_factory.h b/test/mock_audio_decoder_factory.h
index cdf2919543..cdb03d3f38 100644
--- a/test/mock_audio_decoder_factory.h
+++ b/test/mock_audio_decoder_factory.h
@@ -24,19 +24,23 @@ namespace webrtc {
class MockAudioDecoderFactory : public AudioDecoderFactory {
public:
- MOCK_METHOD0(GetSupportedDecoders, std::vector<AudioCodecSpec>());
- MOCK_METHOD1(IsSupportedDecoder, bool(const SdpAudioFormat&));
+ MOCK_METHOD(std::vector<AudioCodecSpec>,
+ GetSupportedDecoders,
+ (),
+ (override));
+ MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), (override));
std::unique_ptr<AudioDecoder> MakeAudioDecoder(
const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) {
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
std::unique_ptr<AudioDecoder> return_value;
MakeAudioDecoderMock(format, codec_pair_id, &return_value);
return return_value;
}
- MOCK_METHOD3(MakeAudioDecoderMock,
- void(const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id,
- std::unique_ptr<AudioDecoder>* return_value));
+ MOCK_METHOD(void,
+ MakeAudioDecoderMock,
+ (const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id,
+ std::unique_ptr<AudioDecoder>*));
// Creates a MockAudioDecoderFactory with no formats and that may not be
// invoked to create a codec - useful for initializing a voice engine, for
diff --git a/test/mock_audio_encoder.h b/test/mock_audio_encoder.h
index 2dfd15ca98..eeb63f1062 100644
--- a/test/mock_audio_encoder.h
+++ b/test/mock_audio_encoder.h
@@ -21,43 +21,46 @@ namespace webrtc {
class MockAudioEncoder : public AudioEncoder {
public:
- // TODO(nisse): Valid overrides commented out, because the gmock
- // methods don't use any override declarations, and we want to avoid
- // warnings from -Winconsistent-missing-override. See
- // http://crbug.com/428099.
MockAudioEncoder();
~MockAudioEncoder();
- MOCK_METHOD1(Mark, void(std::string desc));
- MOCK_CONST_METHOD0(SampleRateHz, int());
- MOCK_CONST_METHOD0(NumChannels, size_t());
- MOCK_CONST_METHOD0(RtpTimestampRateHz, int());
- MOCK_CONST_METHOD0(Num10MsFramesInNextPacket, size_t());
- MOCK_CONST_METHOD0(Max10MsFramesInAPacket, size_t());
- MOCK_CONST_METHOD0(GetTargetBitrate, int());
- MOCK_CONST_METHOD0(GetFrameLengthRange,
- absl::optional<std::pair<TimeDelta, TimeDelta>>());
-
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(SetFec, bool(bool enable));
- MOCK_METHOD1(SetDtx, bool(bool enable));
- MOCK_METHOD1(SetApplication, bool(Application application));
- MOCK_METHOD1(SetMaxPlaybackRate, void(int frequency_hz));
- MOCK_METHOD1(SetMaxBitrate, void(int max_bps));
- MOCK_METHOD1(SetMaxPayloadSize, void(int max_payload_size_bytes));
- MOCK_METHOD2(OnReceivedUplinkBandwidth,
- void(int target_audio_bitrate_bps,
- absl::optional<int64_t> probing_interval_ms));
- MOCK_METHOD1(OnReceivedUplinkPacketLossFraction,
- void(float uplink_packet_loss_fraction));
-
- MOCK_METHOD2(EnableAudioNetworkAdaptor,
- bool(const std::string& config_string, RtcEventLog* event_log));
+ MOCK_METHOD(int, SampleRateHz, (), (const, override));
+ MOCK_METHOD(size_t, NumChannels, (), (const, override));
+ MOCK_METHOD(int, RtpTimestampRateHz, (), (const, override));
+ MOCK_METHOD(size_t, Num10MsFramesInNextPacket, (), (const, override));
+ MOCK_METHOD(size_t, Max10MsFramesInAPacket, (), (const, override));
+ MOCK_METHOD(int, GetTargetBitrate, (), (const, override));
+ MOCK_METHOD((absl::optional<std::pair<TimeDelta, TimeDelta>>),
+ GetFrameLengthRange,
+ (),
+ (const, override));
+
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(bool, SetFec, (bool enable), (override));
+ MOCK_METHOD(bool, SetDtx, (bool enable), (override));
+ MOCK_METHOD(bool, SetApplication, (Application application), (override));
+ MOCK_METHOD(void, SetMaxPlaybackRate, (int frequency_hz), (override));
+ MOCK_METHOD(void,
+ OnReceivedUplinkBandwidth,
+ (int target_audio_bitrate_bps,
+ absl::optional<int64_t> probing_interval_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnReceivedUplinkPacketLossFraction,
+ (float uplink_packet_loss_fraction),
+ (override));
+
+ MOCK_METHOD(bool,
+ EnableAudioNetworkAdaptor,
+ (const std::string& config_string, RtcEventLog*),
+ (override));
// Note, we explicitly chose not to create a mock for the Encode method.
- MOCK_METHOD3(EncodeImpl,
- EncodedInfo(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded));
+ MOCK_METHOD(EncodedInfo,
+ EncodeImpl,
+ (uint32_t timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ rtc::Buffer*),
+ (override));
class FakeEncoding {
public:
diff --git a/test/mock_audio_encoder_factory.h b/test/mock_audio_encoder_factory.h
index 3e774a39e9..392a4c11e2 100644
--- a/test/mock_audio_encoder_factory.h
+++ b/test/mock_audio_encoder_factory.h
@@ -24,23 +24,29 @@ namespace webrtc {
class MockAudioEncoderFactory
: public ::testing::NiceMock<AudioEncoderFactory> {
public:
- MOCK_METHOD0(GetSupportedEncoders, std::vector<AudioCodecSpec>());
- MOCK_METHOD1(QueryAudioEncoder,
- absl::optional<AudioCodecInfo>(const SdpAudioFormat& format));
+ MOCK_METHOD(std::vector<AudioCodecSpec>,
+ GetSupportedEncoders,
+ (),
+ (override));
+ MOCK_METHOD(absl::optional<AudioCodecInfo>,
+ QueryAudioEncoder,
+ (const SdpAudioFormat& format),
+ (override));
std::unique_ptr<AudioEncoder> MakeAudioEncoder(
int payload_type,
const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) {
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
std::unique_ptr<AudioEncoder> return_value;
MakeAudioEncoderMock(payload_type, format, codec_pair_id, &return_value);
return return_value;
}
- MOCK_METHOD4(MakeAudioEncoderMock,
- void(int payload_type,
- const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id,
- std::unique_ptr<AudioEncoder>* return_value));
+ MOCK_METHOD(void,
+ MakeAudioEncoderMock,
+ (int payload_type,
+ const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id,
+ std::unique_ptr<AudioEncoder>*));
// Creates a MockAudioEncoderFactory with no formats and that may not be
// invoked to create a codec - useful for initializing a voice engine, for
diff --git a/test/mock_transport.h b/test/mock_transport.h
index 5ffc10425b..9c4dc4bf8d 100644
--- a/test/mock_transport.h
+++ b/test/mock_transport.h
@@ -25,7 +25,7 @@ class MockTransport : public Transport {
SendRtp,
(const uint8_t*, size_t, const PacketOptions&),
(override));
- MOCK_METHOD(bool, SendRtcp, (const uint8_t* data, size_t len), (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t len), (override));
};
} // namespace webrtc
diff --git a/test/network/BUILD.gn b/test/network/BUILD.gn
index 4b01479c9b..35673741ce 100644
--- a/test/network/BUILD.gn
+++ b/test/network/BUILD.gn
@@ -48,12 +48,15 @@ rtc_library("emulated_network") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:safe_minmax",
"../../rtc_base:task_queue_for_test",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/third_party/sigslot",
"../../system_wrappers",
"../scenario:column_printer",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
@@ -72,6 +75,7 @@ rtc_library("network_emulation_unittest") {
"../../rtc_base:gunit_helpers",
"../../rtc_base:logging",
"../../rtc_base:rtc_event",
+ "../../rtc_base/synchronization:mutex",
"../../system_wrappers:system_wrappers",
]
}
@@ -114,8 +118,8 @@ rtc_library("cross_traffic_unittest") {
"../../rtc_base:logging",
"../../rtc_base:rtc_event",
"//test/time_controller:time_controller",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("feedback_generator") {
@@ -130,8 +134,8 @@ rtc_library("feedback_generator") {
"../../call:simulated_network",
"../../rtc_base:checks",
"../time_controller",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("feedback_generator_unittest") {
diff --git a/test/network/cross_traffic.cc b/test/network/cross_traffic.cc
index be0c3d3286..56e7635142 100644
--- a/test/network/cross_traffic.cc
+++ b/test/network/cross_traffic.cc
@@ -144,15 +144,16 @@ void TcpMessageRouteImpl::SendMessage(size_t size,
cwnd_ = 10;
ssthresh_ = INFINITY;
}
- size_t data_left = size;
- size_t kMaxPacketSize = 1200;
- size_t kMinPacketSize = 4;
+ int64_t data_left = static_cast<int64_t>(size);
+ int64_t kMaxPacketSize = 1200;
+ int64_t kMinPacketSize = 4;
Message message{std::move(handler)};
while (data_left > 0) {
- size_t packet_size =
- std::max(kMinPacketSize, std::min(data_left, kMaxPacketSize));
+ int64_t packet_size = std::min(data_left, kMaxPacketSize);
int fragment_id = next_fragment_id_++;
- pending_.push_back(MessageFragment{fragment_id, packet_size});
+ pending_.push_back(MessageFragment{
+ fragment_id,
+ static_cast<size_t>(std::max(kMinPacketSize, packet_size))});
message.pending_fragment_ids.insert(fragment_id);
data_left -= packet_size;
}
diff --git a/test/network/emulated_network_manager.h b/test/network/emulated_network_manager.h
index 92555eee23..ca85d0b918 100644
--- a/test/network/emulated_network_manager.h
+++ b/test/network/emulated_network_manager.h
@@ -16,7 +16,6 @@
#include "api/test/network_emulation_manager.h"
#include "api/test/time_controller.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/ip_address.h"
#include "rtc_base/network.h"
#include "rtc_base/socket_server.h"
diff --git a/test/network/fake_network_socket_server.cc b/test/network/fake_network_socket_server.cc
index 60dfbe33d5..bee2846be7 100644
--- a/test/network/fake_network_socket_server.cc
+++ b/test/network/fake_network_socket_server.cc
@@ -280,7 +280,7 @@ EmulatedEndpointImpl* FakeNetworkSocketServer::GetEndpointNode(
}
void FakeNetworkSocketServer::Unregister(FakeNetworkSocket* socket) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
sockets_.erase(absl::c_find(sockets_, socket));
}
@@ -297,7 +297,7 @@ rtc::AsyncSocket* FakeNetworkSocketServer::CreateAsyncSocket(int family,
RTC_DCHECK(thread_) << "must be attached to thread before creating sockets";
FakeNetworkSocket* out = new FakeNetworkSocket(this, thread_);
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
sockets_.push_back(out);
}
return out;
diff --git a/test/network/fake_network_socket_server.h b/test/network/fake_network_socket_server.h
index 3a007588e3..2cf4d7c86d 100644
--- a/test/network/fake_network_socket_server.h
+++ b/test/network/fake_network_socket_server.h
@@ -16,9 +16,9 @@
#include "api/units/timestamp.h"
#include "rtc_base/async_socket.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "system_wrappers/include/clock.h"
#include "test/network/network_emulation.h"
@@ -58,7 +58,7 @@ class FakeNetworkSocketServer : public rtc::SocketServer,
rtc::Event wakeup_;
rtc::Thread* thread_ = nullptr;
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::vector<FakeNetworkSocket*> sockets_ RTC_GUARDED_BY(lock_);
};
diff --git a/test/network/network_emulation.cc b/test/network/network_emulation.cc
index 3cb3def2f2..f3e29317ba 100644
--- a/test/network/network_emulation.cc
+++ b/test/network/network_emulation.cc
@@ -196,6 +196,7 @@ EmulatedEndpointImpl::EmulatedEndpointImpl(uint64_t id,
network_->AddIP(ip);
enabled_state_checker_.Detach();
+ stats_.local_addresses.push_back(peer_local_addr_);
}
EmulatedEndpointImpl::~EmulatedEndpointImpl() = default;
@@ -290,8 +291,9 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) {
// process: one peer closed connection, second still sending data.
RTC_LOG(INFO) << "Drop packet: no receiver registered in " << id_
<< " on port " << packet.to.port();
- stats_.packets_dropped++;
- stats_.bytes_dropped += DataSize::Bytes(packet.ip_packet_size());
+ stats_.incoming_stats_per_source[packet.from.ipaddr()].packets_dropped++;
+ stats_.incoming_stats_per_source[packet.from.ipaddr()].bytes_dropped +=
+ DataSize::Bytes(packet.ip_packet_size());
return;
}
// Endpoint assumes frequent calls to bind and unbind methods, so it holds
@@ -325,14 +327,18 @@ EmulatedNetworkStats EmulatedEndpointImpl::stats() {
void EmulatedEndpointImpl::UpdateReceiveStats(const EmulatedIpPacket& packet) {
RTC_DCHECK_RUN_ON(task_queue_);
Timestamp current_time = clock_->CurrentTime();
- if (stats_.first_packet_received_time.IsInfinite()) {
- stats_.first_packet_received_time = current_time;
- stats_.first_received_packet_size =
- DataSize::Bytes(packet.ip_packet_size());
+ if (stats_.incoming_stats_per_source[packet.from.ipaddr()]
+ .first_packet_received_time.IsInfinite()) {
+ stats_.incoming_stats_per_source[packet.from.ipaddr()]
+ .first_packet_received_time = current_time;
+ stats_.incoming_stats_per_source[packet.from.ipaddr()]
+ .first_received_packet_size = DataSize::Bytes(packet.ip_packet_size());
}
- stats_.last_packet_received_time = current_time;
- stats_.packets_received++;
- stats_.bytes_received += DataSize::Bytes(packet.ip_packet_size());
+ stats_.incoming_stats_per_source[packet.from.ipaddr()]
+ .last_packet_received_time = current_time;
+ stats_.incoming_stats_per_source[packet.from.ipaddr()].packets_received++;
+ stats_.incoming_stats_per_source[packet.from.ipaddr()].bytes_received +=
+ DataSize::Bytes(packet.ip_packet_size());
}
EndpointsContainer::EndpointsContainer(
@@ -377,31 +383,34 @@ EmulatedNetworkStats EndpointsContainer::GetStats() const {
EmulatedNetworkStats endpoint_stats = endpoint->stats();
stats.packets_sent += endpoint_stats.packets_sent;
stats.bytes_sent += endpoint_stats.bytes_sent;
- stats.packets_received += endpoint_stats.packets_received;
- stats.bytes_received += endpoint_stats.bytes_received;
- stats.packets_dropped += endpoint_stats.packets_dropped;
- stats.bytes_dropped += endpoint_stats.bytes_dropped;
- if (stats.first_packet_received_time >
- endpoint_stats.first_packet_received_time) {
- stats.first_packet_received_time =
- endpoint_stats.first_packet_received_time;
- stats.first_received_packet_size =
- endpoint_stats.first_received_packet_size;
- }
if (stats.first_packet_sent_time > endpoint_stats.first_packet_sent_time) {
stats.first_packet_sent_time = endpoint_stats.first_packet_sent_time;
stats.first_sent_packet_size = endpoint_stats.first_sent_packet_size;
}
- if (stats.last_packet_received_time.IsInfinite() ||
- stats.last_packet_received_time <
- endpoint_stats.last_packet_received_time) {
- stats.last_packet_received_time =
- endpoint_stats.last_packet_received_time;
- }
- if (stats.last_packet_sent_time.IsInfinite() ||
- stats.last_packet_sent_time < endpoint_stats.last_packet_sent_time) {
+ if (stats.last_packet_sent_time < endpoint_stats.last_packet_sent_time) {
stats.last_packet_sent_time = endpoint_stats.last_packet_sent_time;
}
+ for (const rtc::IPAddress& addr : endpoint_stats.local_addresses) {
+ stats.local_addresses.push_back(addr);
+ }
+ for (auto& entry : endpoint_stats.incoming_stats_per_source) {
+ const EmulatedNetworkIncomingStats& source = entry.second;
+ EmulatedNetworkIncomingStats& in_stats =
+ stats.incoming_stats_per_source[entry.first];
+ in_stats.packets_received += source.packets_received;
+ in_stats.bytes_received += source.bytes_received;
+ in_stats.packets_dropped += source.packets_dropped;
+ in_stats.bytes_dropped += source.bytes_dropped;
+ if (in_stats.first_packet_received_time >
+ source.first_packet_received_time) {
+ in_stats.first_packet_received_time = source.first_packet_received_time;
+ in_stats.first_received_packet_size = source.first_received_packet_size;
+ }
+ if (in_stats.last_packet_received_time <
+ source.last_packet_received_time) {
+ in_stats.last_packet_received_time = source.last_packet_received_time;
+ }
+ }
}
return stats;
}
diff --git a/test/network/network_emulation.h b/test/network/network_emulation.h
index 75e9c2c78a..a811a108ef 100644
--- a/test/network/network_emulation.h
+++ b/test/network/network_emulation.h
@@ -168,7 +168,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint {
uint16_t NextPort() RTC_EXCLUSIVE_LOCKS_REQUIRED(receiver_lock_);
void UpdateReceiveStats(const EmulatedIpPacket& packet);
- rtc::CriticalSection receiver_lock_;
+ rtc::RecursiveCriticalSection receiver_lock_;
rtc::ThreadChecker enabled_state_checker_;
uint64_t id_;
@@ -200,6 +200,7 @@ class EmulatedRoute {
EmulatedEndpointImpl* to;
bool active;
};
+
class EndpointsContainer {
public:
explicit EndpointsContainer(
diff --git a/test/network/network_emulation_unittest.cc b/test/network/network_emulation_unittest.cc
index 58346abb93..ff8539007d 100644
--- a/test/network/network_emulation_unittest.cc
+++ b/test/network/network_emulation_unittest.cc
@@ -19,6 +19,7 @@
#include "call/simulated_network.h"
#include "rtc_base/event.h"
#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/sleep.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -28,6 +29,8 @@ namespace webrtc {
namespace test {
namespace {
+using ::testing::ElementsAreArray;
+
constexpr TimeDelta kNetworkPacketWaitTimeout = TimeDelta::Millis(100);
constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1);
constexpr int kOverheadIpv4Udp = 20 + 8;
@@ -48,12 +51,12 @@ class SocketReader : public sigslot::has_slots<> {
int64_t timestamp;
len_ = socket_->Recv(buf_, size_, &timestamp);
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
received_count_++;
}
int ReceivedCount() {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return received_count_;
}
@@ -64,13 +67,13 @@ class SocketReader : public sigslot::has_slots<> {
size_t size_;
int len_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
int received_count_ RTC_GUARDED_BY(lock_) = 0;
};
class MockReceiver : public EmulatedNetworkReceiverInterface {
public:
- MOCK_METHOD1(OnPacketReceived, void(EmulatedIpPacket packet));
+ MOCK_METHOD(void, OnPacketReceived, (EmulatedIpPacket packet), (override));
};
class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test {
@@ -247,19 +250,56 @@ TEST(NetworkEmulationManagerTest, Run) {
nt1->GetStats([&](EmulatedNetworkStats st) {
EXPECT_EQ(st.packets_sent, 2000l);
EXPECT_EQ(st.bytes_sent.bytes(), single_packet_size * 2000l);
- EXPECT_EQ(st.packets_received, 2000l);
- EXPECT_EQ(st.bytes_received.bytes(), single_packet_size * 2000l);
- EXPECT_EQ(st.packets_dropped, 0l);
- EXPECT_EQ(st.bytes_dropped.bytes(), 0l);
+ EXPECT_THAT(st.local_addresses,
+ ElementsAreArray({alice_endpoint->GetPeerLocalAddress()}));
+ EXPECT_EQ(st.PacketsReceived(), 2000l);
+ EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l);
+ EXPECT_EQ(st.PacketsDropped(), 0l);
+ EXPECT_EQ(st.BytesDropped().bytes(), 0l);
+
+ EXPECT_EQ(st.incoming_stats_per_source[bob_endpoint->GetPeerLocalAddress()]
+ .packets_received,
+ 2000l);
+ EXPECT_EQ(st.incoming_stats_per_source[bob_endpoint->GetPeerLocalAddress()]
+ .bytes_received.bytes(),
+ single_packet_size * 2000l);
+ EXPECT_EQ(st.incoming_stats_per_source[bob_endpoint->GetPeerLocalAddress()]
+ .packets_dropped,
+ 0l);
+ EXPECT_EQ(st.incoming_stats_per_source[bob_endpoint->GetPeerLocalAddress()]
+ .bytes_dropped.bytes(),
+ 0l);
received_stats_count++;
});
nt2->GetStats([&](EmulatedNetworkStats st) {
EXPECT_EQ(st.packets_sent, 2000l);
EXPECT_EQ(st.bytes_sent.bytes(), single_packet_size * 2000l);
- EXPECT_EQ(st.packets_received, 2000l);
- EXPECT_EQ(st.bytes_received.bytes(), single_packet_size * 2000l);
- EXPECT_EQ(st.packets_dropped, 0l);
- EXPECT_EQ(st.bytes_dropped.bytes(), 0l);
+ EXPECT_THAT(st.local_addresses,
+ ElementsAreArray({bob_endpoint->GetPeerLocalAddress()}));
+ EXPECT_EQ(st.PacketsReceived(), 2000l);
+ EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l);
+ EXPECT_EQ(st.PacketsDropped(), 0l);
+ EXPECT_EQ(st.BytesDropped().bytes(), 0l);
+ EXPECT_GT(st.FirstReceivedPacketSize(), DataSize::Zero());
+ EXPECT_TRUE(st.FirstPacketReceivedTime().IsFinite());
+ EXPECT_TRUE(st.LastPacketReceivedTime().IsFinite());
+
+ EXPECT_EQ(
+ st.incoming_stats_per_source[alice_endpoint->GetPeerLocalAddress()]
+ .packets_received,
+ 2000l);
+ EXPECT_EQ(
+ st.incoming_stats_per_source[alice_endpoint->GetPeerLocalAddress()]
+ .bytes_received.bytes(),
+ single_packet_size * 2000l);
+ EXPECT_EQ(
+ st.incoming_stats_per_source[alice_endpoint->GetPeerLocalAddress()]
+ .packets_dropped,
+ 0l);
+ EXPECT_EQ(
+ st.incoming_stats_per_source[alice_endpoint->GetPeerLocalAddress()]
+ .bytes_dropped.bytes(),
+ 0l);
received_stats_count++;
});
ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 2,
diff --git a/test/pc/e2e/BUILD.gn b/test/pc/e2e/BUILD.gn
index 182bbfd307..396552e88f 100644
--- a/test/pc/e2e/BUILD.gn
+++ b/test/pc/e2e/BUILD.gn
@@ -8,627 +8,728 @@
import("../../../webrtc.gni")
-group("e2e") {
- testonly = true
-
- deps = [
- ":default_encoded_image_data_injector",
- ":encoded_image_data_injector_api",
- ":example_video_quality_analyzer",
- ":id_generator",
- ":quality_analyzing_video_decoder",
- ":quality_analyzing_video_encoder",
- ":single_process_encoded_image_data_injector",
- ]
- if (rtc_include_tests) {
- deps += [
- ":peerconnection_quality_test",
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- ]
- }
-}
-
-if (rtc_include_tests) {
- group("e2e_unittests") {
+if (!build_with_chromium) {
+ group("e2e") {
testonly = true
deps = [
- ":default_encoded_image_data_injector_unittest",
- ":default_video_quality_analyzer_test",
- ":peer_connection_e2e_smoke_test",
- ":single_process_encoded_image_data_injector_unittest",
+ ":default_encoded_image_data_injector",
+ ":encoded_image_data_injector_api",
+ ":example_video_quality_analyzer",
+ ":id_generator",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":single_process_encoded_image_data_injector",
]
+ if (rtc_include_tests) {
+ deps += [
+ ":peerconnection_quality_test",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ ]
+ }
}
-}
-
-rtc_library("peer_connection_quality_test_params") {
- visibility = [ "*" ]
- testonly = true
- sources = [ "peer_connection_quality_test_params.h" ]
-
- deps = [
- "../../../api:callfactory_api",
- "../../../api:fec_controller_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:packet_socket_factory",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/transport:network_control",
- "../../../api/transport/media:media_transport_interface",
- "../../../api/video_codecs:video_codecs_api",
- "../../../rtc_base",
- ]
-}
-
-rtc_library("encoded_image_data_injector_api") {
- visibility = [ "*" ]
- testonly = true
- sources = [ "analyzer/video/encoded_image_data_injector.h" ]
-
- deps = [ "../../../api/video:encoded_image" ]
-}
-rtc_library("default_encoded_image_data_injector") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/default_encoded_image_data_injector.cc",
- "analyzer/video/default_encoded_image_data_injector.h",
- ]
-
- deps = [
- ":encoded_image_data_injector_api",
- "../../../api/video:encoded_image",
- "../../../rtc_base:checks",
- "../../../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/memory",
- ]
-}
+ if (rtc_include_tests) {
+ group("e2e_unittests") {
+ testonly = true
-rtc_library("single_process_encoded_image_data_injector") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/single_process_encoded_image_data_injector.cc",
- "analyzer/video/single_process_encoded_image_data_injector.h",
- ]
-
- deps = [
- ":encoded_image_data_injector_api",
- "../../../api/video:encoded_image",
- "../../../rtc_base:checks",
- "../../../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/memory",
- ]
-}
+ deps = [
+ ":default_encoded_image_data_injector_unittest",
+ ":default_video_quality_analyzer_test",
+ ":multi_head_queue_test",
+ ":peer_connection_e2e_smoke_test",
+ ":single_process_encoded_image_data_injector_unittest",
+ ]
+ }
+ }
-rtc_library("id_generator") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/id_generator.cc",
- "analyzer/video/id_generator.h",
- ]
- deps = []
-}
+ rtc_library("peer_connection_quality_test_params") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "peer_connection_quality_test_params.h" ]
-rtc_library("simulcast_dummy_buffer_helper") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/simulcast_dummy_buffer_helper.cc",
- "analyzer/video/simulcast_dummy_buffer_helper.h",
- ]
- deps = [
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- ]
-}
+ deps = [
+ "../../../api:callfactory_api",
+ "../../../api:fec_controller_api",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:packet_socket_factory",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/transport:network_control",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base",
+ ]
+ }
-rtc_library("quality_analyzing_video_decoder") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/quality_analyzing_video_decoder.cc",
- "analyzer/video/quality_analyzing_video_decoder.h",
- ]
- deps = [
- ":encoded_image_data_injector_api",
- ":id_generator",
- ":simulcast_dummy_buffer_helper",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
- "../../../modules/video_coding:video_codec_interface",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
+ rtc_library("encoded_image_data_injector_api") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "analyzer/video/encoded_image_data_injector.h" ]
-rtc_library("quality_analyzing_video_encoder") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/quality_analyzing_video_encoder.cc",
- "analyzer/video/quality_analyzing_video_encoder.h",
- ]
- deps = [
- ":encoded_image_data_injector_api",
- ":id_generator",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
- "../../../modules/video_coding:video_codec_interface",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- ]
-}
+ deps = [ "../../../api/video:encoded_image" ]
+ }
-if (rtc_include_tests) {
- rtc_library("video_quality_analyzer_injection_helper") {
+ rtc_library("default_encoded_image_data_injector") {
visibility = [ "*" ]
testonly = true
sources = [
- "analyzer/video/video_quality_analyzer_injection_helper.cc",
- "analyzer/video/video_quality_analyzer_injection_helper.h",
+ "analyzer/video/default_encoded_image_data_injector.cc",
+ "analyzer/video/default_encoded_image_data_injector.h",
]
+
deps = [
":encoded_image_data_injector_api",
- ":id_generator",
- ":quality_analyzing_video_decoder",
- ":quality_analyzing_video_encoder",
- ":simulcast_dummy_buffer_helper",
- "../..:test_renderer",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:stats_observer_interface",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:checks",
"../../../rtc_base:criticalsection",
- "../../../test:video_test_common",
- "../../../test:video_test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
- rtc_library("echo_emulation") {
+ rtc_library("single_process_encoded_image_data_injector") {
visibility = [ "*" ]
testonly = true
sources = [
- "echo/echo_emulation.cc",
- "echo/echo_emulation.h",
+ "analyzer/video/single_process_encoded_image_data_injector.cc",
+ "analyzer/video/single_process_encoded_image_data_injector.h",
]
+
deps = [
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../modules/audio_device:audio_device_impl",
- "../../../rtc_base:rtc_base_approved",
+ ":encoded_image_data_injector_api",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+ }
+
+ rtc_library("id_generator") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "analyzer/video/id_generator.cc",
+ "analyzer/video/id_generator.h",
]
+ deps = []
}
- rtc_library("test_peer") {
+ rtc_library("simulcast_dummy_buffer_helper") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_peer.cc",
- "test_peer.h",
+ "analyzer/video/simulcast_dummy_buffer_helper.cc",
+ "analyzer/video/simulcast_dummy_buffer_helper.h",
]
deps = [
- ":peer_connection_quality_test_params",
- "../../../api:frame_generator_api",
- "../../../api:scoped_refptr",
- "../../../modules/audio_processing:api",
- "../../../pc:peerconnection_wrapper",
- "//third_party/abseil-cpp/absl/memory",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
]
}
- rtc_library("test_peer_factory") {
+ rtc_library("quality_analyzing_video_decoder") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_peer_factory.cc",
- "test_peer_factory.h",
+ "analyzer/video/quality_analyzing_video_decoder.cc",
+ "analyzer/video/quality_analyzing_video_decoder.h",
]
deps = [
- ":echo_emulation",
- ":peer_configurer",
- ":peer_connection_quality_test_params",
- ":quality_analyzing_video_encoder",
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- "../..:copy_to_file_audio_capturer",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log:rtc_event_log_factory",
- "../../../api/task_queue:default_task_queue_factory",
- "../../../api/video_codecs:builtin_video_decoder_factory",
- "../../../api/video_codecs:builtin_video_encoder_factory",
- "../../../media:rtc_audio_video",
- "../../../media:rtc_media_engine_defaults",
- "../../../modules/audio_device:audio_device_impl",
- "../../../modules/audio_processing/aec_dump",
- "../../../p2p:rtc_p2p",
- "../../../rtc_base:rtc_task_queue",
- "//third_party/abseil-cpp/absl/memory",
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ ":simulcast_dummy_buffer_helper",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../modules/video_coding:video_codec_interface",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
- rtc_library("media_helper") {
+ rtc_library("quality_analyzing_video_encoder") {
visibility = [ "*" ]
testonly = true
sources = [
- "media/media_helper.cc",
- "media/media_helper.h",
- "media/test_video_capturer_video_track_source.h",
+ "analyzer/video/quality_analyzing_video_encoder.cc",
+ "analyzer/video/quality_analyzing_video_encoder.h",
]
deps = [
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- "../..:fileutils",
- "../..:platform_video_capturer",
- "../..:video_test_common",
- "../../../api:create_frame_generator",
- "../../../api:frame_generator_api",
- "../../../api:media_stream_interface",
- "../../../api:peer_connection_quality_test_fixture_api",
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:encoded_image",
"../../../api/video:video_frame",
- "../../../pc:peerconnection",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../modules/video_coding:video_codec_interface",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
- rtc_library("peer_configurer") {
+ if (rtc_include_tests) {
+ rtc_library("video_quality_analyzer_injection_helper") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "analyzer/video/video_quality_analyzer_injection_helper.cc",
+ "analyzer/video/video_quality_analyzer_injection_helper.h",
+ ]
+ deps = [
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":simulcast_dummy_buffer_helper",
+ "../..:test_renderer",
+ "../../../api:array_view",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:stats_observer_interface",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../test:video_test_common",
+ "../../../test:video_test_support",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_library("echo_emulation") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "echo/echo_emulation.cc",
+ "echo/echo_emulation.h",
+ ]
+ deps = [
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../rtc_base:rtc_base_approved",
+ ]
+ }
+
+ rtc_library("test_peer") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_peer.cc",
+ "test_peer.h",
+ ]
+ deps = [
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ "../../../api:frame_generator_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:scoped_refptr",
+ "../../../modules/audio_processing:api",
+ "../../../pc:peerconnection_wrapper",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:variant",
+ ]
+ }
+
+ rtc_library("test_peer_factory") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_peer_factory.cc",
+ "test_peer_factory.h",
+ ]
+ deps = [
+ ":echo_emulation",
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ ":quality_analyzing_video_encoder",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ "../..:copy_to_file_audio_capturer",
+ "../../../api:create_time_controller",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:time_controller",
+ "../../../api/rtc_event_log:rtc_event_log_factory",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../media:rtc_audio_video",
+ "../../../media:rtc_media_engine_defaults",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../modules/audio_processing/aec_dump",
+ "../../../p2p:rtc_p2p",
+ "../../../rtc_base:rtc_task_queue",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_library("media_helper") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "media/media_helper.cc",
+ "media/media_helper.h",
+ "media/test_video_capturer_video_track_source.h",
+ ]
+ deps = [
+ ":peer_configurer",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ "../..:fileutils",
+ "../..:platform_video_capturer",
+ "../..:video_test_common",
+ "../../../api:create_frame_generator",
+ "../../../api:frame_generator_api",
+ "../../../api:media_stream_interface",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/video:video_frame",
+ "../../../pc:peerconnection",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ]
+ }
+
+ rtc_library("peer_configurer") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "peer_configurer.cc",
+ "peer_configurer.h",
+ ]
+ deps = [
+ ":peer_connection_quality_test_params",
+ "../..:fileutils",
+ "../../../api:callfactory_api",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:fec_controller_api",
+ "../../../api:packet_socket_factory",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/transport:network_control",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("test_activities_executor") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_activities_executor.cc",
+ "test_activities_executor.h",
+ ]
+ deps = [
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../rtc_base/task_utils:repeating_task",
+ "../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("peerconnection_quality_test") {
+ visibility = [ "*" ]
+ testonly = true
+
+ sources = [
+ "peer_connection_quality_test.cc",
+ "peer_connection_quality_test.h",
+ ]
+ deps = [
+ ":analyzer_helper",
+ ":cross_media_metrics_reporter",
+ ":default_audio_quality_analyzer",
+ ":default_video_quality_analyzer",
+ ":media_helper",
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ ":sdp_changer",
+ ":single_process_encoded_image_data_injector",
+ ":stats_poller",
+ ":test_activities_executor",
+ ":test_peer",
+ ":test_peer_factory",
+ ":video_quality_analyzer_injection_helper",
+ ":video_quality_metrics_reporter",
+ "../..:field_trial",
+ "../..:fileutils",
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_event_log_output_file",
+ "../../../api:scoped_refptr",
+ "../../../api:time_controller",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../pc:pc_test_utils",
+ "../../../pc:peerconnection",
+ "../../../rtc_base",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base:safe_conversions",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers",
+ "../../../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("single_process_encoded_image_data_injector_unittest") {
+ testonly = true
+ sources = [
+ "analyzer/video/single_process_encoded_image_data_injector_unittest.cc",
+ ]
+ deps = [
+ ":single_process_encoded_image_data_injector",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ ]
+ }
+
+ rtc_library("default_encoded_image_data_injector_unittest") {
+ testonly = true
+ sources =
+ [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ]
+ deps = [
+ ":default_encoded_image_data_injector",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ ]
+ }
+
+ peer_connection_e2e_smoke_test_resources = [
+ "../../../resources/pc_quality_smoke_test_alice_source.wav",
+ "../../../resources/pc_quality_smoke_test_bob_source.wav",
+ ]
+ if (is_ios) {
+ bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") {
+ testonly = true
+ sources = peer_connection_e2e_smoke_test_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("peer_connection_e2e_smoke_test") {
+ testonly = true
+
+ sources = [ "peer_connection_e2e_smoke_test.cc" ]
+ deps = [
+ ":default_audio_quality_analyzer",
+ ":default_video_quality_analyzer",
+ ":network_quality_metrics_reporter",
+ "../../../api:callfactory_api",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:create_peerconnection_quality_test_fixture",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:scoped_refptr",
+ "../../../api:simulated_network_api",
+ "../../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../../api/audio_codecs:builtin_audio_encoder_factory",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../call:simulated_network",
+ "../../../media:rtc_audio_video",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:pc_test_utils",
+ "../../../pc:peerconnection_wrapper",
+ "../../../rtc_base",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_event",
+ "../../../system_wrappers:field_trial",
+ "../../../test:field_trial",
+ "../../../test:fileutils",
+ "../../../test:test_support",
+ ]
+ data = peer_connection_e2e_smoke_test_resources
+ if (is_ios) {
+ deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ]
+ }
+ }
+
+ rtc_library("stats_poller") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "stats_poller.cc",
+ "stats_poller.h",
+ ]
+ deps = [
+ ":test_peer",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../rtc_base:logging",
+ ]
+ }
+
+ rtc_library("default_video_quality_analyzer_test") {
+ testonly = true
+ sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer",
+ "../..:test_support",
+ "../../../api:create_frame_generator",
+ "../../../api:rtp_packet_info",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../../rtc_base:stringutils",
+ "../../../system_wrappers",
+ ]
+ }
+
+ rtc_library("multi_head_queue_test") {
+ testonly = true
+ sources = [ "analyzer/video/multi_head_queue_test.cc" ]
+ deps = [
+ ":multi_head_queue",
+ "../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+ }
+
+ rtc_library("analyzer_helper") {
visibility = [ "*" ]
- testonly = true
sources = [
- "peer_configurer.cc",
- "peer_configurer.h",
+ "analyzer_helper.cc",
+ "analyzer_helper.h",
]
deps = [
- ":peer_connection_quality_test_params",
- "../..:fileutils",
- "../../../api:callfactory_api",
- "../../../api:create_peer_connection_quality_test_frame_generator",
- "../../../api:fec_controller_api",
- "../../../api:packet_socket_factory",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/transport:network_control",
- "../../../api/transport/media:media_transport_interface",
- "../../../api/video_codecs:video_codecs_api",
- "../../../rtc_base",
- "//third_party/abseil-cpp/absl/strings",
+ "../../../api:track_id_stream_info_map",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base/synchronization:sequence_checker",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
- rtc_library("test_activities_executor") {
+ rtc_library("default_audio_quality_analyzer") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_activities_executor.cc",
- "test_activities_executor.h",
+ "analyzer/audio/default_audio_quality_analyzer.cc",
+ "analyzer/audio/default_audio_quality_analyzer.h",
]
+
deps = [
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../api:track_id_stream_info_map",
"../../../api/units:time_delta",
"../../../api/units:timestamp",
- "../../../rtc_base:checks",
"../../../rtc_base:criticalsection",
"../../../rtc_base:logging",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:task_queue_for_test",
- "../../../rtc_base/task_utils:repeating_task",
- "../../../system_wrappers",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
- rtc_library("peerconnection_quality_test") {
+ rtc_library("example_video_quality_analyzer") {
visibility = [ "*" ]
testonly = true
+ sources = [
+ "analyzer/video/example_video_quality_analyzer.cc",
+ "analyzer/video/example_video_quality_analyzer.h",
+ ]
+
+ deps = [
+ "../../../api:array_view",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_rtp_headers",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ }
+ rtc_library("video_quality_metrics_reporter") {
+ visibility = [ "*" ]
+
+ testonly = true
sources = [
- "peer_connection_quality_test.cc",
- "peer_connection_quality_test.h",
+ "analyzer/video/video_quality_metrics_reporter.cc",
+ "analyzer/video/video_quality_metrics_reporter.h",
]
deps = [
- ":analyzer_helper",
- ":default_audio_quality_analyzer",
- ":default_video_quality_analyzer",
- ":media_helper",
- ":peer_configurer",
- ":peer_connection_quality_test_params",
- ":sdp_changer",
- ":single_process_encoded_image_data_injector",
- ":stats_poller",
- ":test_activities_executor",
- ":test_peer",
- ":test_peer_factory",
- ":video_quality_analyzer_injection_helper",
- ":video_quality_metrics_reporter",
- "../..:field_trial",
- "../..:fileutils",
"../..:perf_test",
- "../../../api:audio_quality_analyzer_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:media_stream_interface",
"../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:rtc_event_log_output_file",
- "../../../api:scoped_refptr",
- "../../../api:video_quality_analyzer_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/task_queue:default_task_queue_factory",
+ "../../../api:rtc_stats_api",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/units:data_rate",
+ "../../../api/units:data_size",
"../../../api/units:time_delta",
"../../../api/units:timestamp",
- "../../../pc:pc_test_utils",
- "../../../pc:peerconnection",
- "../../../rtc_base",
- "../../../rtc_base:gunit_helpers",
- "../../../rtc_base:macromagic",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:safe_conversions",
- "../../../rtc_base:task_queue_for_test",
- "../../../system_wrappers",
- "../../../system_wrappers:field_trial",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/synchronization:mutex",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
- rtc_library("single_process_encoded_image_data_injector_unittest") {
+ rtc_library("default_video_quality_analyzer") {
+ visibility = [ "*" ]
+
testonly = true
sources = [
- "analyzer/video/single_process_encoded_image_data_injector_unittest.cc",
+ "analyzer/video/default_video_quality_analyzer.cc",
+ "analyzer/video/default_video_quality_analyzer.h",
]
+
deps = [
- ":single_process_encoded_image_data_injector",
+ ":multi_head_queue",
+ "../..:perf_test",
+ "../../../api:array_view",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
"../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../api/video:video_rtp_headers",
+ "../../../common_video",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
"../../../rtc_base:rtc_base_approved",
- "../../../test:test_support",
+ "../../../rtc_base:rtc_base_tests_utils",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base:timeutils",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers",
]
}
- rtc_library("default_encoded_image_data_injector_unittest") {
+ rtc_library("network_quality_metrics_reporter") {
+ visibility = [ "*" ]
testonly = true
- sources =
- [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ]
+ sources = [
+ "network_quality_metrics_reporter.cc",
+ "network_quality_metrics_reporter.h",
+ ]
deps = [
- ":default_encoded_image_data_injector",
- "../../../api/video:encoded_image",
- "../../../rtc_base:rtc_base_approved",
- "../../../test:test_support",
+ "../..:perf_test",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/units:data_size",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers:field_trial",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
- peer_connection_e2e_smoke_test_resources = [
- "../../../resources/pc_quality_smoke_test_alice_source.wav",
- "../../../resources/pc_quality_smoke_test_bob_source.wav",
- ]
- if (is_ios) {
- bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") {
- testonly = true
- sources = peer_connection_e2e_smoke_test_resources
- outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
- }
- }
-
- rtc_library("peer_connection_e2e_smoke_test") {
+ rtc_library("cross_media_metrics_reporter") {
+ visibility = [ "*" ]
testonly = true
-
- sources = [ "peer_connection_e2e_smoke_test.cc" ]
+ sources = [
+ "cross_media_metrics_reporter.cc",
+ "cross_media_metrics_reporter.h",
+ ]
deps = [
- ":default_audio_quality_analyzer",
- ":default_video_quality_analyzer",
- ":network_quality_metrics_reporter",
- "../../../api:callfactory_api",
- "../../../api:create_network_emulation_manager",
- "../../../api:create_peer_connection_quality_test_frame_generator",
- "../../../api:create_peerconnection_quality_test_fixture",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:media_stream_interface",
+ "../..:perf_test",
"../../../api:network_emulation_manager_api",
"../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:scoped_refptr",
- "../../../api:simulated_network_api",
- "../../../api/audio_codecs:builtin_audio_decoder_factory",
- "../../../api/audio_codecs:builtin_audio_encoder_factory",
- "../../../api/video_codecs:builtin_video_decoder_factory",
- "../../../api/video_codecs:builtin_video_encoder_factory",
- "../../../call:simulated_network",
- "../../../media:rtc_audio_video",
- "../../../modules/audio_device:audio_device_impl",
- "../../../p2p:rtc_p2p",
- "../../../pc:pc_test_utils",
- "../../../pc:peerconnection_wrapper",
- "../../../rtc_base",
- "../../../rtc_base:gunit_helpers",
- "../../../rtc_base:logging",
+ "../../../api:rtc_stats_api",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:criticalsection",
"../../../rtc_base:rtc_event",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/synchronization:mutex",
"../../../system_wrappers:field_trial",
- "../../../test:field_trial",
- "../../../test:fileutils",
- "../../../test:test_support",
]
- data = peer_connection_e2e_smoke_test_resources
- if (is_ios) {
- deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ]
- }
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
- rtc_library("stats_poller") {
+ rtc_library("sdp_changer") {
visibility = [ "*" ]
testonly = true
sources = [
- "stats_poller.cc",
- "stats_poller.h",
+ "sdp/sdp_changer.cc",
+ "sdp/sdp_changer.h",
]
deps = [
- ":test_peer",
+ "../../../api:array_view",
"../../../api:libjingle_peerconnection_api",
- "../../../api:stats_observer_interface",
- "../../../rtc_base:logging",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtp_parameters",
+ "../../../media:rtc_media_base",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:peerconnection",
+ "../../../pc:rtc_pc_base",
+ "../../../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
- rtc_library("default_video_quality_analyzer_test") {
+ rtc_library("multi_head_queue") {
+ visibility = [ "*" ]
testonly = true
- sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ]
- deps = [
- ":default_video_quality_analyzer",
- "../..:test_support",
- "../../../api:create_frame_generator",
- "../../../api:rtp_packet_info",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../modules/rtp_rtcp:rtp_rtcp_format",
- "../../../system_wrappers",
- ]
+ sources = [ "analyzer/video/multi_head_queue.h" ]
+ deps = [ "../../../rtc_base:checks" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
-
-rtc_library("analyzer_helper") {
- visibility = [ "*" ]
- sources = [
- "analyzer_helper.cc",
- "analyzer_helper.h",
- ]
- deps = [
- "../../../api:track_id_stream_label_map",
- "../../../rtc_base:macromagic",
- "../../../rtc_base/synchronization:sequence_checker",
- ]
-}
-
-rtc_library("default_audio_quality_analyzer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/audio/default_audio_quality_analyzer.cc",
- "analyzer/audio/default_audio_quality_analyzer.h",
- ]
-
- deps = [
- "../..:perf_test",
- "../../../api:audio_quality_analyzer_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:stats_observer_interface",
- "../../../api:track_id_stream_label_map",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "../../../rtc_base:rtc_numerics",
- ]
-}
-
-rtc_library("example_video_quality_analyzer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/example_video_quality_analyzer.cc",
- "analyzer/video/example_video_quality_analyzer.h",
- ]
-
- deps = [
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- ]
-}
-
-rtc_library("video_quality_metrics_reporter") {
- visibility = [ "*" ]
-
- testonly = true
- sources = [
- "analyzer/video/video_quality_metrics_reporter.cc",
- "analyzer/video/video_quality_metrics_reporter.h",
- ]
- deps = [
- "../..:perf_test",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:rtc_numerics",
- ]
-}
-
-rtc_library("default_video_quality_analyzer") {
- visibility = [ "*" ]
-
- testonly = true
- sources = [
- "analyzer/video/default_video_quality_analyzer.cc",
- "analyzer/video/default_video_quality_analyzer.h",
- ]
-
- deps = [
- "../..:perf_test",
- "../../../api:video_quality_analyzer_api",
- "../../../api/units:time_delta",
- "../../../api/units:timestamp",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../api/video:video_rtp_headers",
- "../../../common_video",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:rtc_base_tests_utils",
- "../../../rtc_base:rtc_event",
- "../../../rtc_base:rtc_numerics",
- "../../../rtc_base:timeutils",
- "../../../system_wrappers",
- ]
-}
-
-rtc_library("network_quality_metrics_reporter") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "network_quality_metrics_reporter.cc",
- "network_quality_metrics_reporter.h",
- ]
- deps = [
- "../..:perf_test",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:network_emulation_manager_api",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:rtc_event",
- "../../../system_wrappers:field_trial",
- ]
-}
-
-rtc_library("sdp_changer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "sdp/sdp_changer.cc",
- "sdp/sdp_changer.h",
- ]
- deps = [
- "../../../api:array_view",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:rtp_parameters",
- "../../../media:rtc_media_base",
- "../../../p2p:rtc_p2p",
- "../../../pc:peerconnection",
- "../../../pc:rtc_pc_base",
- "../../../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/strings:strings",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
index b8f1740e46..8830436b09 100644
--- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
+++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
@@ -10,87 +10,103 @@
#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
-#include "api/stats_types.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-static const char kStatsAudioMediaType[] = "audio";
-
-} // namespace
-
-void DefaultAudioQualityAnalyzer::Start(
- std::string test_case_name,
- TrackIdStreamLabelMap* analyzer_helper) {
+void DefaultAudioQualityAnalyzer::Start(std::string test_case_name,
+ TrackIdStreamInfoMap* analyzer_helper) {
test_case_name_ = std::move(test_case_name);
analyzer_helper_ = analyzer_helper;
}
void DefaultAudioQualityAnalyzer::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- for (const StatsReport* stats_report : stats_reports) {
- // NetEq stats are only present in kStatsReportTypeSsrc reports, so all
- // other reports are just ignored.
- if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeSsrc) {
- continue;
- }
- // Ignoring stats reports of "video" SSRC.
- const webrtc::StatsReport::Value* media_type = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameMediaType);
- RTC_CHECK(media_type);
- if (strcmp(media_type->static_string_val(), kStatsAudioMediaType) != 0) {
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ // TODO(https://crbug.com/webrtc/11683): use "inbound-rtp" instead of "track"
+ // stats when required audio metrics moved there
+ auto stats = report->GetStatsOfType<RTCMediaStreamTrackStats>();
+
+ for (auto& stat : stats) {
+ if (!stat->kind.is_defined() ||
+ !(*stat->kind == RTCMediaStreamTrackKind::kAudio) ||
+ !*stat->remote_source) {
continue;
}
- if (stats_report->FindValue(
- webrtc::StatsReport::kStatsValueNameBytesSent)) {
- // If kStatsValueNameBytesSent is present, it means it's a send stream,
- // but we need audio metrics for receive stream, so skip it.
- continue;
+
+ StatsSample sample;
+ sample.total_samples_received =
+ stat->total_samples_received.ValueOrDefault(0ul);
+ sample.concealed_samples = stat->concealed_samples.ValueOrDefault(0ul);
+ sample.removed_samples_for_acceleration =
+ stat->removed_samples_for_acceleration.ValueOrDefault(0ul);
+ sample.inserted_samples_for_deceleration =
+ stat->inserted_samples_for_deceleration.ValueOrDefault(0ul);
+ sample.silent_concealed_samples =
+ stat->silent_concealed_samples.ValueOrDefault(0ul);
+ sample.jitter_buffer_delay =
+ TimeDelta::Seconds(stat->jitter_buffer_delay.ValueOrDefault(0.));
+ sample.jitter_buffer_target_delay =
+ TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.));
+ sample.jitter_buffer_emitted_count =
+ stat->jitter_buffer_emitted_count.ValueOrDefault(0ul);
+
+ const std::string stream_label = std::string(
+ analyzer_helper_->GetStreamLabelFromTrackId(*stat->track_identifier));
+
+ MutexLock lock(&lock_);
+ StatsSample prev_sample = last_stats_sample_[stream_label];
+ RTC_CHECK_GE(sample.total_samples_received,
+ prev_sample.total_samples_received);
+ double total_samples_diff = static_cast<double>(
+ sample.total_samples_received - prev_sample.total_samples_received);
+ if (total_samples_diff == 0) {
+ return;
}
- const webrtc::StatsReport::Value* expand_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameExpandRate);
- const webrtc::StatsReport::Value* accelerate_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameAccelerateRate);
- const webrtc::StatsReport::Value* preemptive_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNamePreemptiveExpandRate);
- const webrtc::StatsReport::Value* speech_expand_rate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameSpeechExpandRate);
- const webrtc::StatsReport::Value* preferred_buffer_size_ms =
- stats_report->FindValue(StatsReport::StatsValueName::
- kStatsValueNamePreferredJitterBufferMs);
- RTC_CHECK(expand_rate);
- RTC_CHECK(accelerate_rate);
- RTC_CHECK(preemptive_rate);
- RTC_CHECK(speech_expand_rate);
- RTC_CHECK(preferred_buffer_size_ms);
-
- const std::string& stream_label =
- GetStreamLabelFromStatsReport(stats_report);
-
- rtc::CritScope crit(&lock_);
AudioStreamStats& audio_stream_stats = streams_stats_[stream_label];
- audio_stream_stats.expand_rate.AddSample(expand_rate->float_val());
- audio_stream_stats.accelerate_rate.AddSample(accelerate_rate->float_val());
- audio_stream_stats.preemptive_rate.AddSample(preemptive_rate->float_val());
+ audio_stream_stats.expand_rate.AddSample(
+ (sample.concealed_samples - prev_sample.concealed_samples) /
+ total_samples_diff);
+ audio_stream_stats.accelerate_rate.AddSample(
+ (sample.removed_samples_for_acceleration -
+ prev_sample.removed_samples_for_acceleration) /
+ total_samples_diff);
+ audio_stream_stats.preemptive_rate.AddSample(
+ (sample.inserted_samples_for_deceleration -
+ prev_sample.inserted_samples_for_deceleration) /
+ total_samples_diff);
+
+ int64_t speech_concealed_samples =
+ sample.concealed_samples - sample.silent_concealed_samples;
+ int64_t prev_speech_concealed_samples =
+ prev_sample.concealed_samples - prev_sample.silent_concealed_samples;
audio_stream_stats.speech_expand_rate.AddSample(
- speech_expand_rate->float_val());
- audio_stream_stats.preferred_buffer_size_ms.AddSample(
- preferred_buffer_size_ms->int_val());
- }
-}
+ (speech_concealed_samples - prev_speech_concealed_samples) /
+ total_samples_diff);
+
+ int64_t jitter_buffer_emitted_count_diff =
+ sample.jitter_buffer_emitted_count -
+ prev_sample.jitter_buffer_emitted_count;
+ if (jitter_buffer_emitted_count_diff > 0) {
+ TimeDelta jitter_buffer_delay_diff =
+ sample.jitter_buffer_delay - prev_sample.jitter_buffer_delay;
+ TimeDelta jitter_buffer_target_delay_diff =
+ sample.jitter_buffer_target_delay -
+ prev_sample.jitter_buffer_target_delay;
+ audio_stream_stats.average_jitter_buffer_delay_ms.AddSample(
+ jitter_buffer_delay_diff.ms<double>() /
+ jitter_buffer_emitted_count_diff);
+ audio_stream_stats.preferred_buffer_size_ms.AddSample(
+ jitter_buffer_target_delay_diff.ms<double>() /
+ jitter_buffer_emitted_count_diff);
+ }
-const std::string& DefaultAudioQualityAnalyzer::GetStreamLabelFromStatsReport(
- const StatsReport* stats_report) const {
- const webrtc::StatsReport::Value* report_track_id = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameTrackId);
- RTC_CHECK(report_track_id);
- return analyzer_helper_->GetStreamLabelFromTrackId(
- report_track_id->string_val());
+ last_stats_sample_[stream_label] = sample;
+ }
}
std::string DefaultAudioQualityAnalyzer::GetTestCaseName(
@@ -100,7 +116,7 @@ std::string DefaultAudioQualityAnalyzer::GetTestCaseName(
void DefaultAudioQualityAnalyzer::Stop() {
using ::webrtc::test::ImproveDirection;
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
for (auto& item : streams_stats_) {
ReportResult("expand_rate", item.first, item.second.expand_rate, "unitless",
ImproveDirection::kSmallerIsBetter);
@@ -111,6 +127,9 @@ void DefaultAudioQualityAnalyzer::Stop() {
ReportResult("speech_expand_rate", item.first,
item.second.speech_expand_rate, "unitless",
ImproveDirection::kSmallerIsBetter);
+ ReportResult("average_jitter_buffer_delay_ms", item.first,
+ item.second.average_jitter_buffer_delay_ms, "ms",
+ ImproveDirection::kNone);
ReportResult("preferred_buffer_size_ms", item.first,
item.second.preferred_buffer_size_ms, "ms",
ImproveDirection::kNone);
@@ -119,7 +138,7 @@ void DefaultAudioQualityAnalyzer::Stop() {
std::map<std::string, AudioStreamStats>
DefaultAudioQualityAnalyzer::GetAudioStreamsStats() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return streams_stats_;
}
diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
index 33aaefd4c3..dd6280ecfe 100644
--- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
+++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
@@ -14,11 +14,12 @@
#include <map>
#include <string>
-#include "api/stats_types.h"
+#include "absl/strings/string_view.h"
#include "api/test/audio_quality_analyzer_interface.h"
-#include "api/test/track_id_stream_label_map.h"
-#include "rtc_base/critical_section.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/time_delta.h"
#include "rtc_base/numerics/samples_stats_counter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
@@ -29,25 +30,34 @@ struct AudioStreamStats {
SamplesStatsCounter accelerate_rate;
SamplesStatsCounter preemptive_rate;
SamplesStatsCounter speech_expand_rate;
+ SamplesStatsCounter average_jitter_buffer_delay_ms;
SamplesStatsCounter preferred_buffer_size_ms;
};
-// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as
-// bugs.webrtc.org/10428 is fixed.
class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface {
public:
void Start(std::string test_case_name,
- TrackIdStreamLabelMap* analyzer_helper) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override;
+ TrackIdStreamInfoMap* analyzer_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void Stop() override;
// Returns audio quality stats per stream label.
std::map<std::string, AudioStreamStats> GetAudioStreamsStats() const;
private:
- const std::string& GetStreamLabelFromStatsReport(
- const StatsReport* stats_report) const;
+ struct StatsSample {
+ uint64_t total_samples_received = 0;
+ uint64_t concealed_samples = 0;
+ uint64_t removed_samples_for_acceleration = 0;
+ uint64_t inserted_samples_for_deceleration = 0;
+ uint64_t silent_concealed_samples = 0;
+ TimeDelta jitter_buffer_delay = TimeDelta::Zero();
+ TimeDelta jitter_buffer_target_delay = TimeDelta::Zero();
+ uint64_t jitter_buffer_emitted_count = 0;
+ };
+
std::string GetTestCaseName(const std::string& stream_label) const;
void ReportResult(const std::string& metric_name,
const std::string& stream_label,
@@ -56,10 +66,11 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface {
webrtc::test::ImproveDirection improve_direction) const;
std::string test_case_name_;
- TrackIdStreamLabelMap* analyzer_helper_;
+ TrackIdStreamInfoMap* analyzer_helper_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
std::map<std::string, AudioStreamStats> streams_stats_ RTC_GUARDED_BY(lock_);
+ std::map<std::string, StatsSample> last_stats_sample_ RTC_GUARDED_BY(lock_);
};
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h
index f4bd81ce90..6ed87f5ec4 100644
--- a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h
+++ b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h
@@ -19,7 +19,6 @@
#include <vector>
#include "api/video/encoded_image.h"
-#include "rtc_base/critical_section.h"
#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
namespace webrtc {
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
index 786509ddb7..000d1654a6 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
@@ -14,11 +14,13 @@
#include <memory>
#include <utility>
+#include "api/array_view.h"
#include "api/units/time_delta.h"
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
@@ -35,6 +37,7 @@ void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
RTC_LOG(INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded;
RTC_LOG(INFO) << "[" << name << "] Encoded : " << counters.encoded;
RTC_LOG(INFO) << "[" << name << "] Received : " << counters.received;
+ RTC_LOG(INFO) << "[" << name << "] Decoded : " << counters.decoded;
RTC_LOG(INFO) << "[" << name << "] Rendered : " << counters.rendered;
RTC_LOG(INFO) << "[" << name << "] Dropped : " << counters.dropped;
}
@@ -46,6 +49,15 @@ void LogStreamInternalStats(const std::string& name, const StreamStats& stats) {
<< stats.dropped_before_encoder;
}
+template <typename T>
+absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
+ auto it = map.find(key);
+ if (it == map.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
} // namespace
void RateCounter::AddEvent(Timestamp event_time) {
@@ -65,20 +77,72 @@ double RateCounter::GetEventsPerSecond() const {
(event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
}
+std::string StatsKey::ToString() const {
+ rtc::StringBuilder out;
+ out << stream_label << "_" << sender << "_" << receiver;
+ return out.str();
+}
+
+bool operator<(const StatsKey& a, const StatsKey& b) {
+ if (a.stream_label != b.stream_label) {
+ return a.stream_label < b.stream_label;
+ }
+ if (a.sender != b.sender) {
+ return a.sender < b.sender;
+ }
+ return a.receiver < b.receiver;
+}
+
+bool operator==(const StatsKey& a, const StatsKey& b) {
+ return a.stream_label == b.stream_label && a.sender == b.sender &&
+ a.receiver == b.receiver;
+}
+
+std::string InternalStatsKey::ToString() const {
+ rtc::StringBuilder out;
+ out << "stream=" << stream << "_sender=" << sender
+ << "_receiver=" << receiver;
+ return out.str();
+}
+
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
+ if (a.stream != b.stream) {
+ return a.stream < b.stream;
+ }
+ if (a.sender != b.sender) {
+ return a.sender < b.sender;
+ }
+ return a.receiver < b.receiver;
+}
+
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
+ return a.stream == b.stream && a.sender == b.sender &&
+ a.receiver == b.receiver;
+}
+
+DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
+ webrtc::Clock* clock,
+ DefaultVideoQualityAnalyzerOptions options)
+ : options_(options), clock_(clock) {}
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled,
- int max_frames_in_flight_per_stream_count)
- : heavy_metrics_computation_enabled_(heavy_metrics_computation_enabled),
- max_frames_in_flight_per_stream_count_(
- max_frames_in_flight_per_stream_count),
- clock_(Clock::GetRealTimeClock()) {}
+ size_t max_frames_in_flight_per_stream_count)
+ : clock_(Clock::GetRealTimeClock()) {
+ options_.heavy_metrics_computation_enabled =
+ heavy_metrics_computation_enabled;
+ options_.max_frames_in_flight_per_stream_count =
+ max_frames_in_flight_per_stream_count;
+}
DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
Stop();
}
-void DefaultVideoQualityAnalyzer::Start(std::string test_case_name,
- int max_threads_count) {
+void DefaultVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
test_label_ = std::move(test_case_name);
+ peers_ = std::make_unique<NamesCollection>(peer_names);
for (int i = 0; i < max_threads_count; i++) {
auto thread = std::make_unique<rtc::PlatformThread>(
&DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this,
@@ -88,7 +152,7 @@ void DefaultVideoQualityAnalyzer::Start(std::string test_case_name,
thread_pool_.push_back(std::move(thread));
}
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
RTC_CHECK(start_time_.IsMinusInfinity());
state_ = State::kActive;
@@ -98,73 +162,116 @@ void DefaultVideoQualityAnalyzer::Start(std::string test_case_name,
}
uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {
// |next_frame_id| is atomic, so we needn't lock here.
uint16_t frame_id = next_frame_id_++;
Timestamp start_time = Timestamp::MinusInfinity();
+ size_t peer_index = peers_->index(peer_name);
+ size_t stream_index;
{
- rtc::CritScope crit(&lock_);
- // Create a local copy of start_time_ to access it under |comparison_lock_|
- // without holding a |lock_|
+ MutexLock lock(&lock_);
+ // Create a local copy of start_time_ to access it under
+ // |comparison_lock_| without holding a |lock_|
start_time = start_time_;
+ stream_index = streams_.AddIfAbsent(stream_label);
}
{
// Ensure stats for this stream exists.
- rtc::CritScope crit(&comparison_lock_);
- if (stream_stats_.find(stream_label) == stream_stats_.end()) {
- stream_stats_.insert({stream_label, StreamStats()});
- // Assume that the first freeze was before first stream frame captured.
- // This way time before the first freeze would be counted as time between
- // freezes.
- stream_last_freeze_end_time_.insert({stream_label, start_time});
+ MutexLock lock(&comparison_lock_);
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == peer_index) {
+ continue;
+ }
+ InternalStatsKey stats_key(stream_index, peer_index, i);
+ if (stream_stats_.find(stats_key) == stream_stats_.end()) {
+ stream_stats_.insert({stats_key, StreamStats()});
+ // Assume that the first freeze was before first stream frame captured.
+ // This way time before the first freeze would be counted as time
+ // between freezes.
+ stream_last_freeze_end_time_.insert({stats_key, start_time});
+ } else {
+ // When we see some |stream_label| for the first time we need to create
+ // stream stats object for it and set up some states, but we need to do
+ // it only once and for all receivers, so on the next frame on the same
+ // |stream_label| we can be sure, that it's already done and we needn't
+ // to scan though all peers again.
+ break;
+ }
}
}
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
+ stream_to_sender_[stream_index] = peer_index;
frame_counters_.captured++;
- stream_frame_counters_[stream_label].captured++;
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_[key].captured++;
+ }
+ }
- StreamState* state = &stream_states_[stream_label];
+ auto state_it = stream_states_.find(stream_index);
+ if (state_it == stream_states_.end()) {
+ stream_states_.emplace(stream_index,
+ StreamState(peer_index, peers_->size()));
+ }
+ StreamState* state = &stream_states_.at(stream_index);
state->PushBack(frame_id);
// Update frames in flight info.
auto it = captured_frames_in_flight_.find(frame_id);
if (it != captured_frames_in_flight_.end()) {
- // We overflow uint16_t and hit previous frame id and this frame is still
- // in flight. It means that this stream wasn't rendered for long time and
- // we need to process existing frame as dropped.
- auto stats_it = frame_stats_.find(frame_id);
- RTC_DCHECK(stats_it != frame_stats_.end());
-
- uint16_t oldest_frame_id = state->PopFront();
- RTC_DCHECK_EQ(frame_id, oldest_frame_id);
- frame_counters_.dropped++;
- stream_frame_counters_[stream_label].dropped++;
- AddComparison(it->second, absl::nullopt, true, stats_it->second);
+ // If we overflow uint16_t and hit previous frame id and this frame is
+ // still in flight, it means that this stream wasn't rendered for long
+ // time and we need to process existing frame as dropped.
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == peer_index) {
+ continue;
+ }
+
+ uint16_t oldest_frame_id = state->PopFront(i);
+ RTC_DCHECK_EQ(frame_id, oldest_frame_id);
+ frame_counters_.dropped++;
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_.at(key).dropped++;
+
+ MutexLock lock1(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(InternalStatsKey(stream_index, peer_index, i),
+ it->second.frame(), absl::nullopt, true,
+ it->second.GetStatsForPeer(i));
+ }
captured_frames_in_flight_.erase(it);
- frame_stats_.erase(stats_it);
}
- captured_frames_in_flight_.insert(
- std::pair<uint16_t, VideoFrame>(frame_id, frame));
+ captured_frames_in_flight_.emplace(
+ frame_id,
+ FrameInFlight(stream_index, frame,
+ /*captured_time=*/Now(), peer_index, peers_->size()));
// Set frame id on local copy of the frame
- captured_frames_in_flight_.at(frame_id).set_id(frame_id);
- frame_stats_.insert(std::pair<uint16_t, FrameStats>(
- frame_id, FrameStats(stream_label, /*captured_time=*/Now())));
+ captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);
// Update history stream<->frame mapping
for (auto it = stream_to_frame_id_history_.begin();
it != stream_to_frame_id_history_.end(); ++it) {
it->second.erase(frame_id);
}
- stream_to_frame_id_history_[stream_label].insert(frame_id);
+ stream_to_frame_id_history_[stream_index].insert(frame_id);
// If state has too many frames that are in flight => remove the oldest
// queued frame in order to avoid to use too much memory.
- if (state->GetAliveFramesCount() > max_frames_in_flight_per_stream_count_) {
+ if (state->GetAliveFramesCount() >
+ options_.max_frames_in_flight_per_stream_count) {
uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
- auto removed_count = captured_frames_in_flight_.erase(frame_id_to_remove);
- RTC_DCHECK_EQ(removed_count, 1)
+ auto it = captured_frames_in_flight_.find(frame_id_to_remove);
+ RTC_CHECK(it != captured_frames_in_flight_.end())
+ << "Frame with ID " << frame_id_to_remove
+ << " is expected to be in flight, but hasn't been found in "
+ << "|captured_frames_in_flight_|";
+ bool is_removed = it->second.RemoveFrame();
+ RTC_DCHECK(is_removed)
<< "Invalid stream state: alive frame is removed already";
}
}
@@ -172,52 +279,76 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
}
void DefaultVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
- rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame.id());
- RTC_DCHECK(it != frame_stats_.end())
+ MutexLock lock(&lock_);
+ auto it = captured_frames_in_flight_.find(frame.id());
+ RTC_DCHECK(it != captured_frames_in_flight_.end())
<< "Frame id=" << frame.id() << " not found";
frame_counters_.pre_encoded++;
- stream_frame_counters_[it->second.stream_label].pre_encoded++;
- it->second.pre_encode_time = Now();
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(it->second.stream(), peer_index, i);
+ stream_frame_counters_.at(key).pre_encoded++;
+ }
+ }
+ it->second.SetPreEncodeTime(Now());
}
void DefaultVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
- rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame_id);
- RTC_DCHECK(it != frame_stats_.end());
+ MutexLock lock(&lock_);
+ auto it = captured_frames_in_flight_.find(frame_id);
+ RTC_DCHECK(it != captured_frames_in_flight_.end());
// For SVC we can receive multiple encoded images for one frame, so to cover
// all cases we have to pick the last encode time.
- if (it->second.encoded_time.IsInfinite()) {
+ if (!it->second.HasEncodedTime()) {
// Increase counters only when we meet this frame first time.
frame_counters_.encoded++;
- stream_frame_counters_[it->second.stream_label].encoded++;
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(it->second.stream(), peer_index, i);
+ stream_frame_counters_.at(key).encoded++;
+ }
+ }
}
- it->second.encoded_time = Now();
- it->second.encoded_image_size = encoded_image.size();
- it->second.target_encode_bitrate += stats.target_encode_bitrate;
+ it->second.OnFrameEncoded(Now(), encoded_image.size(),
+ stats.target_encode_bitrate);
}
void DefaultVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
webrtc::EncodedImageCallback::DropReason reason) {
// Here we do nothing, because we will see this drop on renderer side.
}
void DefaultVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& input_image) {
- rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame_id);
- RTC_DCHECK(it != frame_stats_.end());
- RTC_DCHECK(it->second.received_time.IsInfinite())
- << "Received multiple spatial layers for stream_label="
- << it->second.stream_label;
+ MutexLock lock(&lock_);
+ size_t peer_index = peers_->index(peer_name);
+
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasReceivedTime(peer_index)) {
+ // It means this frame was predecoded before, so we can skip it. It may
+ // happen when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+
frame_counters_.received++;
- stream_frame_counters_[it->second.stream_label].received++;
- it->second.decode_start_time = Now();
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).received++;
// Determine the time of the last received packet of this video frame.
RTC_DCHECK(!input_image.PacketInfos().empty());
int64_t last_receive_time =
@@ -227,112 +358,139 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
return a.receive_time_ms() < b.receive_time_ms();
})
->receive_time_ms();
- it->second.received_time = Timestamp::Millis(last_receive_time);
+ it->second.OnFramePreDecode(
+ peer_index,
+ /*received_time=*/Timestamp::Millis(last_receive_time),
+ /*decode_start_time=*/Now());
}
void DefaultVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
- rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame.id());
- RTC_DCHECK(it != frame_stats_.end());
+ MutexLock lock(&lock_);
+ size_t peer_index = peers_->index(peer_name);
+
+ auto it = captured_frames_in_flight_.find(frame.id());
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasDecodeEndTime(peer_index)) {
+ // It means this frame was decoded before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
frame_counters_.decoded++;
- stream_frame_counters_[it->second.stream_label].decoded++;
- it->second.decode_end_time = Now();
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).decoded++;
+ it->second.SetDecodeEndTime(peer_index, Now());
}
void DefaultVideoQualityAnalyzer::OnFrameRendered(
- const webrtc::VideoFrame& raw_frame) {
- // Copy entire video frame including video buffer to ensure that analyzer
- // won't hold any WebRTC internal buffers.
- VideoFrame frame = raw_frame;
- frame.set_video_frame_buffer(
- I420Buffer::Copy(*raw_frame.video_frame_buffer()->ToI420()));
-
- rtc::CritScope crit(&lock_);
- auto stats_it = frame_stats_.find(frame.id());
- RTC_DCHECK(stats_it != frame_stats_.end());
- FrameStats* frame_stats = &stats_it->second;
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame) {
+ MutexLock lock(&lock_);
+ size_t peer_index = peers_->index(peer_name);
+
+ auto frame_it = captured_frames_in_flight_.find(frame.id());
+ if (frame_it == captured_frames_in_flight_.end() ||
+ frame_it->second.HasRenderedTime(peer_index)) {
+ // It means this frame was rendered before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+
+ // Find corresponding captured frame.
+ FrameInFlight* frame_in_flight = &frame_it->second;
+ absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();
+
+ const size_t stream_index = frame_in_flight->stream();
+ StreamState* state = &stream_states_.at(stream_index);
+ const InternalStatsKey stats_key(stream_index, state->owner(), peer_index);
+
// Update frames counters.
frame_counters_.rendered++;
- stream_frame_counters_[frame_stats->stream_label].rendered++;
+ stream_frame_counters_.at(stats_key).rendered++;
// Update current frame stats.
- frame_stats->rendered_time = Now();
- frame_stats->rendered_frame_width = frame.width();
- frame_stats->rendered_frame_height = frame.height();
-
- // Find corresponding captured frame.
- auto frame_it = captured_frames_in_flight_.find(frame.id());
- absl::optional<VideoFrame> captured_frame =
- frame_it != captured_frames_in_flight_.end()
- ? absl::optional<VideoFrame>(frame_it->second)
- : absl::nullopt;
+ frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(),
+ frame.height());
// After we received frame here we need to check if there are any dropped
// frames between this one and last one, that was rendered for this video
// stream.
-
- const std::string& stream_label = frame_stats->stream_label;
- StreamState* state = &stream_states_[stream_label];
int dropped_count = 0;
- while (!state->Empty() && state->Front() != frame.id()) {
+ while (!state->IsEmpty(peer_index) &&
+ state->Front(peer_index) != frame.id()) {
dropped_count++;
- uint16_t dropped_frame_id = state->PopFront();
+ uint16_t dropped_frame_id = state->PopFront(peer_index);
// Frame with id |dropped_frame_id| was dropped. We need:
// 1. Update global and stream frame counters
// 2. Extract corresponding frame from |captured_frames_in_flight_|
- // 3. Extract corresponding frame stats from |frame_stats_|
- // 4. Send extracted frame to comparison with dropped=true
- // 5. Cleanup dropped frame
+ // 3. Send extracted frame to comparison with dropped=true
+ // 4. Cleanup dropped frame
frame_counters_.dropped++;
- stream_frame_counters_[stream_label].dropped++;
+ stream_frame_counters_.at(stats_key).dropped++;
- auto dropped_frame_stats_it = frame_stats_.find(dropped_frame_id);
- RTC_DCHECK(dropped_frame_stats_it != frame_stats_.end());
auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
- absl::optional<VideoFrame> dropped_frame =
- dropped_frame_it != captured_frames_in_flight_.end()
- ? absl::optional<VideoFrame>(dropped_frame_it->second)
- : absl::nullopt;
+ RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
+ absl::optional<VideoFrame> dropped_frame = dropped_frame_it->second.frame();
+ dropped_frame_it->second.MarkDropped(peer_index);
- AddComparison(dropped_frame, absl::nullopt, true,
- dropped_frame_stats_it->second);
+ {
+ MutexLock lock1(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(stats_key, dropped_frame, absl::nullopt, true,
+ dropped_frame_it->second.GetStatsForPeer(peer_index));
+ }
- frame_stats_.erase(dropped_frame_stats_it);
- if (dropped_frame_it != captured_frames_in_flight_.end()) {
+ if (dropped_frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(dropped_frame_it);
}
}
- RTC_DCHECK(!state->Empty());
- state->PopFront();
+ RTC_DCHECK(!state->IsEmpty(peer_index));
+ state->PopFront(peer_index);
- if (state->last_rendered_frame_time()) {
- frame_stats->prev_frame_rendered_time =
- state->last_rendered_frame_time().value();
+ if (state->last_rendered_frame_time(peer_index)) {
+ frame_in_flight->SetPrevFrameRenderedTime(
+ peer_index, state->last_rendered_frame_time(peer_index).value());
+ }
+ state->SetLastRenderedFrameTime(peer_index,
+ frame_in_flight->rendered_time(peer_index));
+ {
+ MutexLock cr(&comparison_lock_);
+ stream_stats_[stats_key].skipped_between_rendered.AddSample(dropped_count);
}
- state->set_last_rendered_frame_time(frame_stats->rendered_time);
+
{
- rtc::CritScope cr(&comparison_lock_);
- stream_stats_[stream_label].skipped_between_rendered.AddSample(
- dropped_count);
+ MutexLock lock(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(stats_key, captured_frame, frame, false,
+ frame_in_flight->GetStatsForPeer(peer_index));
}
- AddComparison(captured_frame, frame, false, *frame_stats);
- if (frame_it != captured_frames_in_flight_.end()) {
+ if (frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(frame_it);
}
- frame_stats_.erase(stats_it);
}
void DefaultVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
<< ", code=" << error_code;
}
-void DefaultVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id,
+void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
<< ", code=" << error_code;
@@ -341,7 +499,7 @@ void DefaultVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id,
void DefaultVideoQualityAnalyzer::Stop() {
StopMeasuringCpuProcessTime();
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
@@ -360,94 +518,116 @@ void DefaultVideoQualityAnalyzer::Stop() {
// Time between freezes.
// Count time since the last freeze to the end of the call as time
// between freezes.
- rtc::CritScope crit1(&lock_);
- rtc::CritScope crit2(&comparison_lock_);
- for (auto& item : stream_stats_) {
- const StreamState& state = stream_states_[item.first];
- // If there are no freezes in the call we have to report
- // time_between_freezes_ms as call duration and in such case
- // |stream_last_freeze_end_time_| for this stream will be |start_time_|.
- // If there is freeze, then we need add time from last rendered frame
- // to last freeze end as time between freezes.
- if (state.last_rendered_frame_time()) {
- item.second.time_between_freezes_ms.AddSample(
- (state.last_rendered_frame_time().value() -
- stream_last_freeze_end_time_.at(item.first))
- .ms());
+ MutexLock lock1(&lock_);
+ MutexLock lock2(&comparison_lock_);
+ for (auto& state_entry : stream_states_) {
+ const size_t stream_index = state_entry.first;
+ const StreamState& stream_state = state_entry.second;
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == static_cast<size_t>(stream_state.owner())) {
+ continue;
+ }
+
+ InternalStatsKey stats_key(stream_index, stream_state.owner(), i);
+
+ // If there are no freezes in the call we have to report
+ // time_between_freezes_ms as call duration and in such case
+ // |stream_last_freeze_end_time_| for this stream will be |start_time_|.
+ // If there is freeze, then we need add time from last rendered frame
+ // to last freeze end as time between freezes.
+ if (stream_state.last_rendered_frame_time(i)) {
+ stream_stats_[stats_key].time_between_freezes_ms.AddSample(
+ stream_state.last_rendered_frame_time(i).value().ms() -
+ stream_last_freeze_end_time_.at(stats_key).ms());
+ }
}
}
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
}
ReportResults();
}
std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
- rtc::CritScope crit1(&lock_);
- auto it = frame_stats_.find(frame_id);
- if (it != frame_stats_.end()) {
- return it->second.stream_label;
+ MutexLock lock1(&lock_);
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it != captured_frames_in_flight_.end()) {
+ return streams_.name(it->second.stream());
}
for (auto hist_it = stream_to_frame_id_history_.begin();
hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
auto hist_set_it = hist_it->second.find(frame_id);
if (hist_set_it != hist_it->second.end()) {
- return hist_it->first;
+ return streams_.name(hist_it->first);
}
}
RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
}
-std::set<std::string> DefaultVideoQualityAnalyzer::GetKnownVideoStreams()
- const {
- rtc::CritScope crit2(&comparison_lock_);
- std::set<std::string> out;
+std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
+ MutexLock lock1(&lock_);
+ MutexLock lock2(&comparison_lock_);
+ std::set<StatsKey> out;
for (auto& item : stream_stats_) {
- out.insert(item.first);
+ RTC_LOG(INFO) << item.first.ToString() << " ==> "
+ << ToStatsKey(item.first).ToString();
+ out.insert(ToStatsKey(item.first));
}
return out;
}
const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frame_counters_;
}
-const std::map<std::string, FrameCounters>&
+std::map<StatsKey, FrameCounters>
DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
- rtc::CritScope crit(&lock_);
- return stream_frame_counters_;
+ MutexLock lock(&lock_);
+ std::map<StatsKey, FrameCounters> out;
+ for (auto& item : stream_frame_counters_) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
}
-std::map<std::string, StreamStats> DefaultVideoQualityAnalyzer::GetStats()
- const {
- rtc::CritScope cri(&comparison_lock_);
- return stream_stats_;
+std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
+ MutexLock lock1(&lock_);
+ MutexLock lock2(&comparison_lock_);
+ std::map<StatsKey, StreamStats> out;
+ for (auto& item : stream_stats_) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
}
AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
return analyzer_stats_;
}
void DefaultVideoQualityAnalyzer::AddComparison(
+ InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats) {
StartExcludingCpuThreadTime();
- rtc::CritScope crit(&comparison_lock_);
analyzer_stats_.comparisons_queue_size.AddSample(comparisons_.size());
// If there too many computations waiting in the queue, we won't provide
// frames itself to make future computations lighter.
if (comparisons_.size() >= kMaxActiveComparisons) {
- comparisons_.emplace_back(absl::nullopt, absl::nullopt, dropped,
- frame_stats, OverloadReason::kCpu);
+ comparisons_.emplace_back(std::move(stats_key), absl::nullopt,
+ absl::nullopt, dropped, std::move(frame_stats),
+ OverloadReason::kCpu);
} else {
OverloadReason overload_reason = OverloadReason::kNone;
if (!captured && !dropped) {
overload_reason = OverloadReason::kMemory;
}
- comparisons_.emplace_back(std::move(captured), std::move(rendered), dropped,
- frame_stats, overload_reason);
+ comparisons_.emplace_back(std::move(stats_key), std::move(captured),
+ std::move(rendered), dropped,
+ std::move(frame_stats), overload_reason);
}
comparison_available_event_.Set();
StopExcludingCpuThreadTime();
@@ -462,7 +642,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparisons() {
// Try to pick next comparison to perform from the queue.
absl::optional<FrameComparison> comparison = absl::nullopt;
{
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
if (!comparisons_.empty()) {
comparison = comparisons_.front();
comparisons_.pop_front();
@@ -476,7 +656,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparisons() {
{
// If there are no comparisons and state is stopped =>
// no more frames expected.
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
more_frames_expected = state_ != State::kStopped;
}
if (!more_frames_expected) {
@@ -498,7 +678,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
// Perform expensive psnr and ssim calculations while not holding lock.
double psnr = -1.0;
double ssim = -1.0;
- if (heavy_metrics_computation_enabled_ && comparison.captured &&
+ if (options_.heavy_metrics_computation_enabled && comparison.captured &&
!comparison.dropped) {
psnr = I420PSNR(&*comparison.captured, &*comparison.rendered);
ssim = I420SSIM(&*comparison.captured, &*comparison.rendered);
@@ -506,9 +686,9 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
const FrameStats& frame_stats = comparison.frame_stats;
- rtc::CritScope crit(&comparison_lock_);
- auto stats_it = stream_stats_.find(frame_stats.stream_label);
- RTC_CHECK(stats_it != stream_stats_.end());
+ MutexLock lock(&comparison_lock_);
+ auto stats_it = stream_stats_.find(comparison.stats_key);
+ RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
StreamStats* stats = &stats_it->second;
analyzer_stats_.comparisons_done++;
if (comparison.overload_reason == OverloadReason::kCpu) {
@@ -561,7 +741,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
3 * average_time_between_rendered_frames_ms)) {
stats->freeze_time_ms.AddSample(time_between_rendered_frames.ms());
auto freeze_end_it =
- stream_last_freeze_end_time_.find(frame_stats.stream_label);
+ stream_last_freeze_end_time_.find(comparison.stats_key);
RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
stats->time_between_freezes_ms.AddSample(
(frame_stats.prev_frame_rendered_time - freeze_end_it->second)
@@ -575,18 +755,19 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
void DefaultVideoQualityAnalyzer::ReportResults() {
using ::webrtc::test::ImproveDirection;
- rtc::CritScope crit1(&lock_);
- rtc::CritScope crit2(&comparison_lock_);
+ MutexLock lock1(&lock_);
+ MutexLock lock2(&comparison_lock_);
for (auto& item : stream_stats_) {
- ReportResults(GetTestCaseName(item.first), item.second,
- stream_frame_counters_.at(item.first));
+ ReportResults(GetTestCaseName(StatsKeyToMetricName(ToStatsKey(item.first))),
+ item.second, stream_frame_counters_.at(item.first));
}
test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(),
"%", false, ImproveDirection::kSmallerIsBetter);
LogFrameCounters("Global", frame_counters_);
for (auto& item : stream_stats_) {
- LogFrameCounters(item.first, stream_frame_counters_.at(item.first));
- LogStreamInternalStats(item.first, item.second);
+ LogFrameCounters(ToStatsKey(item.first).ToString(),
+ stream_frame_counters_.at(item.first));
+ LogStreamInternalStats(ToStatsKey(item.first).ToString(), item.second);
}
if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
RTC_LOG(INFO) << "comparisons_queue_size min="
@@ -714,62 +895,249 @@ Timestamp DefaultVideoQualityAnalyzer::Now() {
return clock_->CurrentTime();
}
+StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
+ const InternalStatsKey& key) const {
+ return StatsKey(streams_.name(key.stream), peers_->name(key.sender),
+ peers_->name(key.receiver));
+}
+
+std::string DefaultVideoQualityAnalyzer::StatsKeyToMetricName(
+ const StatsKey& key) {
+ if (peers_->size() <= 2) {
+ return key.stream_label;
+ }
+ return key.ToString();
+}
+
void DefaultVideoQualityAnalyzer::StartMeasuringCpuProcessTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetProcessCpuTimeNanos();
wallclock_time_ -= rtc::SystemTimeNanos();
}
void DefaultVideoQualityAnalyzer::StopMeasuringCpuProcessTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ += rtc::GetProcessCpuTimeNanos();
wallclock_time_ += rtc::SystemTimeNanos();
}
void DefaultVideoQualityAnalyzer::StartExcludingCpuThreadTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ += rtc::GetThreadCpuTimeNanos();
}
void DefaultVideoQualityAnalyzer::StopExcludingCpuThreadTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetThreadCpuTimeNanos();
}
double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
}
-DefaultVideoQualityAnalyzer::FrameStats::FrameStats(std::string stream_label,
- Timestamp captured_time)
- : stream_label(std::move(stream_label)), captured_time(captured_time) {}
-
DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
+ InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason)
- : captured(std::move(captured)),
+ : stats_key(std::move(stats_key)),
+ captured(std::move(captured)),
rendered(std::move(rendered)),
dropped(dropped),
frame_stats(std::move(frame_stats)),
overload_reason(overload_reason) {}
-uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront() {
- uint16_t frame_id = frame_ids_.front();
- frame_ids_.pop_front();
- if (dead_frames_count_ > 0) {
- dead_frames_count_--;
+uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
+ absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer);
+ RTC_DCHECK(frame_id.has_value());
+
+ // If alive's frame queue is longer than all others, than also pop frame from
+ // it, because that frame is received by all receivers.
+ size_t owner_size = frame_ids_.size(owner_);
+ size_t other_size = 0;
+ for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
+ size_t cur_size = frame_ids_.size(i);
+ if (i != owner_ && cur_size > other_size) {
+ other_size = cur_size;
+ }
}
- return frame_id;
+ if (owner_size > other_size) {
+ absl::optional<uint16_t> alive_frame_id = frame_ids_.PopFront(owner_);
+ RTC_DCHECK(alive_frame_id.has_value());
+ RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
+ }
+
+ return frame_id.value();
}
uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
- uint16_t frame_id = frame_ids_[dead_frames_count_];
- dead_frames_count_++;
- return frame_id;
+ absl::optional<uint16_t> frame_id = frame_ids_.PopFront(owner_);
+ RTC_DCHECK(frame_id.has_value());
+ return frame_id.value();
+}
+
+void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime(
+ size_t peer,
+ Timestamp time) {
+ auto it = last_rendered_frame_time_.find(peer);
+ if (it == last_rendered_frame_time_.end()) {
+ last_rendered_frame_time_.insert({peer, time});
+ } else {
+ it->second = time;
+ }
+}
+
+absl::optional<Timestamp>
+DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time(
+ size_t peer) const {
+ return MaybeGetValue(last_rendered_frame_time_, peer);
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() {
+ if (!frame_) {
+ return false;
+ }
+ frame_ = absl::nullopt;
+ return true;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::SetFrameId(uint16_t id) {
+ if (frame_) {
+ frame_->set_id(id);
+ }
+}
+
+std::vector<size_t>
+DefaultVideoQualityAnalyzer::FrameInFlight::GetPeersWhichDidntReceive() const {
+ std::vector<size_t> out;
+ for (size_t i = 0; i < peers_count_; ++i) {
+ auto it = receiver_stats_.find(i);
+ if (i != owner_ && it != receiver_stats_.end() &&
+ it->second.rendered_time.IsInfinite()) {
+ out.push_back(i);
+ }
+ }
+ return out;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HaveAllPeersReceived() const {
+ for (size_t i = 0; i < peers_count_; ++i) {
+ if (i == owner_) {
+ continue;
+ }
+
+ auto it = receiver_stats_.find(i);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+
+ if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameEncoded(
+ webrtc::Timestamp time,
+ int64_t encoded_image_size,
+ uint32_t target_encode_bitrate) {
+ encoded_time_ = time;
+ encoded_image_size_ = encoded_image_size;
+ target_encode_bitrate_ += target_encode_bitrate;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFramePreDecode(
+ size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time) {
+ receiver_stats_[peer].received_time = received_time;
+ receiver_stats_[peer].decode_start_time = decode_start_time;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasReceivedTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.received_time.IsFinite();
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasDecodeEndTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.decode_end_time.IsFinite();
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameRendered(
+ size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height) {
+ receiver_stats_[peer].rendered_time = time;
+ receiver_stats_[peer].rendered_frame_width = width;
+ receiver_stats_[peer].rendered_frame_height = height;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasRenderedTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.rendered_time.IsFinite();
+}
+
+DefaultVideoQualityAnalyzer::FrameStats
+DefaultVideoQualityAnalyzer::FrameInFlight::GetStatsForPeer(size_t peer) const {
+ FrameStats stats(captured_time_);
+ stats.pre_encode_time = pre_encode_time_;
+ stats.encoded_time = encoded_time_;
+ stats.target_encode_bitrate = target_encode_bitrate_;
+ stats.encoded_image_size = encoded_image_size_;
+
+ absl::optional<ReceiverFrameStats> receiver_stats =
+ MaybeGetValue<ReceiverFrameStats>(receiver_stats_, peer);
+ if (receiver_stats.has_value()) {
+ stats.received_time = receiver_stats->received_time;
+ stats.decode_start_time = receiver_stats->decode_start_time;
+ stats.decode_end_time = receiver_stats->decode_end_time;
+ stats.rendered_time = receiver_stats->rendered_time;
+ stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
+ stats.rendered_frame_width = receiver_stats->rendered_frame_width;
+ stats.rendered_frame_height = receiver_stats->rendered_frame_height;
+ }
+ return stats;
+}
+
+size_t DefaultVideoQualityAnalyzer::NamesCollection::AddIfAbsent(
+ absl::string_view name) {
+ auto it = index_.find(name);
+ if (it != index_.end()) {
+ return it->second;
+ }
+ size_t out = names_.size();
+ size_t old_capacity = names_.capacity();
+ names_.emplace_back(name);
+ size_t new_capacity = names_.capacity();
+
+ if (old_capacity == new_capacity) {
+ index_.emplace(names_[out], out);
+ } else {
+ // Reallocation happened in the vector, so we need to rebuild |index_|
+ index_.clear();
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ }
+ }
+ return out;
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
index 6bebb0f02b..51ded74f6b 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
@@ -19,15 +19,17 @@
#include <string>
#include <vector>
+#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/numerics/samples_stats_counter.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
@@ -36,7 +38,7 @@ namespace webrtc_pc_e2e {
// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without
// key frame request.
-constexpr int kDefaultMaxFramesInFlightPerStream = 270;
+constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;
class RateCounter {
public:
@@ -124,52 +126,118 @@ struct AnalyzerStats {
// it is queued when its captured frame was already removed due to high memory
// usage for that video stream.
int64_t memory_overloaded_comparisons_done = 0;
+ // Count of frames in flight in analyzer measured when new comparison is added
+ // and after analyzer was stopped.
+ SamplesStatsCounter frames_in_flight_left_count;
+};
+
+struct StatsKey {
+ StatsKey(std::string stream_label, std::string sender, std::string receiver)
+ : stream_label(std::move(stream_label)),
+ sender(std::move(sender)),
+ receiver(std::move(receiver)) {}
+
+ std::string ToString() const;
+
+ // Label of video stream to which stats belongs to.
+ std::string stream_label;
+ // Name of the peer which send this stream.
+ std::string sender;
+ // Name of the peer on which stream was received.
+ std::string receiver;
+};
+
+// Required to use StatsKey as std::map key.
+bool operator<(const StatsKey& a, const StatsKey& b);
+bool operator==(const StatsKey& a, const StatsKey& b);
+
+struct InternalStatsKey {
+ InternalStatsKey(size_t stream, size_t sender, size_t receiver)
+ : stream(stream), sender(sender), receiver(receiver) {}
+
+ std::string ToString() const;
+
+ size_t stream;
+ size_t sender;
+ size_t receiver;
+};
+
+// Required to use InternalStatsKey as std::map key.
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
+
+struct DefaultVideoQualityAnalyzerOptions {
+ // Tells DefaultVideoQualityAnalyzer if heavy metrics like PSNR and SSIM have
+ // to be computed or not.
+ bool heavy_metrics_computation_enabled = true;
+ // Amount of frames that are queued in the DefaultVideoQualityAnalyzer from
+ // the point they were captured to the point they were rendered on all
+ // receivers per stream.
+ size_t max_frames_in_flight_per_stream_count =
+ kDefaultMaxFramesInFlightPerStream;
};
class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
public:
explicit DefaultVideoQualityAnalyzer(
+ webrtc::Clock* clock,
+ DefaultVideoQualityAnalyzerOptions options =
+ DefaultVideoQualityAnalyzerOptions());
+ // Keep for backward compatibility during migration. Will be removed soon.
+ explicit DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled = true,
- int max_frames_in_flight_per_stream_count =
+ size_t max_frames_in_flight_per_stream_count =
kDefaultMaxFramesInFlightPerStream);
~DefaultVideoQualityAnalyzer() override;
- void Start(std::string test_case_name, int max_threads_count) override;
- uint16_t OnFrameCaptured(const std::string& stream_label,
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) override;
- void OnFramePreEncode(const VideoFrame& frame) override;
- void OnFrameEncoded(uint16_t frame_id,
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) override;
- void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
- void OnFramePreDecode(uint16_t frame_id,
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& input_image) override;
- void OnFrameDecoded(const VideoFrame& frame,
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) override;
- void OnFrameRendered(const VideoFrame& frame) override;
- void OnEncoderError(const VideoFrame& frame, int32_t error_code) override;
- void OnDecoderError(uint16_t frame_id, int32_t error_code) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) override;
void Stop() override;
std::string GetStreamLabel(uint16_t frame_id) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override {}
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Returns set of stream labels, that were met during test call.
- std::set<std::string> GetKnownVideoStreams() const;
+ std::set<StatsKey> GetKnownVideoStreams() const;
const FrameCounters& GetGlobalCounters() const;
// Returns frame counter per stream label. Valid stream labels can be obtained
// by calling GetKnownVideoStreams()
- const std::map<std::string, FrameCounters>& GetPerStreamCounters() const;
+ std::map<StatsKey, FrameCounters> GetPerStreamCounters() const;
// Returns video quality stats per stream label. Valid stream labels can be
// obtained by calling GetKnownVideoStreams()
- std::map<std::string, StreamStats> GetStats() const;
+ std::map<StatsKey, StreamStats> GetStats() const;
AnalyzerStats GetAnalyzerStats() const;
private:
struct FrameStats {
- FrameStats(std::string stream_label, Timestamp captured_time);
-
- std::string stream_label;
+ FrameStats(Timestamp captured_time) : captured_time(captured_time) {}
// Frame events timestamp.
Timestamp captured_time;
@@ -182,12 +250,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+ int64_t encoded_image_size = 0;
uint32_t target_encode_bitrate = 0;
absl::optional<int> rendered_frame_width = absl::nullopt;
absl::optional<int> rendered_frame_height = absl::nullopt;
-
- int64_t encoded_image_size = 0;
};
// Describes why comparison was done in overloaded mode (without calculating
@@ -209,12 +276,14 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// because there were too many comparisons in the queue. |dropped| can be
// true or false showing was frame dropped or not.
struct FrameComparison {
- FrameComparison(absl::optional<VideoFrame> captured,
+ FrameComparison(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason);
+ InternalStatsKey stats_key;
// Frames can be omitted if there too many computations waiting in the
// queue.
absl::optional<VideoFrame> captured;
@@ -230,49 +299,175 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Represents a current state of video stream.
class StreamState {
public:
- void PushBack(uint16_t frame_id) { frame_ids_.emplace_back(frame_id); }
+ StreamState(size_t owner, size_t peers_count)
+ : owner_(owner), frame_ids_(peers_count) {}
- uint16_t PopFront();
+ size_t owner() const { return owner_; }
- bool Empty() { return frame_ids_.empty(); }
-
- uint16_t Front() { return frame_ids_.front(); }
-
- int GetAliveFramesCount() { return frame_ids_.size() - dead_frames_count_; }
+ void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
+ // Crash if state is empty.
+ uint16_t PopFront(size_t peer);
+ bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); }
+ // Crash if state is empty.
+ uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); }
+ size_t GetAliveFramesCount() { return frame_ids_.size(owner_); }
uint16_t MarkNextAliveFrameAsDead();
- void set_last_rendered_frame_time(Timestamp time) {
- last_rendered_frame_time_ = time;
- }
- absl::optional<Timestamp> last_rendered_frame_time() const {
- return last_rendered_frame_time_;
- }
+ void SetLastRenderedFrameTime(size_t peer, Timestamp time);
+ absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;
private:
+ // Index of the owner. Owner's queue in |frame_ids_| will keep alive frames.
+ const size_t owner_;
// To correctly determine dropped frames we have to know sequence of frames
// in each stream so we will keep a list of frame ids inside the stream.
- // When the frame is rendered, we will pop ids from the list for until id
- // will match with rendered one. All ids before matched one can be
- // considered as dropped:
+ // This list is represented by multi head queue of frame ids with separate
+ // head for each receiver. When the frame is rendered, we will pop ids from
+ // the corresponding head until id will match with rendered one. All ids
+ // before matched one can be considered as dropped:
//
// | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
//
// If we received frame with id frame_id3, then we will pop frame_id1 and
// frame_id2 and consider that frames as dropped and then compare received
// frame with the one from |captured_frames_in_flight_| with id frame_id3.
- std::deque<uint16_t> frame_ids_;
- // Count of dead frames in the beginning of the deque.
- int dead_frames_count_;
- absl::optional<Timestamp> last_rendered_frame_time_ = absl::nullopt;
+ //
+ // To track alive frames (frames that contains frame's payload in
+ // |captured_frames_in_flight_|) the head which corresponds to |owner_| will
+ // be used. So that head will point to the first alive frame in frames list.
+ MultiHeadQueue<uint16_t> frame_ids_;
+ std::map<size_t, Timestamp> last_rendered_frame_time_;
};
enum State { kNew, kActive, kStopped };
- void AddComparison(absl::optional<VideoFrame> captured,
+ struct ReceiverFrameStats {
+ // Time when last packet of a frame was received.
+ Timestamp received_time = Timestamp::MinusInfinity();
+ Timestamp decode_start_time = Timestamp::MinusInfinity();
+ Timestamp decode_end_time = Timestamp::MinusInfinity();
+ Timestamp rendered_time = Timestamp::MinusInfinity();
+ Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+
+ absl::optional<int> rendered_frame_width = absl::nullopt;
+ absl::optional<int> rendered_frame_height = absl::nullopt;
+
+ bool dropped = false;
+ };
+
+ class FrameInFlight {
+ public:
+ FrameInFlight(size_t stream,
+ VideoFrame frame,
+ Timestamp captured_time,
+ size_t owner,
+ size_t peers_count)
+ : stream_(stream),
+ owner_(owner),
+ peers_count_(peers_count),
+ frame_(std::move(frame)),
+ captured_time_(captured_time) {}
+
+ size_t stream() const { return stream_; }
+ const absl::optional<VideoFrame>& frame() const { return frame_; }
+ // Returns was frame removed or not.
+ bool RemoveFrame();
+ void SetFrameId(uint16_t id);
+
+ std::vector<size_t> GetPeersWhichDidntReceive() const;
+ bool HaveAllPeersReceived() const;
+
+ void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }
+
+ void OnFrameEncoded(webrtc::Timestamp time,
+ int64_t encoded_image_size,
+ uint32_t target_encode_bitrate);
+
+ bool HasEncodedTime() const { return encoded_time_.IsFinite(); }
+
+ void OnFramePreDecode(size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time);
+
+ bool HasReceivedTime(size_t peer) const;
+
+ void SetDecodeEndTime(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].decode_end_time = time;
+ }
+
+ bool HasDecodeEndTime(size_t peer) const;
+
+ void OnFrameRendered(size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height);
+
+ bool HasRenderedTime(size_t peer) const;
+
+ // Crash if rendered time is not set for specified |peer|.
+ webrtc::Timestamp rendered_time(size_t peer) const {
+ return receiver_stats_.at(peer).rendered_time;
+ }
+
+ void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }
+
+ void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].prev_frame_rendered_time = time;
+ }
+
+ FrameStats GetStatsForPeer(size_t peer) const;
+
+ private:
+ const size_t stream_;
+ const size_t owner_;
+ const size_t peers_count_;
+ absl::optional<VideoFrame> frame_;
+
+ // Frame events timestamp.
+ Timestamp captured_time_;
+ Timestamp pre_encode_time_ = Timestamp::MinusInfinity();
+ Timestamp encoded_time_ = Timestamp::MinusInfinity();
+ int64_t encoded_image_size_ = 0;
+ uint32_t target_encode_bitrate_ = 0;
+ std::map<size_t, ReceiverFrameStats> receiver_stats_;
+ };
+
+ class NamesCollection {
+ public:
+ NamesCollection() = default;
+ explicit NamesCollection(rtc::ArrayView<const std::string> names) {
+ names_ = std::vector<std::string>(names.begin(), names.end());
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ }
+ }
+
+ size_t size() const { return names_.size(); }
+
+ size_t index(absl::string_view name) const { return index_.at(name); }
+
+ const std::string& name(size_t index) const { return names_[index]; }
+
+ bool HasName(absl::string_view name) const {
+ return index_.find(name) != index_.end();
+ }
+
+ // Add specified |name| to the collection if it isn't presented.
+ // Returns index which corresponds to specified |name|.
+ size_t AddIfAbsent(absl::string_view name);
+
+ private:
+ std::vector<std::string> names_;
+ std::map<absl::string_view, size_t> index_;
+ };
+
+ void AddComparison(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
- FrameStats frame_stats);
+ FrameStats frame_stats)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
static void ProcessComparisonsThread(void* obj);
void ProcessComparisons();
void ProcessComparison(const FrameComparison& comparison);
@@ -292,6 +487,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Returns name of current test case for reporting.
std::string GetTestCaseName(const std::string& stream_label) const;
Timestamp Now();
+ StatsKey ToStatsKey(const InternalStatsKey& key) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ // Returns string representation of stats key for metrics naming. Used for
+ // backward compatibility by metrics naming for 2 peers cases.
+ std::string StatsKeyToMetricName(const StatsKey& key);
void StartMeasuringCpuProcessTime();
void StopMeasuringCpuProcessTime();
@@ -299,16 +499,20 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
void StopExcludingCpuThreadTime();
double GetCpuUsagePercent();
- const bool heavy_metrics_computation_enabled_;
- const int max_frames_in_flight_per_stream_count_;
+ // TODO(titovartem) restore const when old constructor will be removed.
+ DefaultVideoQualityAnalyzerOptions options_;
webrtc::Clock* const clock_;
std::atomic<uint16_t> next_frame_id_{0};
std::string test_label_;
+ std::unique_ptr<NamesCollection> peers_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
State state_ RTC_GUARDED_BY(lock_) = State::kNew;
Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
+ // Mapping from stream label to unique size_t value to use in stats and avoid
+ // extra string copying.
+ NamesCollection streams_ RTC_GUARDED_BY(lock_);
// Frames that were captured by all streams and still aren't rendered by any
// stream or deemed dropped. Frame with id X can be removed from this map if:
// 1. The frame with id X was received in OnFrameRendered
@@ -316,27 +520,29 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// 3. Next available frame id for newly captured frame is X
// 4. There too many frames in flight for current video stream and X is the
// oldest frame id in this stream.
- std::map<uint16_t, VideoFrame> captured_frames_in_flight_
+ std::map<uint16_t, FrameInFlight> captured_frames_in_flight_
RTC_GUARDED_BY(lock_);
// Global frames count for all video streams.
FrameCounters frame_counters_ RTC_GUARDED_BY(lock_);
- // Frame counters per each stream.
- std::map<std::string, FrameCounters> stream_frame_counters_
+ // Frame counters per each stream per each receiver.
+ std::map<InternalStatsKey, FrameCounters> stream_frame_counters_
RTC_GUARDED_BY(lock_);
- std::map<uint16_t, FrameStats> frame_stats_ RTC_GUARDED_BY(lock_);
- std::map<std::string, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
-
- // Stores history mapping between stream labels and frame ids. Updated when
- // frame id overlap. It required to properly return stream label after 1st
- // frame from simulcast streams was already rendered and last is still
- // encoding.
- std::map<std::string, std::set<uint16_t>> stream_to_frame_id_history_
+ // Map from stream index in |streams_| to its StreamState.
+ std::map<size_t, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
+ // Map from stream index in |streams_| to sender peer index in |peers_|.
+ std::map<size_t, size_t> stream_to_sender_ RTC_GUARDED_BY(lock_);
+
+ // Stores history mapping between stream index in |streams_| and frame ids.
+ // Updated when frame id overlap. It required to properly return stream label
+ // after 1st frame from simulcast streams was already rendered and last is
+ // still encoding.
+ std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
RTC_GUARDED_BY(lock_);
- rtc::CriticalSection comparison_lock_;
- std::map<std::string, StreamStats> stream_stats_
+ mutable Mutex comparison_lock_;
+ std::map<InternalStatsKey, StreamStats> stream_stats_
RTC_GUARDED_BY(comparison_lock_);
- std::map<std::string, Timestamp> stream_last_freeze_end_time_
+ std::map<InternalStatsKey, Timestamp> stream_last_freeze_end_time_
RTC_GUARDED_BY(comparison_lock_);
std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_);
@@ -344,7 +550,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
std::vector<std::unique_ptr<rtc::PlatformThread>> thread_pool_;
rtc::Event comparison_available_event_;
- rtc::CriticalSection cpu_measurement_lock_;
+ Mutex cpu_measurement_lock_;
int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0;
int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0;
};
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
index 1bc29c5f09..9c9a19fc91 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
@@ -18,6 +18,7 @@
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
+#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
@@ -26,11 +27,22 @@ namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
+using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample;
+
constexpr int kAnalyzerMaxThreadsCount = 1;
constexpr int kMaxFramesInFlightPerStream = 10;
constexpr int kFrameWidth = 320;
constexpr int kFrameHeight = 240;
constexpr char kStreamLabel[] = "video-stream";
+constexpr char kSenderPeerName[] = "alice";
+constexpr char kReceiverPeerName[] = "bob";
+
+DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() {
+ DefaultVideoQualityAnalyzerOptions options;
+ options.heavy_metrics_computation_enabled = false;
+ options.max_frames_in_flight_per_stream_count = kMaxFramesInFlightPerStream;
+ return options;
+}
VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
int64_t timestamp_us) {
@@ -64,6 +76,24 @@ VideoFrame DeepCopy(const VideoFrame& frame) {
return copy;
}
+std::vector<StatsSample> GetSortedSamples(const SamplesStatsCounter& counter) {
+ rtc::ArrayView<const StatsSample> view = counter.GetTimedSamples();
+ std::vector<StatsSample> out(view.begin(), view.end());
+ std::sort(out.begin(), out.end(),
+ [](const StatsSample& a, const StatsSample& b) {
+ return a.time < b.time;
+ });
+ return out;
+}
+
+std::string ToString(const std::vector<StatsSample>& values) {
+ rtc::StringBuilder out;
+ for (const auto& v : values) {
+ out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, ";
+ }
+ return out.str();
+}
+
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedAndThenAllFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
@@ -71,28 +101,32 @@ TEST(DefaultVideoQualityAnalyzerTest,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
- DefaultVideoQualityAnalyzer analyzer(
- /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (const uint16_t& frame_id : frames_order) {
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -112,35 +146,120 @@ TEST(DefaultVideoQualityAnalyzerTest,
}
TEST(DefaultVideoQualityAnalyzerTest,
+ FillMaxMemoryReceiveAllMemoryOverloadedAndThenAllFramesReceived) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ // Feel analyzer's memory up to limit
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+ frames_order.clear();
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+
+ // Overload analyzer's memory up to limit
+ for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done,
+ kMaxFramesInFlightPerStream);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 3);
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
- DefaultVideoQualityAnalyzer analyzer(
- /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = kMaxFramesInFlightPerStream; i < frames_order.size(); ++i) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -164,29 +283,33 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
- DefaultVideoQualityAnalyzer analyzer(
- /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -199,6 +322,11 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
@@ -207,6 +335,231 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
}
+TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, captured_frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, 1);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kAlice, frame);
+ SleepMs(20);
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ SleepMs(50);
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(30);
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(10);
+ analyzer.OnFrameRendered(kBob, received_frame);
+ }
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kCharlie, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(40);
+ analyzer.OnFrameDecoded(kCharlie, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(5);
+ analyzer.OnFrameRendered(kCharlie, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(analyzer_stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(analyzer.GetKnownVideoStreams().size(), 2lu);
+ for (auto stream_key : analyzer.GetKnownVideoStreams()) {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(stream_key);
+ // On some devices the pipeline can be too slow, so we actually can't
+ // force real constraints here. Lets just check, that at least 1
+ // frame passed whole pipeline.
+ EXPECT_GE(stream_conters.captured, 10);
+ EXPECT_GE(stream_conters.pre_encoded, 10);
+ EXPECT_GE(stream_conters.encoded, 10);
+ EXPECT_GE(stream_conters.received, 5);
+ EXPECT_GE(stream_conters.decoded, 5);
+ EXPECT_GE(stream_conters.rendered, 5);
+ EXPECT_GE(stream_conters.dropped, 5);
+ }
+
+ std::map<StatsKey, StreamStats> stats = analyzer.GetStats();
+ const StatsKey kAliceBobStats(kStreamLabel, kAlice, kBob);
+ const StatsKey kAliceCharlieStats(kStreamLabel, kAlice, kCharlie);
+ EXPECT_EQ(stats.size(), 2lu);
+ {
+ auto it = stats.find(kAliceBobStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+ {
+ auto it = stats.find(kAliceCharlieStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kAlice, captured_frame);
+ analyzer.OnFrameEncoded(kAlice, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, 1);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
} // namespace
} // namespace webrtc_pc_e2e
} // namespace webrtc
diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
index d1d1bface7..198a6cb42f 100644
--- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
+++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
@@ -10,6 +10,7 @@
#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h"
+#include "api/array_view.h"
#include "rtc_base/logging.h"
namespace webrtc {
@@ -18,13 +19,16 @@ namespace webrtc_pc_e2e {
ExampleVideoQualityAnalyzer::ExampleVideoQualityAnalyzer() = default;
ExampleVideoQualityAnalyzer::~ExampleVideoQualityAnalyzer() = default;
-void ExampleVideoQualityAnalyzer::Start(std::string test_case_name,
- int max_threads_count) {}
+void ExampleVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {}
uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
uint16_t frame_id = next_frame_id_++;
auto it = frames_in_flight_.find(frame_id);
if (it == frames_in_flight_.end()) {
@@ -45,69 +49,77 @@ uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
}
void ExampleVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
++frames_pre_encoded_;
}
void ExampleVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
++frames_encoded_;
}
void ExampleVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
webrtc::EncodedImageCallback::DropReason reason) {
RTC_LOG(INFO) << "Frame dropped by encoder";
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
++frames_dropped_;
}
void ExampleVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
++frames_received_;
}
void ExampleVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
++frames_decoded_;
}
void ExampleVideoQualityAnalyzer::OnFrameRendered(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
frames_in_flight_.erase(frame.id());
++frames_rendered_;
}
void ExampleVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Failed to encode frame " << frame.id()
<< ". Code: " << error_code;
}
-void ExampleVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id,
+void ExampleVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Failed to decode frame " << frame_id
<< ". Code: " << error_code;
}
void ExampleVideoQualityAnalyzer::Stop() {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
RTC_LOG(INFO) << "There are " << frames_in_flight_.size()
<< " frames in flight, assuming all of them are dropped";
frames_dropped_ += frames_in_flight_.size();
}
std::string ExampleVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
auto it = frames_to_stream_label_.find(frame_id);
RTC_DCHECK(it != frames_to_stream_label_.end())
<< "Unknown frame_id=" << frame_id;
@@ -115,37 +127,37 @@ std::string ExampleVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
}
uint64_t ExampleVideoQualityAnalyzer::frames_captured() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_captured_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_pre_encoded() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_pre_encoded_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_encoded() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_encoded_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_received() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_received_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_decoded() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_decoded_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_rendered() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_rendered_;
}
uint64_t ExampleVideoQualityAnalyzer::frames_dropped() const {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
return frames_dropped_;
}
diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
index 0d6169f9fa..9f004396ae 100644
--- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
+++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
@@ -16,10 +16,11 @@
#include <set>
#include <string>
+#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@@ -33,21 +34,34 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
ExampleVideoQualityAnalyzer();
~ExampleVideoQualityAnalyzer() override;
- void Start(std::string test_case_name, int max_threads_count) override;
- uint16_t OnFrameCaptured(const std::string& stream_label,
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) override;
- void OnFramePreEncode(const VideoFrame& frame) override;
- void OnFrameEncoded(uint16_t frame_id,
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) override;
- void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
- void OnFramePreDecode(uint16_t frame_id,
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image) override;
- void OnFrameDecoded(const VideoFrame& frame,
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) override;
- void OnFrameRendered(const VideoFrame& frame) override;
- void OnEncoderError(const VideoFrame& frame, int32_t error_code) override;
- void OnDecoderError(uint16_t frame_id, int32_t error_code) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) override;
void Stop() override;
std::string GetStreamLabel(uint16_t frame_id) override;
@@ -65,7 +79,7 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// when it will be received in peer B, so we need to guard it with lock.
// Also because analyzer will serve for all video streams it can be called
// from different threads inside one peer.
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
// Stores frame ids, that are currently going from one peer to another. We
// need to keep them to correctly determine dropped frames and also correctly
// process frame id overlap.
diff --git a/test/pc/e2e/analyzer/video/multi_head_queue.h b/test/pc/e2e/analyzer/video/multi_head_queue.h
new file mode 100644
index 0000000000..52314a60d5
--- /dev/null
+++ b/test/pc/e2e/analyzer/video/multi_head_queue.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
+
+#include <deque>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// A queue that allows more than one reader. Readers are independent, and all
+// readers will see all elements; an inserted element stays in the queue until
+// all readers have extracted it. Elements are copied and copying is assumed to
+// be cheap.
+template <typename T>
+class MultiHeadQueue {
+ public:
+ // Creates queue with exactly |readers_count| readers.
+ explicit MultiHeadQueue(size_t readers_count) {
+ for (size_t i = 0; i < readers_count; ++i) {
+ queues_.push_back(std::deque<T>());
+ }
+ }
+
+ // Add value to the end of the queue. Complexity O(readers_count).
+ void PushBack(T value) {
+ for (auto& queue : queues_) {
+ queue.push_back(value);
+ }
+ }
+
+ // Extract element from specified head. Complexity O(1).
+ absl::optional<T> PopFront(size_t index) {
+ RTC_CHECK_LT(index, queues_.size());
+ if (queues_[index].empty()) {
+ return absl::nullopt;
+ }
+ T out = queues_[index].front();
+ queues_[index].pop_front();
+ return out;
+ }
+
+ // Returns element at specified head. Complexity O(1).
+ absl::optional<T> Front(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ if (queues_[index].empty()) {
+ return absl::nullopt;
+ }
+ return queues_[index].front();
+ }
+
+ // Returns true if for specified head there are no more elements in the queue
+ // or false otherwise. Complexity O(1).
+ bool IsEmpty(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ return queues_[index].empty();
+ }
+
+ // Returns size of the longest queue between all readers.
+ // Complexity O(readers_count).
+ size_t size() const {
+ size_t size = 0;
+ for (auto& queue : queues_) {
+ if (queue.size() > size) {
+ size = queue.size();
+ }
+ }
+ return size;
+ }
+
+ // Returns size of the specified queue. Complexity O(1).
+ size_t size(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ return queues_[index].size();
+ }
+
+ size_t readers_count() const { return queues_.size(); }
+
+ private:
+ std::vector<std::deque<T>> queues_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
diff --git a/test/pc/e2e/analyzer/video/multi_head_queue_test.cc b/test/pc/e2e/analyzer/video/multi_head_queue_test.cc
new file mode 100644
index 0000000000..3a4ab6cdbb
--- /dev/null
+++ b/test/pc/e2e/analyzer/video/multi_head_queue_test.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+TEST(MultiHeadQueueTest, GetOnEmpty) {
+ MultiHeadQueue<int> queue = MultiHeadQueue<int>(10);
+ EXPECT_TRUE(queue.IsEmpty(0));
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_FALSE(queue.PopFront(i).has_value());
+ EXPECT_FALSE(queue.Front(i).has_value());
+ }
+}
+
+TEST(MultiHeadQueueTest, SingleHeadOneAddOneRemove) {
+ MultiHeadQueue<int> queue = MultiHeadQueue<int>(1);
+ queue.PushBack(1);
+ EXPECT_EQ(queue.size(), 1lu);
+ EXPECT_TRUE(queue.Front(0).has_value());
+ EXPECT_EQ(queue.Front(0).value(), 1);
+ absl::optional<int> value = queue.PopFront(0);
+ EXPECT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), 1);
+ EXPECT_EQ(queue.size(), 0lu);
+ EXPECT_TRUE(queue.IsEmpty(0));
+}
+
+TEST(MultiHeadQueueTest, SingleHead) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(1);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(0);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAllPerHead) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(0);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(1);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAll) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value1 = queue.PopFront(0);
+ absl::optional<size_t> value2 = queue.PopFront(1);
+ absl::optional<size_t> value3 = queue.PopFront(2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value1.has_value());
+ ASSERT_TRUE(value2.has_value());
+ ASSERT_TRUE(value3.has_value());
+ EXPECT_EQ(value1.value(), i);
+ EXPECT_EQ(value2.value(), i);
+ EXPECT_EQ(value3.value(), i);
+ }
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
index 228ab8ac02..196f900bd3 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
@@ -15,6 +15,7 @@
#include <memory>
#include <utility>
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/video/i420_buffer.h"
#include "modules/video_coding/include/video_error_codes.h"
@@ -26,10 +27,12 @@ namespace webrtc_pc_e2e {
QualityAnalyzingVideoDecoder::QualityAnalyzingVideoDecoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoder> delegate,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer)
: id_(id),
+ peer_name_(peer_name),
implementation_name_("AnalyzingDecoder-" +
std::string(delegate->ImplementationName())),
delegate_(std::move(delegate)),
@@ -76,7 +79,7 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
EncodedImage* origin_image;
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
// Store id to be able to retrieve it in analyzing callback.
timestamp_to_frame_id_.insert({input_image.Timestamp(), out.id});
// Store encoded image to prevent its destruction while it is used in
@@ -87,17 +90,17 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
// We can safely dereference |origin_image|, because it can be removed from
// the map only after |delegate_| Decode method will be invoked. Image will be
// removed inside DecodedImageCallback, which can be done on separate thread.
- analyzer_->OnFramePreDecode(out.id, *origin_image);
+ analyzer_->OnFramePreDecode(peer_name_, out.id, *origin_image);
int32_t result =
delegate_->Decode(*origin_image, missing_frames, render_time_ms);
if (result != WEBRTC_VIDEO_CODEC_OK) {
// If delegate decoder failed, then cleanup data for this image.
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
timestamp_to_frame_id_.erase(input_image.Timestamp());
decoding_images_.erase(out.id);
}
- analyzer_->OnDecoderError(out.id, result);
+ analyzer_->OnDecoderError(peer_name_, out.id, result);
}
return result;
}
@@ -113,7 +116,7 @@ int32_t QualityAnalyzingVideoDecoder::Release() {
// frames, so we don't take a lock to prevent deadlock.
int32_t result = delegate_->Release();
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
analyzing_callback_->SetDelegateCallback(nullptr);
timestamp_to_frame_id_.clear();
decoding_images_.clear();
@@ -135,7 +138,7 @@ QualityAnalyzingVideoDecoder::DecoderCallback::~DecoderCallback() = default;
void QualityAnalyzingVideoDecoder::DecoderCallback::SetDelegateCallback(
DecodedImageCallback* delegate) {
- rtc::CritScope crit(&callback_lock_);
+ MutexLock lock(&callback_lock_);
delegate_callback_ = delegate;
}
@@ -147,7 +150,7 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
decoder_->OnFrameDecoded(&decodedImage, /*decode_time_ms=*/absl::nullopt,
/*qp=*/absl::nullopt);
- rtc::CritScope crit(&callback_lock_);
+ MutexLock lock(&callback_lock_);
RTC_DCHECK(delegate_callback_);
return delegate_callback_->Decoded(decodedImage);
}
@@ -157,7 +160,7 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
int64_t decode_time_ms) {
decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, /*qp=*/absl::nullopt);
- rtc::CritScope crit(&callback_lock_);
+ MutexLock lock(&callback_lock_);
RTC_DCHECK(delegate_callback_);
return delegate_callback_->Decoded(decodedImage, decode_time_ms);
}
@@ -168,7 +171,7 @@ void QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
absl::optional<uint8_t> qp) {
decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, qp);
- rtc::CritScope crit(&callback_lock_);
+ MutexLock lock(&callback_lock_);
RTC_DCHECK(delegate_callback_);
delegate_callback_->Decoded(decodedImage, decode_time_ms, qp);
}
@@ -183,7 +186,7 @@ QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded(
.set_timestamp_rtp(timestamp_ms)
.set_id(frame_id)
.build();
- rtc::CritScope crit(&callback_lock_);
+ MutexLock lock(&callback_lock_);
RTC_DCHECK(delegate_callback_);
delegate_callback_->Decoded(dummy_frame, absl::nullopt, absl::nullopt);
return WEBRTC_VIDEO_CODEC_OK;
@@ -204,7 +207,7 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded(
absl::optional<uint8_t> qp) {
uint16_t frame_id;
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
auto it = timestamp_to_frame_id_.find(frame->timestamp());
if (it == timestamp_to_frame_id_.end()) {
// Ensure, that we have info about this frame. It can happen that for some
@@ -224,15 +227,17 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded(
frame->set_id(frame_id);
VideoQualityAnalyzerInterface::DecoderStats stats;
stats.decode_time_ms = decode_time_ms;
- analyzer_->OnFrameDecoded(*frame, stats);
+ analyzer_->OnFrameDecoded(peer_name_, *frame, stats);
}
QualityAnalyzingVideoDecoderFactory::QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate,
IdGenerator<int>* id_generator,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer)
- : delegate_(std::move(delegate)),
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
id_generator_(id_generator),
extractor_(extractor),
analyzer_(analyzer) {}
@@ -249,7 +254,8 @@ QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder(
const SdpVideoFormat& format) {
std::unique_ptr<VideoDecoder> decoder = delegate_->CreateVideoDecoder(format);
return std::make_unique<QualityAnalyzingVideoDecoder>(
- id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_);
+ id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_,
+ analyzer_);
}
std::unique_ptr<VideoDecoder>
@@ -259,7 +265,8 @@ QualityAnalyzingVideoDecoderFactory::LegacyCreateVideoDecoder(
std::unique_ptr<VideoDecoder> decoder =
delegate_->LegacyCreateVideoDecoder(format, receive_stream_id);
return std::make_unique<QualityAnalyzingVideoDecoder>(
- id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_);
+ id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_,
+ analyzer_);
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
index 5cbc882226..2381f593b9 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
@@ -16,13 +16,14 @@
#include <string>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder.h"
#include "api/video_codecs/video_decoder_factory.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
#include "test/pc/e2e/analyzer/video/id_generator.h"
@@ -45,14 +46,15 @@ namespace webrtc_pc_e2e {
// callback, where video analyzer will be called again and then decoded frame
// will be passed to origin callback, provided by user.
//
-// Quality decoder registers its own callback in origin decoder at the same
-// time, when user registers his callback in quality decoder.
+// Quality decoder registers its own callback in origin decoder, at the same
+// time the user registers their callback in quality decoder.
class QualityAnalyzingVideoDecoder : public VideoDecoder {
public:
// Creates analyzing decoder. |id| is unique coding entity id, that will
// be used to distinguish all encoders and decoders inside
// EncodedImageDataInjector and EncodedImageIdExtracor.
QualityAnalyzingVideoDecoder(int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoder> delegate,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer);
@@ -95,7 +97,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> dummy_frame_buffer_;
- rtc::CriticalSection callback_lock_;
+ Mutex callback_lock_;
DecodedImageCallback* delegate_callback_ RTC_GUARDED_BY(callback_lock_);
};
@@ -104,6 +106,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
absl::optional<uint8_t> qp);
const int id_;
+ const std::string peer_name_;
const std::string implementation_name_;
std::unique_ptr<VideoDecoder> delegate_;
EncodedImageDataExtractor* const extractor_;
@@ -113,7 +116,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
// VideoDecoder interface assumes async delivery of decoded video frames.
// This lock is used to protect shared state, that have to be propagated
// from received EncodedImage to resulted VideoFrame.
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::map<uint32_t, uint16_t> timestamp_to_frame_id_ RTC_GUARDED_BY(lock_);
// Stores currently being decoded images by frame id. Because
@@ -129,6 +132,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
public:
QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate,
IdGenerator<int>* id_generator,
EncodedImageDataExtractor* extractor,
@@ -144,6 +148,7 @@ class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
const std::string& receive_stream_id) override;
private:
+ const std::string peer_name_;
std::unique_ptr<VideoDecoderFactory> delegate_;
IdGenerator<int>* const id_generator_;
EncodedImageDataExtractor* const extractor_;
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
index 2e7b8f4152..27b8f92bed 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
@@ -14,10 +14,10 @@
#include <memory>
#include <utility>
+#include "absl/strings/string_view.h"
#include "api/video/video_codec_type.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/include/video_error_codes.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
namespace webrtc {
@@ -54,12 +54,14 @@ std::pair<uint32_t, uint32_t> GetMinMaxBitratesBps(const VideoCodec& codec,
QualityAnalyzingVideoEncoder::QualityAnalyzingVideoEncoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoder> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
EncodedImageDataInjector* injector,
VideoQualityAnalyzerInterface* analyzer)
: id_(id),
+ peer_name_(peer_name),
delegate_(std::move(delegate)),
bitrate_multiplier_(bitrate_multiplier),
stream_required_spatial_index_(std::move(stream_required_spatial_index)),
@@ -77,7 +79,7 @@ void QualityAnalyzingVideoEncoder::SetFecControllerOverride(
int32_t QualityAnalyzingVideoEncoder::InitEncode(
const VideoCodec* codec_settings,
const Settings& settings) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
codec_settings_ = *codec_settings;
mode_ = SimulcastMode::kNormal;
if (codec_settings->codecType == kVideoCodecVP9) {
@@ -108,7 +110,7 @@ int32_t QualityAnalyzingVideoEncoder::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
// We need to get a lock here because delegate_callback can be hypothetically
// accessed from different thread (encoder one) concurrently.
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
delegate_callback_ = callback;
return delegate_->RegisterEncodeCompleteCallback(this);
}
@@ -118,7 +120,7 @@ int32_t QualityAnalyzingVideoEncoder::Release() {
// frames, so we don't take a lock to prevent deadlock.
int32_t result = delegate_->Release();
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
delegate_callback_ = nullptr;
return result;
}
@@ -127,19 +129,19 @@ int32_t QualityAnalyzingVideoEncoder::Encode(
const VideoFrame& frame,
const std::vector<VideoFrameType>* frame_types) {
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
// Store id to be able to retrieve it in analyzing callback.
timestamp_to_frame_id_list_.push_back({frame.timestamp(), frame.id()});
// If this list is growing, it means that we are not receiving new encoded
// images from encoder. So it should be a bug in setup on in the encoder.
RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount);
}
- analyzer_->OnFramePreEncode(frame);
+ analyzer_->OnFramePreEncode(peer_name_, frame);
int32_t result = delegate_->Encode(frame, frame_types);
if (result != WEBRTC_VIDEO_CODEC_OK) {
// If origin encoder failed, then cleanup data for this frame.
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
// The timestamp-frame_id pair can be not the last one, so we need to
// find it first and then remove. We will search from the end, because
// usually it will be the last or close to the last one.
@@ -152,7 +154,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode(
}
}
}
- analyzer_->OnEncoderError(frame, result);
+ analyzer_->OnEncoderError(peer_name_, frame, result);
}
return result;
}
@@ -162,7 +164,7 @@ void QualityAnalyzingVideoEncoder::SetRates(
RTC_DCHECK_GT(bitrate_multiplier_, 0.0);
if (fabs(bitrate_multiplier_ - kNoMultiplier) < kEps) {
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
bitrate_allocation_ = parameters.bitrate;
}
return delegate_->SetRates(parameters);
@@ -205,7 +207,7 @@ void QualityAnalyzingVideoEncoder::SetRates(
RateControlParameters adjusted_params = parameters;
adjusted_params.bitrate = multiplied_allocation;
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
bitrate_allocation_ = adjusted_params.bitrate;
}
return delegate_->SetRates(adjusted_params);
@@ -236,7 +238,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
bool discard = false;
uint32_t target_encode_bitrate = 0;
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
std::pair<uint32_t, uint16_t> timestamp_frame_id;
while (!timestamp_to_frame_id_list_.empty()) {
timestamp_frame_id = timestamp_to_frame_id_list_.front();
@@ -277,7 +279,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
// not discarded layers have to be passed.
VideoQualityAnalyzerInterface::EncoderStats stats;
stats.target_encode_bitrate = target_encode_bitrate;
- analyzer_->OnFrameEncoded(frame_id, encoded_image, stats);
+ analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats);
}
// Image data injector injects frame id and discard flag into provided
@@ -288,7 +290,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
const EncodedImage& image =
injector_->InjectData(frame_id, discard, encoded_image, id_);
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK(delegate_callback_);
return delegate_callback_->OnEncodedImage(image, codec_specific_info,
fragmentation);
@@ -297,8 +299,8 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
void QualityAnalyzingVideoEncoder::OnDroppedFrame(
EncodedImageCallback::DropReason reason) {
- rtc::CritScope crit(&lock_);
- analyzer_->OnFrameDropped(reason);
+ MutexLock lock(&lock_);
+ analyzer_->OnFrameDropped(peer_name_, reason);
RTC_DCHECK(delegate_callback_);
delegate_callback_->OnDroppedFrame(reason);
}
@@ -348,13 +350,15 @@ bool QualityAnalyzingVideoEncoder::ShouldDiscard(
}
QualityAnalyzingVideoEncoderFactory::QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
IdGenerator<int>* id_generator,
EncodedImageDataInjector* injector,
VideoQualityAnalyzerInterface* analyzer)
- : delegate_(std::move(delegate)),
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
bitrate_multiplier_(bitrate_multiplier),
stream_required_spatial_index_(std::move(stream_required_spatial_index)),
id_generator_(id_generator),
@@ -378,9 +382,9 @@ std::unique_ptr<VideoEncoder>
QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
return std::make_unique<QualityAnalyzingVideoEncoder>(
- id_generator_->GetNextId(), delegate_->CreateVideoEncoder(format),
- bitrate_multiplier_, stream_required_spatial_index_, injector_,
- analyzer_);
+ id_generator_->GetNextId(), peer_name_,
+ delegate_->CreateVideoEncoder(format), bitrate_multiplier_,
+ stream_required_spatial_index_, injector_, analyzer_);
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
index 3307dc7325..3486c55a5f 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
@@ -16,13 +16,14 @@
#include <utility>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
#include "test/pc/e2e/analyzer/video/id_generator.h"
@@ -49,8 +50,8 @@ constexpr int kAnalyzeAnySpatialStream = -1;
// injected into EncodedImage with passed EncodedImageDataInjector. Then new
// EncodedImage will be passed to origin callback, provided by user.
//
-// Quality encoder registers its own callback in origin encoder at the same
-// time, when user registers his callback in quality encoder.
+// Quality encoder registers its own callback in origin encoder, at the same
+// time the user registers their callback in quality encoder.
class QualityAnalyzingVideoEncoder : public VideoEncoder,
public EncodedImageCallback {
public:
@@ -59,6 +60,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
// EncodedImageDataInjector and EncodedImageIdExtracor.
QualityAnalyzingVideoEncoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoder> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
@@ -139,6 +141,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
const int id_;
+ const std::string peer_name_;
std::unique_ptr<VideoEncoder> delegate_;
const double bitrate_multiplier_;
// Contains mapping from stream label to optional spatial index.
@@ -154,7 +157,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
// VideoEncoder interface assumes async delivery of encoded images.
// This lock is used to protect shared state, that have to be propagated
// from received VideoFrame to resulted EncodedImage.
- rtc::CriticalSection lock_;
+ Mutex lock_;
VideoCodec codec_settings_;
SimulcastMode mode_ RTC_GUARDED_BY(lock_);
@@ -170,6 +173,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
public:
QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
@@ -186,6 +190,7 @@ class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
const SdpVideoFormat& format) override;
private:
+ const std::string peer_name_;
std::unique_ptr<VideoEncoderFactory> delegate_;
const double bitrate_multiplier_;
std::map<std::string, absl::optional<int>> stream_required_spatial_index_;
diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
index ec0d26b780..8e7a511878 100644
--- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
+++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
@@ -19,13 +19,6 @@
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-
-// Number of bytes from the beginning of the EncodedImage buffer that will be
-// used to store frame id and sub id.
-constexpr size_t kUsedBufferSize = 3;
-
-} // namespace
SingleProcessEncodedImageDataInjector::SingleProcessEncodedImageDataInjector() =
default;
@@ -37,15 +30,15 @@ EncodedImage SingleProcessEncodedImageDataInjector::InjectData(
bool discard,
const EncodedImage& source,
int coding_entity_id) {
- RTC_CHECK(source.size() >= kUsedBufferSize);
+ RTC_CHECK(source.size() >= ExtractionInfo::kUsedBufferSize);
ExtractionInfo info;
- info.length = source.size();
info.discard = discard;
- size_t insertion_pos = source.size() - kUsedBufferSize;
- memcpy(info.origin_data, &source.data()[insertion_pos], kUsedBufferSize);
+ size_t insertion_pos = source.size() - ExtractionInfo::kUsedBufferSize;
+ memcpy(info.origin_data, &source.data()[insertion_pos],
+ ExtractionInfo::kUsedBufferSize);
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
// Will create new one if missed.
ExtractionInfoVector& ev = extraction_cache_[id];
info.sub_id = ev.next_sub_id++;
@@ -69,15 +62,27 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
uint8_t* buffer = out.data();
size_t size = out.size();
- // |pos| is pointing to end of current encoded image.
- size_t pos = size - 1;
+ std::vector<size_t> frame_sizes;
+ std::vector<size_t> frame_sl_index;
+ size_t max_spatial_index = out.SpatialIndex().value_or(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ auto frame_size = source.SpatialLayerFrameSize(i);
+ if (frame_size.value_or(0)) {
+ frame_sl_index.push_back(i);
+ frame_sizes.push_back(frame_size.value());
+ }
+ }
+ if (frame_sizes.empty()) {
+ frame_sizes.push_back(size);
+ }
+
+ size_t prev_frames_size = 0;
absl::optional<uint16_t> id = absl::nullopt;
bool discard = true;
std::vector<ExtractionInfo> extraction_infos;
- // Go through whole buffer and find all related extraction infos in
- // order from 1st encoded image to the last.
- while (true) {
- size_t insertion_pos = pos - kUsedBufferSize + 1;
+ for (size_t frame_size : frame_sizes) {
+ size_t insertion_pos =
+ prev_frames_size + frame_size - ExtractionInfo::kUsedBufferSize;
// Extract frame id from first 2 bytes starting from insertion pos.
uint16_t next_id = buffer[insertion_pos] + (buffer[insertion_pos + 1] << 8);
// Extract frame sub id from second 3 byte starting from insertion pos.
@@ -88,8 +93,10 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
id = next_id;
ExtractionInfo info;
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
auto ext_vector_it = extraction_cache_.find(next_id);
+ // TODO(titovartem) add support for receiving single frame multiple times
+ // when in simulcast key frame for another spatial stream can be received.
RTC_CHECK(ext_vector_it != extraction_cache_.end())
<< "Unknown frame_id=" << next_id;
@@ -99,41 +106,45 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
info = info_it->second;
ext_vector_it->second.infos.erase(info_it);
}
- extraction_infos.push_back(info);
// We need to discard encoded image only if all concatenated encoded images
// have to be discarded.
discard = discard && info.discard;
- if (pos < info.length) {
- break;
- }
- pos -= info.length;
+
+ extraction_infos.push_back(info);
+ prev_frames_size += frame_size;
}
RTC_CHECK(id);
- std::reverse(extraction_infos.begin(), extraction_infos.end());
+
if (discard) {
out.set_size(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ out.SetSpatialLayerFrameSize(i, 0);
+ }
return EncodedImageExtractionResult{*id, out, true};
}
// Make a pass from begin to end to restore origin payload and erase discarded
// encoded images.
- pos = 0;
- auto extraction_infos_it = extraction_infos.begin();
- while (pos < size) {
- RTC_DCHECK(extraction_infos_it != extraction_infos.end());
- const ExtractionInfo& info = *extraction_infos_it;
+ size_t pos = 0;
+ for (size_t frame_index = 0; frame_index < frame_sizes.size();
+ ++frame_index) {
+ RTC_CHECK(pos < size);
+ const size_t frame_size = frame_sizes[frame_index];
+ const ExtractionInfo& info = extraction_infos[frame_index];
if (info.discard) {
// If this encoded image is marked to be discarded - erase it's payload
// from the buffer.
- memmove(&buffer[pos], &buffer[pos + info.length],
- size - pos - info.length);
- size -= info.length;
+ memmove(&buffer[pos], &buffer[pos + frame_size], size - pos - frame_size);
+ RTC_CHECK_LT(frame_index, frame_sl_index.size())
+ << "codec doesn't support discard option or the image, that was "
+ "supposed to be discarded, is lost";
+ out.SetSpatialLayerFrameSize(frame_sl_index[frame_index], 0);
+ size -= frame_size;
} else {
- memcpy(&buffer[pos + info.length - kUsedBufferSize], info.origin_data,
- kUsedBufferSize);
- pos += info.length;
+ memcpy(&buffer[pos + frame_size - ExtractionInfo::kUsedBufferSize],
+ info.origin_data, ExtractionInfo::kUsedBufferSize);
+ pos += frame_size;
}
- ++extraction_infos_it;
}
out.set_size(pos);
diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
index 3787cc51aa..c70c25bc7d 100644
--- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
+++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
@@ -18,7 +18,7 @@
#include <vector>
#include "api/video/encoded_image.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
namespace webrtc {
@@ -57,15 +57,16 @@ class SingleProcessEncodedImageDataInjector : public EncodedImageDataInjector,
// Contains data required to extract frame id from EncodedImage and restore
// original buffer.
struct ExtractionInfo {
+ // Number of bytes from the beginning of the EncodedImage buffer that will
+ // be used to store frame id and sub id.
+ const static size_t kUsedBufferSize = 3;
// Frame sub id to distinguish encoded images for different spatial layers.
uint8_t sub_id;
- // Length of the origin buffer encoded image.
- size_t length;
// Flag to show is this encoded images should be discarded by analyzing
// decoder because of not required spatial layer/simulcast stream.
bool discard;
// Data from first 3 bytes of origin encoded image's payload.
- uint8_t origin_data[3];
+ uint8_t origin_data[ExtractionInfo::kUsedBufferSize];
};
struct ExtractionInfoVector {
@@ -77,7 +78,7 @@ class SingleProcessEncodedImageDataInjector : public EncodedImageDataInjector,
std::map<uint8_t, ExtractionInfo> infos;
};
- rtc::CriticalSection lock_;
+ Mutex lock_;
// Stores a mapping from frame id to extraction info for spatial layers
// for this frame id. There can be a lot of them, because if frame was
// dropped we can't clean it up, because we won't receive a signal on
diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
index 67cafa75a6..e25361e337 100644
--- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
+++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
@@ -44,6 +44,7 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardFalse) {
EXPECT_FALSE(out.discard);
EXPECT_EQ(out.image.size(), 10ul);
EXPECT_EQ(out.image.capacity(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out.image.data()[i], i + 1);
}
@@ -63,6 +64,60 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardTrue) {
EXPECT_TRUE(out.discard);
EXPECT_EQ(out.image.size(), 0ul);
EXPECT_EQ(out.image.capacity(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+}
+
+TEST(SingleProcessEncodedImageDataInjector, InjectWithUnsetSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+
+ rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1);
+
+ EncodedImage source(buffer.data(), 10, 10);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source, 1);
+ intermediate.SetSpatialIndex(2);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.capacity(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjector, InjectWithZeroSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+
+ rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1);
+
+ EncodedImage source(buffer.data(), 10, 10);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source, 1);
+ intermediate.SetSpatialIndex(2);
+ intermediate.SetSpatialLayerFrameSize(0, 0);
+ intermediate.SetSpatialLayerFrameSize(1, 0);
+ intermediate.SetSpatialLayerFrameSize(2, 0);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.capacity(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
}
TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
@@ -95,6 +150,7 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
EXPECT_FALSE(out1.discard);
EXPECT_EQ(out1.image.size(), 10ul);
EXPECT_EQ(out1.image.capacity(), 10ul);
+ EXPECT_EQ(out1.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out1.image.data()[i], i + 1);
}
@@ -102,10 +158,12 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
EXPECT_TRUE(out2.discard);
EXPECT_EQ(out2.image.size(), 0ul);
EXPECT_EQ(out2.image.capacity(), 10ul);
+ EXPECT_EQ(out2.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
EXPECT_EQ(out3.id, 520);
EXPECT_FALSE(out3.discard);
EXPECT_EQ(out3.image.size(), 10ul);
EXPECT_EQ(out3.image.capacity(), 10ul);
+ EXPECT_EQ(out3.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out3.image.data()[i], i + 21);
}
@@ -140,6 +198,10 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) {
concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
EncodedImage concatenated(concatenated_buffer.data(), concatenated_length,
concatenated_length);
+ concatenated.SetSpatialIndex(2);
+ concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+ concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+ concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
// Extract frame id from concatenated image
EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2);
@@ -152,6 +214,10 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) {
EXPECT_EQ(out.image.data()[i], i + 1);
EXPECT_EQ(out.image.data()[i + 10], i + 21);
}
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(1).value_or(0), 0ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(2).value_or(0), 10ul);
}
TEST(SingleProcessEncodedImageDataInjector,
@@ -184,6 +250,10 @@ TEST(SingleProcessEncodedImageDataInjector,
concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
EncodedImage concatenated(concatenated_buffer.data(), concatenated_length,
concatenated_length);
+ concatenated.SetSpatialIndex(2);
+ concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+ concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+ concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
// Extract frame id from concatenated image
EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2);
@@ -192,6 +262,10 @@ TEST(SingleProcessEncodedImageDataInjector,
EXPECT_TRUE(out.discard);
EXPECT_EQ(out.image.size(), 0ul);
EXPECT_EQ(out.image.capacity(), 3 * 10ul);
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
index 074188439b..48e65ef686 100644
--- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
+++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
@@ -14,6 +14,8 @@
#include <vector>
#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
@@ -43,10 +45,12 @@ class AnalyzingFramePreprocessor
: public test::TestVideoCapturer::FramePreprocessor {
public:
AnalyzingFramePreprocessor(
- std::string stream_label,
+ absl::string_view peer_name,
+ absl::string_view stream_label,
VideoQualityAnalyzerInterface* analyzer,
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks)
- : stream_label_(std::move(stream_label)),
+ : peer_name_(peer_name),
+ stream_label_(stream_label),
analyzer_(analyzer),
sinks_(std::move(sinks)) {}
~AnalyzingFramePreprocessor() override = default;
@@ -54,7 +58,8 @@ class AnalyzingFramePreprocessor
VideoFrame Preprocess(const VideoFrame& source_frame) override {
// Copy VideoFrame to be able to set id on it.
VideoFrame frame = source_frame;
- uint16_t frame_id = analyzer_->OnFrameCaptured(stream_label_, frame);
+ uint16_t frame_id =
+ analyzer_->OnFrameCaptured(peer_name_, stream_label_, frame);
frame.set_id(frame_id);
for (auto& sink : sinks_) {
@@ -64,6 +69,7 @@ class AnalyzingFramePreprocessor
}
private:
+ const std::string peer_name_;
const std::string stream_label_;
VideoQualityAnalyzerInterface* const analyzer_;
const std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
@@ -88,26 +94,29 @@ VideoQualityAnalyzerInjectionHelper::~VideoQualityAnalyzerInjectionHelper() =
std::unique_ptr<VideoEncoderFactory>
VideoQualityAnalyzerInjectionHelper::WrapVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index)
const {
return std::make_unique<QualityAnalyzingVideoEncoderFactory>(
- std::move(delegate), bitrate_multiplier,
+ peer_name, std::move(delegate), bitrate_multiplier,
std::move(stream_required_spatial_index),
encoding_entities_id_generator_.get(), injector_, analyzer_.get());
}
std::unique_ptr<VideoDecoderFactory>
VideoQualityAnalyzerInjectionHelper::WrapVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate) const {
return std::make_unique<QualityAnalyzingVideoDecoderFactory>(
- std::move(delegate), encoding_entities_id_generator_.get(), extractor_,
- analyzer_.get());
+ peer_name, std::move(delegate), encoding_entities_id_generator_.get(),
+ extractor_, analyzer_.get());
}
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor(
+ absl::string_view peer_name,
const VideoConfig& config) {
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
test::VideoFrameWriter* writer =
@@ -121,27 +130,31 @@ VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor(
config.width, config.height)));
}
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
known_video_configs_.insert({*config.stream_label, config});
}
return std::make_unique<AnalyzingFramePreprocessor>(
- std::move(*config.stream_label), analyzer_.get(), std::move(sinks));
+ peer_name, std::move(*config.stream_label), analyzer_.get(),
+ std::move(sinks));
}
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>
-VideoQualityAnalyzerInjectionHelper::CreateVideoSink() {
- return std::make_unique<AnalyzingVideoSink>(this);
+VideoQualityAnalyzerInjectionHelper::CreateVideoSink(
+ absl::string_view peer_name) {
+ return std::make_unique<AnalyzingVideoSink>(peer_name, this);
}
-void VideoQualityAnalyzerInjectionHelper::Start(std::string test_case_name,
- int max_threads_count) {
- analyzer_->Start(std::move(test_case_name), max_threads_count);
+void VideoQualityAnalyzerInjectionHelper::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
+ analyzer_->Start(std::move(test_case_name), peer_names, max_threads_count);
}
void VideoQualityAnalyzerInjectionHelper::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- analyzer_->OnStatsReports(pc_label, stats_reports);
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ analyzer_->OnStatsReports(pc_label, report);
}
void VideoQualityAnalyzerInjectionHelper::Stop() {
@@ -169,12 +182,20 @@ VideoQualityAnalyzerInjectionHelper::MaybeCreateVideoWriter(
return out;
}
-void VideoQualityAnalyzerInjectionHelper::OnFrame(const VideoFrame& frame) {
- if (IsDummyFrameBuffer(frame.video_frame_buffer()->ToI420())) {
+void VideoQualityAnalyzerInjectionHelper::OnFrame(absl::string_view peer_name,
+ const VideoFrame& frame) {
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+ frame.video_frame_buffer()->ToI420();
+ if (IsDummyFrameBuffer(i420_buffer)) {
// This is dummy frame, so we don't need to process it further.
return;
}
- analyzer_->OnFrameRendered(frame);
+ // Copy entire video frame including video buffer to ensure that analyzer
+ // won't hold any WebRTC internal buffers.
+ VideoFrame frame_copy = frame;
+ frame_copy.set_video_frame_buffer(I420Buffer::Copy(*i420_buffer));
+ analyzer_->OnFrameRendered(peer_name, frame_copy);
+
std::string stream_label = analyzer_->GetStreamLabel(frame.id());
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>* sinks =
PopulateSinks(stream_label);
@@ -189,7 +210,7 @@ void VideoQualityAnalyzerInjectionHelper::OnFrame(const VideoFrame& frame) {
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>*
VideoQualityAnalyzerInjectionHelper::PopulateSinks(
const std::string& stream_label) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
auto sinks_it = sinks_.find(stream_label);
if (sinks_it != sinks_.end()) {
return &sinks_it->second;
diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
index a0daa9ff18..d741288345 100644
--- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
+++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
@@ -14,7 +14,10 @@
#include <map>
#include <memory>
#include <string>
+#include <vector>
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/test/stats_observer_interface.h"
#include "api/test/video_quality_analyzer_interface.h"
@@ -22,7 +25,7 @@
#include "api/video/video_sink_interface.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
#include "test/pc/e2e/analyzer/video/id_generator.h"
#include "test/test_video_capturer.h"
@@ -46,6 +49,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
// Wraps video encoder factory to give video quality analyzer access to frames
// before encoding and encoded images after.
std::unique_ptr<VideoEncoderFactory> WrapVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index)
@@ -53,25 +57,31 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
// Wraps video decoder factory to give video quality analyzer access to
// received encoded images and frames, that were decoded from them.
std::unique_ptr<VideoDecoderFactory> WrapVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate) const;
// Creates VideoFrame preprocessor, that will allow video quality analyzer to
// get access to the captured frames. If provided config also specifies
// |input_dump_file_name|, video will be written into that file.
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
- CreateFramePreprocessor(const VideoConfig& config);
+ CreateFramePreprocessor(absl::string_view peer_name,
+ const VideoConfig& config);
// Creates sink, that will allow video quality analyzer to get access to
// the rendered frames. If corresponding video track has
// |output_dump_file_name| in its VideoConfig, then video also will be written
// into that file.
- std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> CreateVideoSink();
+ std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> CreateVideoSink(
+ absl::string_view peer_name);
- void Start(std::string test_case_name, int max_threads_count);
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count);
// Forwards |stats_reports| for Peer Connection |pc_label| to
// |analyzer_|.
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
// Stops VideoQualityAnalyzerInterface to populate final data and metrics.
// Should be invoked after analyzed video tracks are disposed.
@@ -80,20 +90,26 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
private:
class AnalyzingVideoSink final : public rtc::VideoSinkInterface<VideoFrame> {
public:
- explicit AnalyzingVideoSink(VideoQualityAnalyzerInjectionHelper* helper)
- : helper_(helper) {}
+ explicit AnalyzingVideoSink(absl::string_view peer_name,
+ VideoQualityAnalyzerInjectionHelper* helper)
+ : peer_name_(peer_name), helper_(helper) {}
~AnalyzingVideoSink() override = default;
- void OnFrame(const VideoFrame& frame) override { helper_->OnFrame(frame); }
+ void OnFrame(const VideoFrame& frame) override {
+ helper_->OnFrame(peer_name_, frame);
+ }
private:
+ const std::string peer_name_;
VideoQualityAnalyzerInjectionHelper* const helper_;
};
test::VideoFrameWriter* MaybeCreateVideoWriter(
absl::optional<std::string> file_name,
const PeerConnectionE2EQualityTestFixture::VideoConfig& config);
- void OnFrame(const VideoFrame& frame);
+ // Creates a deep copy of the frame and passes it to the video analyzer, while
+ // passing real frame to the sinks
+ void OnFrame(absl::string_view peer_name, const VideoFrame& frame);
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>*
PopulateSinks(const std::string& stream_label);
@@ -103,7 +119,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
std::vector<std::unique_ptr<test::VideoFrameWriter>> video_writers_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::map<std::string, VideoConfig> known_video_configs_ RTC_GUARDED_BY(lock_);
std::map<std::string,
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>>
diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
index 754a0a468f..cc675cc2df 100644
--- a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
+++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
@@ -10,55 +10,94 @@
#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-
-constexpr int kBitsInByte = 8;
-} // namespace
-
-void VideoQualityMetricsReporter::Start(absl::string_view test_case_name) {
+void VideoQualityMetricsReporter::Start(
+ absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* /*reporter_helper*/) {
test_case_name_ = std::string(test_case_name);
+ start_time_ = Now();
}
-// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as
-// bugs.webrtc.org/10428 is fixed.
void VideoQualityMetricsReporter::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- for (const StatsReport* stats_report : stats_reports) {
- // The only stats collected by this analyzer are present in
- // kStatsReportTypeBwe reports, so all other reports are just ignored.
- if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeBwe) {
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ RTC_CHECK(start_time_)
+ << "Please invoke Start(...) method before calling OnStatsReports(...)";
+
+ auto transport_stats = report->GetStatsOfType<RTCTransportStats>();
+ if (transport_stats.size() == 0u ||
+ !transport_stats[0]->selected_candidate_pair_id.is_defined()) {
+ return;
+ }
+ RTC_DCHECK_EQ(transport_stats.size(), 1);
+ std::string selected_ice_id =
+ transport_stats[0]->selected_candidate_pair_id.ValueToString();
+ // Use the selected ICE candidate pair ID to get the appropriate ICE stats.
+ const RTCIceCandidatePairStats ice_candidate_pair_stats =
+ report->Get(selected_ice_id)->cast_to<const RTCIceCandidatePairStats>();
+
+ auto outbound_rtp_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ StatsSample sample;
+ for (auto& s : outbound_rtp_stats) {
+ if (!s->media_type.is_defined()) {
+ continue;
+ }
+ if (!(*s->media_type == RTCMediaStreamTrackKind::kVideo)) {
continue;
}
- const webrtc::StatsReport::Value* available_send_bandwidth =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameAvailableSendBandwidth);
- const webrtc::StatsReport::Value* retransmission_bitrate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameRetransmitBitrate);
- const webrtc::StatsReport::Value* transmission_bitrate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameTransmitBitrate);
- RTC_CHECK(available_send_bandwidth);
- RTC_CHECK(retransmission_bitrate);
- RTC_CHECK(transmission_bitrate);
-
- rtc::CritScope crit(&video_bwe_stats_lock_);
- VideoBweStats& video_bwe_stats = video_bwe_stats_[pc_label];
+ if (s->timestamp_us() > sample.sample_time.us()) {
+ sample.sample_time = Timestamp::Micros(s->timestamp_us());
+ }
+ sample.retransmitted_bytes_sent +=
+ DataSize::Bytes(s->retransmitted_bytes_sent.ValueOrDefault(0ul));
+ sample.bytes_sent += DataSize::Bytes(s->bytes_sent.ValueOrDefault(0ul));
+ sample.header_bytes_sent +=
+ DataSize::Bytes(s->header_bytes_sent.ValueOrDefault(0ul));
+ }
+
+ MutexLock lock(&video_bwe_stats_lock_);
+ VideoBweStats& video_bwe_stats = video_bwe_stats_[std::string(pc_label)];
+ if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) {
video_bwe_stats.available_send_bandwidth.AddSample(
- available_send_bandwidth->int_val());
- video_bwe_stats.transmission_bitrate.AddSample(
- transmission_bitrate->int_val());
- video_bwe_stats.retransmission_bitrate.AddSample(
- retransmission_bitrate->int_val());
+ DataRate::BitsPerSec(
+ *ice_candidate_pair_stats.available_outgoing_bitrate)
+ .bytes_per_sec());
+ }
+
+ StatsSample prev_sample = last_stats_sample_[std::string(pc_label)];
+ if (prev_sample.sample_time.IsZero()) {
+ prev_sample.sample_time = start_time_.value();
+ }
+ last_stats_sample_[std::string(pc_label)] = sample;
+
+ TimeDelta time_between_samples = sample.sample_time - prev_sample.sample_time;
+ if (time_between_samples.IsZero()) {
+ return;
}
+
+ DataRate retransmission_bitrate =
+ (sample.retransmitted_bytes_sent - prev_sample.retransmitted_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.retransmission_bitrate.AddSample(
+ retransmission_bitrate.bytes_per_sec());
+ DataRate transmission_bitrate =
+ (sample.bytes_sent + sample.header_bytes_sent - prev_sample.bytes_sent -
+ prev_sample.header_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.transmission_bitrate.AddSample(
+ transmission_bitrate.bytes_per_sec());
}
void VideoQualityMetricsReporter::StopAndReportResults() {
- rtc::CritScope video_bwe_crit(&video_bwe_stats_lock_);
+ MutexLock video_bwemutex_(&video_bwe_stats_lock_);
for (const auto& item : video_bwe_stats_) {
ReportVideoBweResults(GetTestCaseName(item.first), item.second);
}
@@ -73,14 +112,11 @@ void VideoQualityMetricsReporter::ReportVideoBweResults(
const std::string& test_case_name,
const VideoBweStats& video_bwe_stats) {
ReportResult("available_send_bandwidth", test_case_name,
- video_bwe_stats.available_send_bandwidth / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.available_send_bandwidth, "bytesPerSecond");
ReportResult("transmission_bitrate", test_case_name,
- video_bwe_stats.transmission_bitrate / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.transmission_bitrate, "bytesPerSecond");
ReportResult("retransmission_bitrate", test_case_name,
- video_bwe_stats.retransmission_bitrate / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.retransmission_bitrate, "bytesPerSecond");
}
void VideoQualityMetricsReporter::ReportResult(
diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
index 1688a7b6fc..188aa42ef1 100644
--- a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
+++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
@@ -14,9 +14,13 @@
#include <map>
#include <string>
+#include "absl/strings/string_view.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "rtc_base/critical_section.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
#include "rtc_base/numerics/samples_stats_counter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
@@ -31,15 +35,25 @@ struct VideoBweStats {
class VideoQualityMetricsReporter
: public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
public:
- VideoQualityMetricsReporter() = default;
+ VideoQualityMetricsReporter(Clock* const clock) : clock_(clock) {}
~VideoQualityMetricsReporter() override = default;
- void Start(absl::string_view test_case_name) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) override;
+ void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void StopAndReportResults() override;
private:
+ struct StatsSample {
+ DataSize bytes_sent = DataSize::Zero();
+ DataSize header_bytes_sent = DataSize::Zero();
+ DataSize retransmitted_bytes_sent = DataSize::Zero();
+
+ Timestamp sample_time = Timestamp::Zero();
+ };
+
std::string GetTestCaseName(const std::string& stream_label) const;
static void ReportVideoBweResults(const std::string& test_case_name,
const VideoBweStats& video_bwe_stats);
@@ -50,14 +64,20 @@ class VideoQualityMetricsReporter
const std::string& unit,
webrtc::test::ImproveDirection improve_direction =
webrtc::test::ImproveDirection::kNone);
+ Timestamp Now() const { return clock_->CurrentTime(); }
+
+ Clock* const clock_;
std::string test_case_name_;
+ absl::optional<Timestamp> start_time_;
- rtc::CriticalSection video_bwe_stats_lock_;
+ Mutex video_bwe_stats_lock_;
// Map between a peer connection label (provided by the framework) and
// its video BWE stats.
std::map<std::string, VideoBweStats> video_bwe_stats_
RTC_GUARDED_BY(video_bwe_stats_lock_);
+ std::map<std::string, StatsSample> last_stats_sample_
+ RTC_GUARDED_BY(video_bwe_stats_lock_);
};
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/analyzer_helper.cc b/test/pc/e2e/analyzer_helper.cc
index f11b3bb803..852f0a3435 100644
--- a/test/pc/e2e/analyzer_helper.cc
+++ b/test/pc/e2e/analyzer_helper.cc
@@ -22,16 +22,36 @@ AnalyzerHelper::AnalyzerHelper() {
void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id,
std::string stream_label) {
RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
- track_to_stream_map_.insert({std::move(track_id), std::move(stream_label)});
+ track_to_stream_map_.insert(
+ {std::move(track_id), StreamInfo{stream_label, stream_label}});
}
-const std::string& AnalyzerHelper::GetStreamLabelFromTrackId(
- const std::string& track_id) const {
+void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id,
+ std::string stream_label,
+ std::string sync_group) {
+ RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
+ track_to_stream_map_.insert(
+ {std::move(track_id),
+ StreamInfo{std::move(stream_label), std::move(sync_group)}});
+}
+
+const AnalyzerHelper::StreamInfo& AnalyzerHelper::GetStreamInfoFromTrackId(
+ absl::string_view track_id) const {
RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
- auto track_to_stream_pair = track_to_stream_map_.find(track_id);
+ auto track_to_stream_pair = track_to_stream_map_.find(std::string(track_id));
RTC_CHECK(track_to_stream_pair != track_to_stream_map_.end());
return track_to_stream_pair->second;
}
+absl::string_view AnalyzerHelper::GetStreamLabelFromTrackId(
+ absl::string_view track_id) const {
+ return GetStreamInfoFromTrackId(track_id).stream_label;
+}
+
+absl::string_view AnalyzerHelper::GetSyncGroupLabelFromTrackId(
+ absl::string_view track_id) const {
+ return GetStreamInfoFromTrackId(track_id).sync_group;
+}
+
} // namespace webrtc_pc_e2e
} // namespace webrtc
diff --git a/test/pc/e2e/analyzer_helper.h b/test/pc/e2e/analyzer_helper.h
index 51cfe5587d..4b0e0c3ac4 100644
--- a/test/pc/e2e/analyzer_helper.h
+++ b/test/pc/e2e/analyzer_helper.h
@@ -14,7 +14,8 @@
#include <map>
#include <string>
-#include "api/test/track_id_stream_label_map.h"
+#include "absl/strings/string_view.h"
+#include "api/test/track_id_stream_info_map.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/thread_annotations.h"
@@ -22,25 +23,40 @@ namespace webrtc {
namespace webrtc_pc_e2e {
// This class is a utility that provides bookkeeping capabilities that
-// are useful to associate stats reports track_ids to the remote stream_id.
+// are useful to associate stats reports track_ids to the remote stream info.
// The framework will populate an instance of this class and it will pass
// it to the Start method of Media Quality Analyzers.
// An instance of AnalyzerHelper must only be accessed from a single
// thread and since stats collection happens on the signaling thread,
-// both AddTrackToStreamMapping and GetStreamLabelFromTrackId must be
-// invoked from the signaling thread.
-class AnalyzerHelper : public TrackIdStreamLabelMap {
+// AddTrackToStreamMapping, GetStreamLabelFromTrackId and
+// GetSyncGroupLabelFromTrackId must be invoked from the signaling thread. Get
+// methods should be invoked only after all data is added. Mixing Get methods
+// with adding new data may lead to undefined behaviour.
+class AnalyzerHelper : public TrackIdStreamInfoMap {
public:
AnalyzerHelper();
void AddTrackToStreamMapping(std::string track_id, std::string stream_label);
+ void AddTrackToStreamMapping(std::string track_id,
+ std::string stream_label,
+ std::string sync_group);
- const std::string& GetStreamLabelFromTrackId(
- const std::string& track_id) const override;
+ absl::string_view GetStreamLabelFromTrackId(
+ absl::string_view track_id) const override;
+
+ absl::string_view GetSyncGroupLabelFromTrackId(
+ absl::string_view track_id) const override;
private:
+ struct StreamInfo {
+ std::string stream_label;
+ std::string sync_group;
+ };
+
+ const StreamInfo& GetStreamInfoFromTrackId(absl::string_view track_id) const;
+
SequenceChecker signaling_sequence_checker_;
- std::map<std::string, std::string> track_to_stream_map_
+ std::map<std::string, StreamInfo> track_to_stream_map_
RTC_GUARDED_BY(signaling_sequence_checker_);
};
diff --git a/test/pc/e2e/cross_media_metrics_reporter.cc b/test/pc/e2e/cross_media_metrics_reporter.cc
new file mode 100644
index 0000000000..96f661fd4f
--- /dev/null
+++ b/test/pc/e2e/cross_media_metrics_reporter.cc
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/cross_media_metrics_reporter.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+void CrossMediaMetricsReporter::Start(
+ absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) {
+ test_case_name_ = std::string(test_case_name);
+ reporter_helper_ = reporter_helper;
+}
+
+void CrossMediaMetricsReporter::OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+ std::map<absl::string_view, std::vector<const RTCInboundRTPStreamStats*>>
+ sync_group_stats;
+ for (const auto& stat : inbound_stats) {
+ auto media_source_stat =
+ report->GetAs<RTCMediaStreamTrackStats>(*stat->track_id);
+ if (stat->estimated_playout_timestamp.ValueOrDefault(0.) > 0 &&
+ media_source_stat->track_identifier.is_defined()) {
+ sync_group_stats[reporter_helper_->GetSyncGroupLabelFromTrackId(
+ *media_source_stat->track_identifier)]
+ .push_back(stat);
+ }
+ }
+
+ MutexLock lock(&mutex_);
+ for (const auto& pair : sync_group_stats) {
+ // If there is less than two streams, it is not a sync group.
+ if (pair.second.size() < 2) {
+ continue;
+ }
+ auto sync_group = std::string(pair.first);
+ const RTCInboundRTPStreamStats* audio_stat = pair.second[0];
+ const RTCInboundRTPStreamStats* video_stat = pair.second[1];
+
+ RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.is_defined() &&
+ video_stat->kind.is_defined() &&
+ *audio_stat->kind != *video_stat->kind)
+ << "Sync group should consist of one audio and one video stream.";
+
+ if (*audio_stat->kind == RTCMediaStreamTrackKind::kVideo) {
+ std::swap(audio_stat, video_stat);
+ }
+ // Stream labels of a sync group are same for all polls, so we need it add
+ // it only once.
+ if (stats_info_.find(sync_group) == stats_info_.end()) {
+ auto audio_source_stat =
+ report->GetAs<RTCMediaStreamTrackStats>(*audio_stat->track_id);
+ auto video_source_stat =
+ report->GetAs<RTCMediaStreamTrackStats>(*video_stat->track_id);
+ // *_source_stat->track_identifier is always defined here because we
+ // checked it while grouping stats.
+ stats_info_[sync_group].audio_stream_label =
+ std::string(reporter_helper_->GetStreamLabelFromTrackId(
+ *audio_source_stat->track_identifier));
+ stats_info_[sync_group].video_stream_label =
+ std::string(reporter_helper_->GetStreamLabelFromTrackId(
+ *video_source_stat->track_identifier));
+ }
+
+ double audio_video_playout_diff = *audio_stat->estimated_playout_timestamp -
+ *video_stat->estimated_playout_timestamp;
+ if (audio_video_playout_diff > 0) {
+ stats_info_[sync_group].audio_ahead_ms.AddSample(
+ audio_video_playout_diff);
+ stats_info_[sync_group].video_ahead_ms.AddSample(0);
+ } else {
+ stats_info_[sync_group].audio_ahead_ms.AddSample(0);
+ stats_info_[sync_group].video_ahead_ms.AddSample(
+ std::abs(audio_video_playout_diff));
+ }
+ }
+}
+
+void CrossMediaMetricsReporter::StopAndReportResults() {
+ MutexLock lock(&mutex_);
+ for (const auto& pair : stats_info_) {
+ const std::string& sync_group = pair.first;
+ ReportResult("audio_ahead_ms",
+ GetTestCaseName(pair.second.audio_stream_label, sync_group),
+ pair.second.audio_ahead_ms, "ms",
+ webrtc::test::ImproveDirection::kSmallerIsBetter);
+ ReportResult("video_ahead_ms",
+ GetTestCaseName(pair.second.video_stream_label, sync_group),
+ pair.second.video_ahead_ms, "ms",
+ webrtc::test::ImproveDirection::kSmallerIsBetter);
+ }
+}
+
+void CrossMediaMetricsReporter::ReportResult(
+ const std::string& metric_name,
+ const std::string& test_case_name,
+ const SamplesStatsCounter& counter,
+ const std::string& unit,
+ webrtc::test::ImproveDirection improve_direction) {
+ test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit,
+ /*important=*/false, improve_direction);
+}
+
+std::string CrossMediaMetricsReporter::GetTestCaseName(
+ const std::string& stream_label,
+ const std::string& sync_group) const {
+ return test_case_name_ + "/" + sync_group + "_" + stream_label;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/test/pc/e2e/cross_media_metrics_reporter.h b/test/pc/e2e/cross_media_metrics_reporter.h
new file mode 100644
index 0000000000..af2a62e77a
--- /dev/null
+++ b/test/pc/e2e/cross_media_metrics_reporter.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
+#define TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/numerics/samples_stats_counter.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/testsupport/perf_test.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+class CrossMediaMetricsReporter
+ : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
+ public:
+ CrossMediaMetricsReporter() = default;
+ ~CrossMediaMetricsReporter() override = default;
+
+ void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+ void StopAndReportResults() override;
+
+ private:
+ struct StatsInfo {
+ SamplesStatsCounter audio_ahead_ms;
+ SamplesStatsCounter video_ahead_ms;
+
+ std::string audio_stream_label;
+ std::string video_stream_label;
+ };
+
+ static void ReportResult(const std::string& metric_name,
+ const std::string& test_case_name,
+ const SamplesStatsCounter& counter,
+ const std::string& unit,
+ webrtc::test::ImproveDirection improve_direction =
+ webrtc::test::ImproveDirection::kNone);
+ std::string GetTestCaseName(const std::string& stream_label,
+ const std::string& sync_group) const;
+
+ std::string test_case_name_;
+ const TrackIdStreamInfoMap* reporter_helper_;
+
+ Mutex mutex_;
+ std::map<std::string, StatsInfo> stats_info_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
diff --git a/test/pc/e2e/echo/echo_emulation.cc b/test/pc/e2e/echo/echo_emulation.cc
index 2beaa34cbd..230e8e3eca 100644
--- a/test/pc/e2e/echo/echo_emulation.cc
+++ b/test/pc/e2e/echo/echo_emulation.cc
@@ -57,17 +57,7 @@ void EchoEmulatingCapturer::OnAudioRendered(
}
queue_input_.assign(data.begin(), data.end());
if (!renderer_queue_.Insert(&queue_input_)) {
- // Test audio device works too slow with sanitizers and on some platforms
- // and can't properly process audio, so when capturer will be stopped
- // renderer will quickly overfill the queue.
- // TODO(crbug.com/webrtc/10850) remove it when test ADM will be fast enough.
-#if defined(THREAD_SANITIZER) || defined(MEMORY_SANITIZER) || \
- defined(ADDRESS_SANITIZER) || defined(WEBRTC_ANDROID) || \
- (defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG))
RTC_LOG(WARNING) << "Echo queue is full";
-#else
- RTC_CHECK(false) << "Echo queue is full";
-#endif
}
}
diff --git a/test/pc/e2e/media/media_helper.cc b/test/pc/e2e/media/media_helper.cc
index ebe1571fe4..d1c27838a6 100644
--- a/test/pc/e2e/media/media_helper.cc
+++ b/test/pc/e2e/media/media_helper.cc
@@ -12,6 +12,7 @@
#include <string>
#include <utility>
+#include "absl/types/variant.h"
#include "api/media_stream_interface.h"
#include "api/test/create_frame_generator.h"
#include "test/frame_generator_capturer.h"
@@ -26,8 +27,8 @@ using VideoConfig =
::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig;
using AudioConfig =
::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig;
-using VideoGeneratorType = ::webrtc::webrtc_pc_e2e::
- PeerConnectionE2EQualityTestFixture::VideoGeneratorType;
+using CapturingDeviceIndex = ::webrtc::webrtc_pc_e2e::
+ PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex;
} // namespace
@@ -55,9 +56,9 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) {
auto video_config = params->video_configs[i];
// Setup input video source into peer connection.
std::unique_ptr<test::TestVideoCapturer> capturer = CreateVideoCapturer(
- video_config, peer->ReleaseVideoGenerator(i),
+ video_config, peer->ReleaseVideoSource(i),
video_quality_analyzer_injection_helper_->CreateFramePreprocessor(
- video_config));
+ params->name.value(), video_config));
bool is_screencast =
video_config.content_hint == VideoTrackInterface::ContentHint::kText ||
video_config.content_hint ==
@@ -95,25 +96,28 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) {
std::unique_ptr<test::TestVideoCapturer> MediaHelper::CreateVideoCapturer(
const VideoConfig& video_config,
- std::unique_ptr<test::FrameGeneratorInterface> generator,
+ PeerConfigurerImpl::VideoSource source,
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
frame_preprocessor) {
- if (video_config.capturing_device_index) {
+ CapturingDeviceIndex* capturing_device_index =
+ absl::get_if<CapturingDeviceIndex>(&source);
+ if (capturing_device_index != nullptr) {
std::unique_ptr<test::TestVideoCapturer> capturer =
test::CreateVideoCapturer(video_config.width, video_config.height,
video_config.fps,
- *video_config.capturing_device_index);
+ static_cast<size_t>(*capturing_device_index));
RTC_CHECK(capturer)
<< "Failed to obtain input stream from capturing device #"
- << *video_config.capturing_device_index;
+ << *capturing_device_index;
capturer->SetFramePreprocessor(std::move(frame_preprocessor));
return capturer;
}
- RTC_CHECK(generator) << "No input source.";
-
auto capturer = std::make_unique<test::FrameGeneratorCapturer>(
- clock_, std::move(generator), video_config.fps, *task_queue_factory_);
+ clock_,
+ absl::get<std::unique_ptr<test::FrameGeneratorInterface>>(
+ std::move(source)),
+ video_config.fps, *task_queue_factory_);
capturer->SetFramePreprocessor(std::move(frame_preprocessor));
capturer->Init();
return capturer;
diff --git a/test/pc/e2e/media/media_helper.h b/test/pc/e2e/media/media_helper.h
index 08f74335ee..8b36646a0b 100644
--- a/test/pc/e2e/media/media_helper.h
+++ b/test/pc/e2e/media/media_helper.h
@@ -18,6 +18,7 @@
#include "api/test/peerconnection_quality_test_fixture.h"
#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
#include "test/pc/e2e/media/test_video_capturer_video_track_source.h"
+#include "test/pc/e2e/peer_configurer.h"
#include "test/pc/e2e/test_peer.h"
namespace webrtc {
@@ -41,7 +42,7 @@ class MediaHelper {
private:
std::unique_ptr<test::TestVideoCapturer> CreateVideoCapturer(
const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config,
- std::unique_ptr<test::FrameGeneratorInterface> generator,
+ PeerConfigurerImpl::VideoSource source,
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
frame_preprocessor);
diff --git a/test/pc/e2e/network_quality_metrics_reporter.cc b/test/pc/e2e/network_quality_metrics_reporter.cc
index 56f0337037..cd6dfb5032 100644
--- a/test/pc/e2e/network_quality_metrics_reporter.cc
+++ b/test/pc/e2e/network_quality_metrics_reporter.cc
@@ -11,7 +11,8 @@
#include <utility>
-#include "api/stats_types.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/field_trial.h"
#include "test/testsupport/perf_test.h"
@@ -28,56 +29,59 @@ constexpr int kStatsWaitTimeoutMs = 1000;
constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats";
}
-void NetworkQualityMetricsReporter::Start(absl::string_view test_case_name) {
+void NetworkQualityMetricsReporter::Start(
+ absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* /*reporter_helper*/) {
test_case_name_ = std::string(test_case_name);
// Check that network stats are clean before test execution.
EmulatedNetworkStats alice_stats = PopulateStats(alice_network_);
RTC_CHECK_EQ(alice_stats.packets_sent, 0);
- RTC_CHECK_EQ(alice_stats.packets_received, 0);
+ RTC_CHECK_EQ(alice_stats.PacketsReceived(), 0);
EmulatedNetworkStats bob_stats = PopulateStats(bob_network_);
RTC_CHECK_EQ(bob_stats.packets_sent, 0);
- RTC_CHECK_EQ(bob_stats.packets_received, 0);
+ RTC_CHECK_EQ(bob_stats.PacketsReceived(), 0);
}
void NetworkQualityMetricsReporter::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& reports) {
- rtc::CritScope cs(&lock_);
- int64_t payload_bytes_received = 0;
- int64_t payload_bytes_sent = 0;
- for (const StatsReport* report : reports) {
- if (report->type() == StatsReport::kStatsReportTypeSsrc) {
- const auto* received =
- report->FindValue(StatsReport::kStatsValueNameBytesReceived);
- if (received) {
- payload_bytes_received += received->int64_val();
- }
- const auto* sent =
- report->FindValue(StatsReport::kStatsValueNameBytesSent);
- if (sent) {
- payload_bytes_sent += sent->int64_val();
- }
- }
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ DataSize payload_received = DataSize::Zero();
+ DataSize payload_sent = DataSize::Zero();
+
+ auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+ for (const auto& stat : inbound_stats) {
+ payload_received +=
+ DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) +
+ stat->header_bytes_received.ValueOrDefault(0ul));
}
- PCStats& stats = pc_stats_[pc_label];
- stats.payload_bytes_received = payload_bytes_received;
- stats.payload_bytes_sent = payload_bytes_sent;
+
+ auto outbound_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ for (const auto& stat : outbound_stats) {
+ payload_sent +=
+ DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) +
+ stat->header_bytes_sent.ValueOrDefault(0ul));
+ }
+
+ MutexLock lock(&lock_);
+ PCStats& stats = pc_stats_[std::string(pc_label)];
+ stats.payload_received = payload_received;
+ stats.payload_sent = payload_sent;
}
void NetworkQualityMetricsReporter::StopAndReportResults() {
EmulatedNetworkStats alice_stats = PopulateStats(alice_network_);
EmulatedNetworkStats bob_stats = PopulateStats(bob_network_);
ReportStats("alice", alice_stats,
- alice_stats.packets_sent - bob_stats.packets_received);
+ alice_stats.packets_sent - bob_stats.PacketsReceived());
ReportStats("bob", bob_stats,
- bob_stats.packets_sent - alice_stats.packets_received);
+ bob_stats.packets_sent - alice_stats.PacketsReceived());
if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) {
RTC_LOG(LS_ERROR)
<< "Non-standard GetStats; \"payload\" counts include RTP headers";
}
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
for (const auto& pair : pc_stats_) {
ReportPCStats(pair.first, pair.second);
}
@@ -107,16 +111,16 @@ void NetworkQualityMetricsReporter::ReportStats(
"average_send_rate", network_label,
stats.packets_sent >= 2 ? stats.AverageSendRate().bytes_per_sec() : 0,
"bytesPerSecond");
- ReportResult("bytes_dropped", network_label, stats.bytes_dropped.bytes(),
+ ReportResult("bytes_dropped", network_label, stats.BytesDropped().bytes(),
"sizeInBytes");
- ReportResult("packets_dropped", network_label, stats.packets_dropped,
+ ReportResult("packets_dropped", network_label, stats.PacketsDropped(),
"unitless");
- ReportResult("bytes_received", network_label, stats.bytes_received.bytes(),
+ ReportResult("bytes_received", network_label, stats.BytesReceived().bytes(),
"sizeInBytes");
- ReportResult("packets_received", network_label, stats.packets_received,
+ ReportResult("packets_received", network_label, stats.PacketsReceived(),
"unitless");
ReportResult("average_receive_rate", network_label,
- stats.packets_received >= 2
+ stats.PacketsReceived() >= 2
? stats.AverageReceiveRate().bytes_per_sec()
: 0,
"bytesPerSecond");
@@ -125,9 +129,9 @@ void NetworkQualityMetricsReporter::ReportStats(
void NetworkQualityMetricsReporter::ReportPCStats(const std::string& pc_label,
const PCStats& stats) {
- ReportResult("payload_bytes_received", pc_label, stats.payload_bytes_received,
- "sizeInBytes");
- ReportResult("payload_bytes_sent", pc_label, stats.payload_bytes_sent,
+ ReportResult("payload_bytes_received", pc_label,
+ stats.payload_received.bytes(), "sizeInBytes");
+ ReportResult("payload_bytes_sent", pc_label, stats.payload_sent.bytes(),
"sizeInBytes");
}
diff --git a/test/pc/e2e/network_quality_metrics_reporter.h b/test/pc/e2e/network_quality_metrics_reporter.h
index 6454f17526..4c81f9d716 100644
--- a/test/pc/e2e/network_quality_metrics_reporter.h
+++ b/test/pc/e2e/network_quality_metrics_reporter.h
@@ -13,9 +13,12 @@
#include <string>
+#include "absl/strings/string_view.h"
#include "api/test/network_emulation_manager.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "rtc_base/critical_section.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/data_size.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@@ -29,17 +32,19 @@ class NetworkQualityMetricsReporter
~NetworkQualityMetricsReporter() override = default;
// Network stats must be empty when this method will be invoked.
- void Start(absl::string_view test_case_name) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) override;
+ void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void StopAndReportResults() override;
private:
struct PCStats {
// TODO(nisse): Separate audio and video counters. Depends on standard stat
// counters, enabled by field trial "WebRTC-UseStandardBytesStats".
- int64_t payload_bytes_received = 0;
- int64_t payload_bytes_sent = 0;
+ DataSize payload_received = DataSize::Zero();
+ DataSize payload_sent = DataSize::Zero();
};
static EmulatedNetworkStats PopulateStats(
@@ -58,7 +63,7 @@ class NetworkQualityMetricsReporter
EmulatedNetworkManagerInterface* alice_network_;
EmulatedNetworkManagerInterface* bob_network_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::map<std::string, PCStats> pc_stats_ RTC_GUARDED_BY(lock_);
};
diff --git a/test/pc/e2e/peer_configurer.cc b/test/pc/e2e/peer_configurer.cc
index eabe1ab633..b5616b5d68 100644
--- a/test/pc/e2e/peer_configurer.cc
+++ b/test/pc/e2e/peer_configurer.cc
@@ -23,8 +23,6 @@ namespace {
using AudioConfig = PeerConnectionE2EQualityTestFixture::AudioConfig;
using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig;
using RunParams = PeerConnectionE2EQualityTestFixture::RunParams;
-using VideoGeneratorType =
- PeerConnectionE2EQualityTestFixture::VideoGeneratorType;
using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig;
// List of default names of generic participants according to
@@ -104,13 +102,15 @@ void ValidateParams(
const RunParams& run_params,
const std::vector<std::unique_ptr<PeerConfigurerImpl>>& peers) {
RTC_CHECK_GT(run_params.video_encoder_bitrate_multiplier, 0.0);
+ RTC_CHECK_GE(run_params.video_codecs.size(), 1);
std::set<std::string> peer_names;
std::set<std::string> video_labels;
std::set<std::string> audio_labels;
+ std::set<std::string> video_sync_groups;
+ std::set<std::string> audio_sync_groups;
int media_streams_count = 0;
- bool has_simulcast = false;
for (size_t i = 0; i < peers.size(); ++i) {
Params* p = peers[i]->params();
@@ -125,7 +125,8 @@ void ValidateParams(
}
media_streams_count += p->video_configs.size();
- // Validate that all video stream labels are unique.
+ // Validate that all video stream labels are unique and sync groups are
+ // valid.
for (const VideoConfig& video_config : p->video_configs) {
RTC_CHECK(video_config.stream_label);
bool inserted =
@@ -133,17 +134,37 @@ void ValidateParams(
RTC_CHECK(inserted) << "Duplicate video_config.stream_label="
<< video_config.stream_label.value();
+ // TODO(bugs.webrtc.org/4762): remove this check after synchronization of
+ // more than two streams is supported.
+ if (video_config.sync_group.has_value()) {
+ bool sync_group_inserted =
+ video_sync_groups.insert(video_config.sync_group.value()).second;
+ RTC_CHECK(sync_group_inserted)
+ << "Sync group shouldn't consist of more than two streams (one "
+ "video and one audio). Duplicate video_config.sync_group="
+ << video_config.sync_group.value();
+ }
+
if (video_config.simulcast_config) {
- has_simulcast = true;
if (video_config.simulcast_config->target_spatial_index) {
RTC_CHECK_GE(*video_config.simulcast_config->target_spatial_index, 0);
RTC_CHECK_LT(*video_config.simulcast_config->target_spatial_index,
video_config.simulcast_config->simulcast_streams_count);
}
+ RTC_CHECK_EQ(run_params.video_codecs.size(), 1)
+ << "Only 1 video codec is supported when simulcast is enabled in "
+ << "at least 1 video config";
RTC_CHECK(!video_config.max_encode_bitrate_bps)
<< "Setting max encode bitrate is not implemented for simulcast.";
RTC_CHECK(!video_config.min_encode_bitrate_bps)
<< "Setting min encode bitrate is not implemented for simulcast.";
+ if (run_params.video_codecs[0].name == cricket::kVp8CodecName &&
+ !video_config.simulcast_config->encoding_params.empty()) {
+ RTC_CHECK_EQ(video_config.simulcast_config->simulcast_streams_count,
+ video_config.simulcast_config->encoding_params.size())
+ << "|encoding_params| have to be specified for each simulcast "
+ << "stream in |simulcast_config|.";
+ }
}
}
if (p->audio_config) {
@@ -151,6 +172,17 @@ void ValidateParams(
audio_labels.insert(p->audio_config->stream_label.value()).second;
RTC_CHECK(inserted) << "Duplicate audio_config.stream_label="
<< p->audio_config->stream_label.value();
+ // TODO(bugs.webrtc.org/4762): remove this check after synchronization of
+ // more than two streams is supported.
+ if (p->audio_config->sync_group.has_value()) {
+ bool sync_group_inserted =
+ audio_sync_groups.insert(p->audio_config->sync_group.value())
+ .second;
+ RTC_CHECK(sync_group_inserted)
+ << "Sync group shouldn't consist of more than two streams (one "
+ "video and one audio). Duplicate audio_config.sync_group="
+ << p->audio_config->sync_group.value();
+ }
// Check that if mode input file name specified only if mode is kFile.
if (p->audio_config.value().mode == AudioConfig::Mode::kGenerated) {
RTC_CHECK(!p->audio_config.value().input_file_name);
@@ -164,11 +196,6 @@ void ValidateParams(
}
}
}
- if (has_simulcast) {
- RTC_CHECK_EQ(run_params.video_codecs.size(), 1)
- << "Only 1 video codec is supported when simulcast is enabled in at "
- << "least 1 video config";
- }
RTC_CHECK_GT(media_streams_count, 0) << "No media in the call.";
}
diff --git a/test/pc/e2e/peer_configurer.h b/test/pc/e2e/peer_configurer.h
index 179482b875..422d3d7341 100644
--- a/test/pc/e2e/peer_configurer.h
+++ b/test/pc/e2e/peer_configurer.h
@@ -23,7 +23,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/test/create_peer_connection_quality_test_frame_generator.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
@@ -39,6 +38,10 @@ namespace webrtc_pc_e2e {
class PeerConfigurerImpl final
: public PeerConnectionE2EQualityTestFixture::PeerConfigurer {
public:
+ using VideoSource =
+ absl::variant<std::unique_ptr<test::FrameGeneratorInterface>,
+ PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex>;
+
PeerConfigurerImpl(rtc::Thread* network_thread,
rtc::NetworkManager* network_manager)
: components_(std::make_unique<InjectableComponents>(network_thread,
@@ -82,12 +85,6 @@ class PeerConfigurerImpl final
std::move(network_controller_factory);
return this;
}
- PeerConfigurer* SetMediaTransportFactory(
- std::unique_ptr<MediaTransportFactory> media_transport_factory) override {
- components_->pcf_dependencies->media_transport_factory =
- std::move(media_transport_factory);
- return this;
- }
PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) override {
components_->pcf_dependencies->video_encoder_factory =
@@ -123,7 +120,7 @@ class PeerConfigurerImpl final
PeerConfigurer* AddVideoConfig(
PeerConnectionE2EQualityTestFixture::VideoConfig config) override {
- video_generators_.push_back(
+ video_sources_.push_back(
CreateSquareFrameGenerator(config, /*type=*/absl::nullopt));
params_->video_configs.push_back(std::move(config));
return this;
@@ -132,7 +129,15 @@ class PeerConfigurerImpl final
PeerConnectionE2EQualityTestFixture::VideoConfig config,
std::unique_ptr<test::FrameGeneratorInterface> generator) override {
params_->video_configs.push_back(std::move(config));
- video_generators_.push_back(std::move(generator));
+ video_sources_.push_back(std::move(generator));
+ return this;
+ }
+ PeerConfigurer* AddVideoConfig(
+ PeerConnectionE2EQualityTestFixture::VideoConfig config,
+ PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex index)
+ override {
+ params_->video_configs.push_back(std::move(config));
+ video_sources_.push_back(index);
return this;
}
PeerConfigurer* SetAudioConfig(
@@ -158,9 +163,9 @@ class PeerConfigurerImpl final
params_->rtc_configuration = std::move(configuration);
return this;
}
- PeerConfigurer* SetBitrateParameters(
- PeerConnectionInterface::BitrateParameters bitrate_params) override {
- params_->bitrate_params = bitrate_params;
+ PeerConfigurer* SetBitrateSettings(
+ BitrateSettings bitrate_settings) override {
+ params_->bitrate_settings = bitrate_settings;
return this;
}
@@ -173,10 +178,7 @@ class PeerConfigurerImpl final
InjectableComponents* components() { return components_.get(); }
Params* params() { return params_.get(); }
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>*
- video_generators() {
- return &video_generators_;
- }
+ std::vector<VideoSource>* video_sources() { return &video_sources_; }
// Returns InjectableComponents and transfer ownership to the caller.
// Can be called once.
@@ -194,19 +196,18 @@ class PeerConfigurerImpl final
params_ = nullptr;
return params;
}
- // Returns frame generators and transfer ownership to the caller.
- // Can be called once.
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>
- ReleaseVideoGenerators() {
- auto video_generators = std::move(video_generators_);
- video_generators_.clear();
- return video_generators;
+ // Returns video sources and transfer frame generators ownership to the
+ // caller. Can be called once.
+ std::vector<VideoSource> ReleaseVideoSources() {
+ auto video_sources = std::move(video_sources_);
+ video_sources_.clear();
+ return video_sources;
}
private:
std::unique_ptr<InjectableComponents> components_;
std::unique_ptr<Params> params_;
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>> video_generators_;
+ std::vector<VideoSource> video_sources_;
};
// Set missing params to default values if it is required:
diff --git a/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/test/pc/e2e/peer_connection_e2e_smoke_test.cc
index 8080d4bb0a..b09b093c25 100644
--- a/test/pc/e2e/peer_connection_e2e_smoke_test.cc
+++ b/test/pc/e2e/peer_connection_e2e_smoke_test.cc
@@ -74,16 +74,17 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
// Create analyzers.
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer =
- std::make_unique<DefaultVideoQualityAnalyzer>();
+ std::make_unique<DefaultVideoQualityAnalyzer>(
+ network_emulation_manager->time_controller()->GetClock());
// This is only done for the sake of smoke testing. In general there should
// be no need to explicitly pull data from analyzers after the run.
auto* video_analyzer_ptr =
static_cast<DefaultVideoQualityAnalyzer*>(video_quality_analyzer.get());
auto fixture = CreatePeerConnectionE2EQualityTestFixture(
- test_case_name, /*audio_quality_analyzer=*/nullptr,
- std::move(video_quality_analyzer));
- fixture->ExecuteAt(TimeDelta::Seconds(2),
+ test_case_name, *network_emulation_manager->time_controller(),
+ /*audio_quality_analyzer=*/nullptr, std::move(video_quality_analyzer));
+ fixture->ExecuteAt(TimeDelta::Seconds(1),
[alice_network_behavior_ptr](TimeDelta) {
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
@@ -110,19 +111,20 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
fixture->Run(run_params);
EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration);
- for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) {
+ for (auto stream_key : video_analyzer_ptr->GetKnownVideoStreams()) {
FrameCounters stream_conters =
- video_analyzer_ptr->GetPerStreamCounters().at(stream_label);
+ video_analyzer_ptr->GetPerStreamCounters().at(stream_key);
// On some devices the pipeline can be too slow, so we actually can't
// force real constraints here. Lets just check, that at least 1
// frame passed whole pipeline.
- int64_t expected_min_fps = run_params.run_duration.seconds() * 30;
- EXPECT_GE(stream_conters.captured, expected_min_fps);
- EXPECT_GE(stream_conters.pre_encoded, 1);
- EXPECT_GE(stream_conters.encoded, 1);
- EXPECT_GE(stream_conters.received, 1);
- EXPECT_GE(stream_conters.decoded, 1);
- EXPECT_GE(stream_conters.rendered, 1);
+ int64_t expected_min_fps = run_params.run_duration.seconds() * 15;
+ EXPECT_GE(stream_conters.captured, expected_min_fps)
+ << stream_key.ToString();
+ EXPECT_GE(stream_conters.pre_encoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.encoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.received, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.decoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.rendered, 1) << stream_key.ToString();
}
}
};
@@ -148,7 +150,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
RunTest(
"smoke", run_params,
[](PeerConfigurer* alice) {
- VideoConfig video(640, 360, 30);
+ VideoConfig video(160, 120, 15);
video.stream_label = "alice-video";
video.sync_group = "alice-media";
alice->AddVideoConfig(std::move(video));
@@ -164,23 +166,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
},
[](PeerConfigurer* charlie) {
charlie->SetName("charlie");
- VideoConfig video(640, 360, 30);
+ VideoConfig video(160, 120, 15);
video.stream_label = "charlie-video";
video.temporal_layers_count = 2;
charlie->AddVideoConfig(std::move(video));
- VideoConfig screenshare(640, 360, 30);
- screenshare.stream_label = "charlie-screenshare";
- screenshare.content_hint = VideoTrackInterface::ContentHint::kText;
- ScreenShareConfig screen_share_config =
- ScreenShareConfig(TimeDelta::Seconds(2));
- screen_share_config.scrolling_params = ScrollingParams(
- TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight);
- auto screen_share_frame_generator =
- CreateScreenShareFrameGenerator(screenshare, screen_share_config);
- charlie->AddVideoConfig(std::move(screenshare),
- std::move(screen_share_frame_generator));
-
AudioConfig audio;
audio.stream_label = "charlie-audio";
audio.mode = AudioConfig::Mode::kFile;
@@ -192,6 +182,35 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Screenshare DISABLED_Screenshare
+#else
+#define MAYBE_Screenshare Screenshare
+#endif
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Screenshare) {
+ RunParams run_params(TimeDelta::Seconds(2));
+ test::ScopedFieldTrials field_trials(
+ std::string(field_trial::GetFieldTrialString()) +
+ "WebRTC-UseStandardBytesStats/Enabled/");
+ RunTest(
+ "screenshare", run_params,
+ [](PeerConfigurer* alice) {
+ VideoConfig screenshare(320, 180, 30);
+ screenshare.stream_label = "alice-screenshare";
+ screenshare.content_hint = VideoTrackInterface::ContentHint::kText;
+ ScreenShareConfig screen_share_config =
+ ScreenShareConfig(TimeDelta::Seconds(2));
+ screen_share_config.scrolling_params = ScrollingParams(
+ TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight);
+ auto screen_share_frame_generator =
+ CreateScreenShareFrameGenerator(screenshare, screen_share_config);
+ alice->AddVideoConfig(std::move(screenshare),
+ std::move(screen_share_frame_generator));
+ },
+ [](PeerConfigurer* charlie) {});
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
#define MAYBE_Echo DISABLED_Echo
#else
#define MAYBE_Echo Echo
@@ -232,9 +251,9 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
RunTest(
"simulcast", run_params,
[](PeerConfigurer* alice) {
- VideoConfig simulcast(1280, 720, 30);
+ VideoConfig simulcast(1280, 720, 15);
simulcast.stream_label = "alice-simulcast";
- simulcast.simulcast_config = VideoSimulcastConfig(3, 0);
+ simulcast.simulcast_config = VideoSimulcastConfig(2, 0);
alice->AddVideoConfig(std::move(simulcast));
AudioConfig audio;
@@ -244,18 +263,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- VideoConfig video(640, 360, 30);
- video.stream_label = "bob-video";
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
@@ -270,11 +278,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) {
RunTest(
"simulcast", run_params,
[](PeerConfigurer* alice) {
- VideoConfig simulcast(1280, 720, 30);
+ VideoConfig simulcast(1280, 720, 15);
simulcast.stream_label = "alice-svc";
// Because we have network with packets loss we can analyze only the
// highest spatial layer in SVC mode.
- simulcast.simulcast_config = VideoSimulcastConfig(3, 2);
+ simulcast.simulcast_config = VideoSimulcastConfig(2, 1);
alice->AddVideoConfig(std::move(simulcast));
AudioConfig audio;
@@ -284,18 +292,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) {
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- VideoConfig video(640, 360, 30);
- video.stream_label = "bob-video";
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
@@ -312,11 +309,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) {
RunTest(
"smoke", run_params,
[](PeerConfigurer* alice) {
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.current_bitrate_bps = 3'000'000;
- bitrate_params.max_bitrate_bps = 3'000'000;
- alice->SetBitrateParameters(bitrate_params);
- VideoConfig video(800, 600, 30);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.start_bitrate_bps = 3'000'000;
+ bitrate_settings.max_bitrate_bps = 3'000'000;
+ alice->SetBitrateSettings(bitrate_settings);
+ VideoConfig video(800, 600, 15);
video.stream_label = "alice-video";
video.min_encode_bitrate_bps = 500'000;
video.max_encode_bitrate_bps = 3'000'000;
@@ -330,24 +327,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) {
audio.sampling_frequency_in_hz = 48000;
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.current_bitrate_bps = 3'000'000;
- bitrate_params.max_bitrate_bps = 3'000'000;
- bob->SetBitrateParameters(bitrate_params);
- VideoConfig video(800, 600, 30);
- video.stream_label = "bob-video";
- video.min_encode_bitrate_bps = 500'000;
- video.max_encode_bitrate_bps = 3'000'000;
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/peer_connection_quality_test.cc b/test/pc/e2e/peer_connection_quality_test.cc
index 9d79b0e957..ee21b05012 100644
--- a/test/pc/e2e/peer_connection_quality_test.cc
+++ b/test/pc/e2e/peer_connection_quality_test.cc
@@ -14,6 +14,7 @@
#include <set>
#include <utility>
+#include "absl/strings/string_view.h"
#include "api/jsep.h"
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
@@ -21,6 +22,7 @@
#include "api/rtc_event_log_output_file.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/time_controller.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "pc/sdp_utils.h"
#include "pc/test/mock_peer_connection_observers.h"
@@ -32,6 +34,7 @@
#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h"
+#include "test/pc/e2e/cross_media_metrics_reporter.h"
#include "test/pc/e2e/stats_poller.h"
#include "test/pc/e2e/test_peer_factory.h"
#include "test/testsupport/file_utils.h"
@@ -44,7 +47,7 @@ namespace {
using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig;
using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig;
-constexpr int kDefaultTimeoutMs = 10000;
+constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(10);
constexpr char kSignalThreadName[] = "signaling_thread";
// 1 signaling, 2 network, 2 worker and 2 extra for codecs etc.
constexpr int kPeerConnectionUsedThreads = 7;
@@ -57,7 +60,7 @@ constexpr TimeDelta kStatsUpdateInterval = TimeDelta::Seconds(1);
constexpr TimeDelta kAliveMessageLogInterval = TimeDelta::Seconds(30);
-constexpr int kQuickTestModeRunDurationMs = 100;
+constexpr TimeDelta kQuickTestModeRunDuration = TimeDelta::Millis(100);
// Field trials to enable Flex FEC advertising and receiving.
constexpr char kFlexFecEnabledFieldTrials[] =
@@ -102,17 +105,20 @@ class FixturePeerConnectionObserver : public MockPeerConnectionObserver {
PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest(
std::string test_case_name,
+ TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer)
- : clock_(Clock::GetRealTimeClock()),
+ : time_controller_(time_controller),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
test_case_name_(std::move(test_case_name)),
- executor_(std::make_unique<TestActivitiesExecutor>(clock_)) {
+ executor_(std::make_unique<TestActivitiesExecutor>(
+ time_controller_.GetClock())) {
// Create default video quality analyzer. We will always create an analyzer,
// even if there are no video streams, because it will be installed into video
// encoder/decoder factories.
if (video_quality_analyzer == nullptr) {
- video_quality_analyzer = std::make_unique<DefaultVideoQualityAnalyzer>();
+ video_quality_analyzer = std::make_unique<DefaultVideoQualityAnalyzer>(
+ time_controller_.GetClock());
}
encoded_image_id_controller_ =
std::make_unique<SingleProcessEncodedImageDataInjector>();
@@ -187,15 +193,16 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
<< "; audio="
<< bob_configurer->params()->audio_config.has_value();
- const std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
- signaling_thread->SetName(kSignalThreadName, nullptr);
- signaling_thread->Start();
+ const std::unique_ptr<rtc::Thread> signaling_thread =
+ time_controller_.CreateThread(kSignalThreadName);
media_helper_ = std::make_unique<MediaHelper>(
video_quality_analyzer_injection_helper_.get(),
task_queue_factory_.get());
// Create a |task_queue_|.
- task_queue_ = std::make_unique<TaskQueueForTest>("pc_e2e_quality_test");
+ task_queue_ = std::make_unique<webrtc::TaskQueueForTest>(
+ time_controller_.GetTaskQueueFactory()->CreateTaskQueue(
+ "pc_e2e_quality_test", webrtc::TaskQueueFactory::Priority::NORMAL));
// Create call participants: Alice and Bob.
// Audio streams are intercepted in AudioDeviceModule, so if it is required to
@@ -205,34 +212,38 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config);
absl::optional<RemotePeerAudioConfig> bob_remote_audio_config =
RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config);
- // Copy Alice and Bob video configs to correctly pass them into lambdas.
+ // Copy Alice and Bob video configs and names to correctly pass them into
+ // lambdas.
std::vector<VideoConfig> alice_video_configs =
alice_configurer->params()->video_configs;
+ std::string alice_name = alice_configurer->params()->name.value();
std::vector<VideoConfig> bob_video_configs =
bob_configurer->params()->video_configs;
+ std::string bob_name = bob_configurer->params()->name.value();
- alice_ = TestPeerFactory::CreateTestPeer(
+ TestPeerFactory test_peer_factory(
+ signaling_thread.get(), time_controller_,
+ video_quality_analyzer_injection_helper_.get(), task_queue_.get());
+ alice_ = test_peer_factory.CreateTestPeer(
std::move(alice_configurer),
std::make_unique<FixturePeerConnectionObserver>(
- [this, bob_video_configs](
+ [this, bob_video_configs, alice_name](
rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
- OnTrackCallback(transceiver, bob_video_configs);
+ OnTrackCallback(alice_name, transceiver, bob_video_configs);
},
[this]() { StartVideo(alice_video_sources_); }),
- video_quality_analyzer_injection_helper_.get(), signaling_thread.get(),
alice_remote_audio_config, run_params.video_encoder_bitrate_multiplier,
- run_params.echo_emulation_config, task_queue_.get());
- bob_ = TestPeerFactory::CreateTestPeer(
+ run_params.echo_emulation_config);
+ bob_ = test_peer_factory.CreateTestPeer(
std::move(bob_configurer),
std::make_unique<FixturePeerConnectionObserver>(
- [this, alice_video_configs](
- rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
- OnTrackCallback(transceiver, alice_video_configs);
+ [this, alice_video_configs,
+ bob_name](rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ OnTrackCallback(bob_name, transceiver, alice_video_configs);
},
[this]() { StartVideo(bob_video_sources_); }),
- video_quality_analyzer_injection_helper_.get(), signaling_thread.get(),
bob_remote_audio_config, run_params.video_encoder_bitrate_multiplier,
- run_params.echo_emulation_config, task_queue_.get());
+ run_params.echo_emulation_config);
int num_cores = CpuInfo::DetectNumberOfCores();
RTC_DCHECK_GE(num_cores, 1);
@@ -246,13 +257,19 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
std::min(video_analyzer_threads, kMaxVideoAnalyzerThreads);
RTC_LOG(INFO) << "video_analyzer_threads=" << video_analyzer_threads;
quality_metrics_reporters_.push_back(
- std::make_unique<VideoQualityMetricsReporter>());
+ std::make_unique<VideoQualityMetricsReporter>(
+ time_controller_.GetClock()));
+ quality_metrics_reporters_.push_back(
+ std::make_unique<CrossMediaMetricsReporter>());
- video_quality_analyzer_injection_helper_->Start(test_case_name_,
- video_analyzer_threads);
+ video_quality_analyzer_injection_helper_->Start(
+ test_case_name_,
+ std::vector<std::string>{alice_->params()->name.value(),
+ bob_->params()->name.value()},
+ video_analyzer_threads);
audio_quality_analyzer_->Start(test_case_name_, &analyzer_helper_);
for (auto& reporter : quality_metrics_reporters_) {
- reporter->Start(test_case_name_);
+ reporter->Start(test_case_name_, &analyzer_helper_);
}
// Start RTCEventLog recording if requested.
@@ -299,15 +316,29 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
RTC_FROM_HERE,
rtc::Bind(&PeerConnectionE2EQualityTest::SetupCallOnSignalingThread, this,
run_params));
+ std::unique_ptr<SignalingInterceptor> signaling_interceptor =
+ CreateSignalingInterceptor(run_params);
+ // Connect peers.
+ signaling_thread->Invoke<void>(
+ RTC_FROM_HERE,
+ rtc::Bind(&PeerConnectionE2EQualityTest::ExchangeOfferAnswer, this,
+ signaling_interceptor.get()));
+ WaitUntilIceCandidatesGathered(signaling_thread.get());
+
+ signaling_thread->Invoke<void>(
+ RTC_FROM_HERE,
+ rtc::Bind(&PeerConnectionE2EQualityTest::ExchangeIceCandidates, this,
+ signaling_interceptor.get()));
+ WaitUntilPeersAreConnected(signaling_thread.get());
+
executor_->Start(task_queue_.get());
Timestamp start_time = Now();
- rtc::Event done;
bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest");
if (is_quick_test_enabled) {
- done.Wait(kQuickTestModeRunDurationMs);
+ time_controller_.AdvanceTime(kQuickTestModeRunDuration);
} else {
- done.Wait(run_params.run_duration.ms());
+ time_controller_.AdvanceTime(run_params.run_duration);
}
RTC_LOG(INFO) << "Test is done, initiating disconnect sequence.";
@@ -336,7 +367,7 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
Timestamp end_time = Now();
RTC_LOG(INFO) << "All peers are disconnected.";
{
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
real_test_duration_ = end_time - start_time;
}
@@ -371,14 +402,17 @@ void PeerConnectionE2EQualityTest::SetupRequiredFieldTrials(
}
void PeerConnectionE2EQualityTest::OnTrackCallback(
+ absl::string_view peer_name,
rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
std::vector<VideoConfig> remote_video_configs) {
const rtc::scoped_refptr<MediaStreamTrackInterface>& track =
transceiver->receiver()->track();
RTC_CHECK_EQ(transceiver->receiver()->stream_ids().size(), 2)
<< "Expected 2 stream ids: 1st - sync group, 2nd - unique stream label";
+ std::string sync_group = transceiver->receiver()->stream_ids()[0];
std::string stream_label = transceiver->receiver()->stream_ids()[1];
- analyzer_helper_.AddTrackToStreamMapping(track->id(), stream_label);
+ analyzer_helper_.AddTrackToStreamMapping(track->id(), stream_label,
+ sync_group);
if (track->kind() != MediaStreamTrackInterface::kVideoKind) {
return;
}
@@ -387,7 +421,7 @@ void PeerConnectionE2EQualityTest::OnTrackCallback(
// track->kind() is kVideoKind.
auto* video_track = static_cast<VideoTrackInterface*>(track.get());
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> video_sink =
- video_quality_analyzer_injection_helper_->CreateVideoSink();
+ video_quality_analyzer_injection_helper_->CreateVideoSink(peer_name);
video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants());
output_video_sinks_.push_back(std::move(video_sink));
}
@@ -419,10 +453,14 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread(
// Because simulcast enabled |run_params.video_codecs| has only 1 element.
if (run_params.video_codecs[0].name == cricket::kVp8CodecName) {
// For Vp8 simulcast we need to add as many RtpEncodingParameters to the
- // track as many simulcast streams requested.
+ // track as many simulcast streams requested. If they specified in
+ // |video_config.simulcast_config| it should be copied from there.
for (int i = 0;
i < video_config.simulcast_config->simulcast_streams_count; ++i) {
RtpEncodingParameters enc_params;
+ if (video_config.simulcast_config->encoding_params.size() > 0) {
+ enc_params = video_config.simulcast_config->encoding_params[i];
+ }
// We need to be sure, that all rids will be unique with all mids.
enc_params.rid = std::to_string(alice_transceivers_counter) + "000" +
std::to_string(i);
@@ -465,8 +503,6 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread(
SetPeerCodecPreferences(alice_.get(), run_params);
SetPeerCodecPreferences(bob_.get(), run_params);
-
- SetupCall(run_params);
}
void PeerConnectionE2EQualityTest::TearDownCallOnSignalingThread() {
@@ -509,7 +545,9 @@ void PeerConnectionE2EQualityTest::SetPeerCodecPreferences(
}
}
-void PeerConnectionE2EQualityTest::SetupCall(const RunParams& run_params) {
+std::unique_ptr<SignalingInterceptor>
+PeerConnectionE2EQualityTest::CreateSignalingInterceptor(
+ const RunParams& run_params) {
std::map<std::string, int> stream_label_to_simulcast_streams_count;
// We add only Alice here, because simulcast/svc is supported only from the
// first peer.
@@ -523,21 +561,35 @@ void PeerConnectionE2EQualityTest::SetupCall(const RunParams& run_params) {
PatchingParams patching_params(run_params.video_codecs,
run_params.use_conference_mode,
stream_label_to_simulcast_streams_count);
- SignalingInterceptor signaling_interceptor(patching_params);
- // Connect peers.
- ExchangeOfferAnswer(&signaling_interceptor);
- // Do the SDP negotiation, and also exchange ice candidates.
- ASSERT_EQ_WAIT(alice_->signaling_state(), PeerConnectionInterface::kStable,
- kDefaultTimeoutMs);
- ASSERT_TRUE_WAIT(alice_->IsIceGatheringDone(), kDefaultTimeoutMs);
- ASSERT_TRUE_WAIT(bob_->IsIceGatheringDone(), kDefaultTimeoutMs);
-
- ExchangeIceCandidates(&signaling_interceptor);
+ return std::make_unique<SignalingInterceptor>(patching_params);
+}
+
+void PeerConnectionE2EQualityTest::WaitUntilIceCandidatesGathered(
+ rtc::Thread* signaling_thread) {
+ ASSERT_TRUE(time_controller_.Wait(
+ [&]() {
+ return signaling_thread->Invoke<bool>(RTC_FROM_HERE, [&]() {
+ return alice_->IsIceGatheringDone() && bob_->IsIceGatheringDone();
+ });
+ },
+ 2 * kDefaultTimeout));
+}
+
+void PeerConnectionE2EQualityTest::WaitUntilPeersAreConnected(
+ rtc::Thread* signaling_thread) {
// This means that ICE and DTLS are connected.
- WAIT(bob_->IsIceConnected(), kDefaultTimeoutMs);
- bob_connected_ = bob_->IsIceConnected();
- WAIT(alice_->IsIceConnected(), kDefaultTimeoutMs);
- alice_connected_ = alice_->IsIceConnected();
+ alice_connected_ = time_controller_.Wait(
+ [&]() {
+ return signaling_thread->Invoke<bool>(
+ RTC_FROM_HERE, [&]() { return alice_->IsIceConnected(); });
+ },
+ kDefaultTimeout);
+ bob_connected_ = time_controller_.Wait(
+ [&]() {
+ return signaling_thread->Invoke<bool>(
+ RTC_FROM_HERE, [&]() { return bob_->IsIceConnected(); });
+ },
+ kDefaultTimeout);
}
void PeerConnectionE2EQualityTest::ExchangeOfferAnswer(
@@ -645,7 +697,7 @@ void PeerConnectionE2EQualityTest::ReportGeneralTestResults() {
}
Timestamp PeerConnectionE2EQualityTest::Now() const {
- return clock_->CurrentTime();
+ return time_controller_.GetClock()->CurrentTime();
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/peer_connection_quality_test.h b/test/pc/e2e/peer_connection_quality_test.h
index bbc3d8a1f2..9ce19a80e4 100644
--- a/test/pc/e2e/peer_connection_quality_test.h
+++ b/test/pc/e2e/peer_connection_quality_test.h
@@ -15,11 +15,14 @@
#include <string>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/time_controller.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
@@ -41,8 +44,6 @@ namespace webrtc_pc_e2e {
class PeerConnectionE2EQualityTest
: public PeerConnectionE2EQualityTestFixture {
public:
- using VideoGeneratorType =
- PeerConnectionE2EQualityTestFixture::VideoGeneratorType;
using RunParams = PeerConnectionE2EQualityTestFixture::RunParams;
using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig;
using VideoSimulcastConfig =
@@ -53,6 +54,7 @@ class PeerConnectionE2EQualityTest
PeerConnectionE2EQualityTest(
std::string test_case_name,
+ TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer);
@@ -73,7 +75,7 @@ class PeerConnectionE2EQualityTest
void Run(RunParams run_params) override;
TimeDelta GetRealTestDuration() const override {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
RTC_CHECK_NE(real_test_duration_, TimeDelta::Zero());
return real_test_duration_;
}
@@ -82,13 +84,17 @@ class PeerConnectionE2EQualityTest
// For some functionality some field trials have to be enabled, so we will
// enable them here.
void SetupRequiredFieldTrials(const RunParams& run_params);
- void OnTrackCallback(rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
+ void OnTrackCallback(absl::string_view peer_name,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
std::vector<VideoConfig> remote_video_configs);
// Have to be run on the signaling thread.
void SetupCallOnSignalingThread(const RunParams& run_params);
void TearDownCallOnSignalingThread();
void SetPeerCodecPreferences(TestPeer* peer, const RunParams& run_params);
- void SetupCall(const RunParams& run_params);
+ std::unique_ptr<SignalingInterceptor> CreateSignalingInterceptor(
+ const RunParams& run_params);
+ void WaitUntilIceCandidatesGathered(rtc::Thread* signaling_thread);
+ void WaitUntilPeersAreConnected(rtc::Thread* signaling_thread);
void ExchangeOfferAnswer(SignalingInterceptor* signaling_interceptor);
void ExchangeIceCandidates(SignalingInterceptor* signaling_interceptor);
void StartVideo(
@@ -98,7 +104,7 @@ class PeerConnectionE2EQualityTest
void ReportGeneralTestResults();
Timestamp Now() const;
- Clock* const clock_;
+ TimeController& time_controller_;
const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
std::string test_case_name_;
std::unique_ptr<VideoQualityAnalyzerInjectionHelper>
@@ -126,7 +132,7 @@ class PeerConnectionE2EQualityTest
output_video_sinks_;
AnalyzerHelper analyzer_helper_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
TimeDelta real_test_duration_ RTC_GUARDED_BY(lock_) = TimeDelta::Zero();
// Task queue, that is used for running activities during test call.
diff --git a/test/pc/e2e/peer_connection_quality_test_params.h b/test/pc/e2e/peer_connection_quality_test_params.h
index ccb53492c3..edefc7a008 100644
--- a/test/pc/e2e/peer_connection_quality_test_params.h
+++ b/test/pc/e2e/peer_connection_quality_test_params.h
@@ -20,7 +20,6 @@
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
@@ -47,7 +46,6 @@ struct PeerConnectionFactoryComponents {
std::unique_ptr<RtcEventLogFactoryInterface> event_log_factory;
std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
- std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
// Will be passed to MediaEngineInterface, that will be used in
@@ -114,7 +112,7 @@ struct Params {
absl::optional<std::string> aec_dump_path;
PeerConnectionInterface::RTCConfiguration rtc_configuration;
- PeerConnectionInterface::BitrateParameters bitrate_params;
+ BitrateSettings bitrate_settings;
};
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/stats_poller.cc b/test/pc/e2e/stats_poller.cc
index 987f26e7e8..e6973e6af1 100644
--- a/test/pc/e2e/stats_poller.cc
+++ b/test/pc/e2e/stats_poller.cc
@@ -18,14 +18,13 @@ namespace webrtc {
namespace webrtc_pc_e2e {
void InternalStatsObserver::PollStats() {
- peer_->pc()->GetStats(this, nullptr,
- webrtc::PeerConnectionInterface::StatsOutputLevel::
- kStatsOutputLevelStandard);
+ peer_->pc()->GetStats(this);
}
-void InternalStatsObserver::OnComplete(const StatsReports& reports) {
+void InternalStatsObserver::OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
for (auto* observer : observers_) {
- observer->OnStatsReports(pc_label_, reports);
+ observer->OnStatsReports(pc_label_, report);
}
}
diff --git a/test/pc/e2e/stats_poller.h b/test/pc/e2e/stats_poller.h
index 3d0c2d6801..157a147834 100644
--- a/test/pc/e2e/stats_poller.h
+++ b/test/pc/e2e/stats_poller.h
@@ -17,6 +17,7 @@
#include <vector>
#include "api/peer_connection_interface.h"
+#include "api/stats/rtc_stats_collector_callback.h"
#include "api/test/stats_observer_interface.h"
#include "test/pc/e2e/test_peer.h"
@@ -25,7 +26,7 @@ namespace webrtc_pc_e2e {
// Helper class that will notify all the webrtc::test::StatsObserverInterface
// objects subscribed.
-class InternalStatsObserver : public StatsObserver {
+class InternalStatsObserver : public RTCStatsCollectorCallback {
public:
InternalStatsObserver(std::string pc_label,
TestPeer* peer,
@@ -36,7 +37,8 @@ class InternalStatsObserver : public StatsObserver {
void PollStats();
- void OnComplete(const StatsReports& reports) override;
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
private:
std::string pc_label_;
diff --git a/test/pc/e2e/test_activities_executor.cc b/test/pc/e2e/test_activities_executor.cc
index 4ace6ae7d8..ded39920f2 100644
--- a/test/pc/e2e/test_activities_executor.cc
+++ b/test/pc/e2e/test_activities_executor.cc
@@ -24,7 +24,7 @@ namespace webrtc_pc_e2e {
void TestActivitiesExecutor::Start(TaskQueueForTest* task_queue) {
RTC_DCHECK(task_queue);
task_queue_ = task_queue;
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
start_time_ = Now();
while (!scheduled_activities_.empty()) {
PostActivity(std::move(scheduled_activities_.front()));
@@ -39,7 +39,7 @@ void TestActivitiesExecutor::Stop() {
}
task_queue_->SendTask(
[this]() {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
for (auto& handle : repeating_task_handles_) {
handle.Stop();
}
@@ -56,7 +56,7 @@ void TestActivitiesExecutor::ScheduleActivity(
initial_delay_since_start >= TimeDelta::Zero());
RTC_CHECK(!interval ||
(interval->IsFinite() && *interval > TimeDelta::Zero()));
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
ScheduledActivity activity(initial_delay_since_start, interval, func);
if (start_time_.IsInfinite()) {
scheduled_activities_.push(std::move(activity));
diff --git a/test/pc/e2e/test_activities_executor.h b/test/pc/e2e/test_activities_executor.h
index 09bfe4167f..94e73d1e5f 100644
--- a/test/pc/e2e/test_activities_executor.h
+++ b/test/pc/e2e/test_activities_executor.h
@@ -17,7 +17,7 @@
#include "absl/types/optional.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/clock.h"
@@ -63,7 +63,7 @@ class TestActivitiesExecutor {
TaskQueueForTest* task_queue_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
// Time when test was started. Minus infinity means that it wasn't started
// yet.
Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
diff --git a/test/pc/e2e/test_peer.cc b/test/pc/e2e/test_peer.cc
index a95cd8db5a..65d3eb36b8 100644
--- a/test/pc/e2e/test_peer.cc
+++ b/test/pc/e2e/test_peer.cc
@@ -42,14 +42,15 @@ TestPeer::TestPeer(
rtc::scoped_refptr<PeerConnectionInterface> pc,
std::unique_ptr<MockPeerConnectionObserver> observer,
std::unique_ptr<Params> params,
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>
- video_generators,
- rtc::scoped_refptr<AudioProcessing> audio_processing)
- : PeerConnectionWrapper::PeerConnectionWrapper(std::move(pc_factory),
- std::move(pc),
- std::move(observer)),
+ std::vector<PeerConfigurerImpl::VideoSource> video_sources,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
+ std::unique_ptr<rtc::Thread> worker_thread)
+ : worker_thread_(std::move(worker_thread)),
+ wrapper_(std::make_unique<PeerConnectionWrapper>(std::move(pc_factory),
+ std::move(pc),
+ std::move(observer))),
params_(std::move(params)),
- video_generators_(std::move(video_generators)),
+ video_sources_(std::move(video_sources)),
audio_processing_(audio_processing) {}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/test_peer.h b/test/pc/e2e/test_peer.h
index ae664f4810..4310cbda1c 100644
--- a/test/pc/e2e/test_peer.h
+++ b/test/pc/e2e/test_peer.h
@@ -15,22 +15,75 @@
#include <vector>
#include "absl/memory/memory.h"
+#include "absl/types/variant.h"
#include "api/test/frame_generator_interface.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
#include "pc/peer_connection_wrapper.h"
+#include "test/pc/e2e/peer_configurer.h"
#include "test/pc/e2e/peer_connection_quality_test_params.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// Describes a single participant in the call.
-class TestPeer final : public PeerConnectionWrapper {
+class TestPeer final {
public:
- using PeerConnectionWrapper::PeerConnectionWrapper;
-
Params* params() const { return params_.get(); }
- std::unique_ptr<test::FrameGeneratorInterface> ReleaseVideoGenerator(
- size_t i) {
- return std::move(video_generators_[i]);
+ PeerConfigurerImpl::VideoSource ReleaseVideoSource(size_t i) {
+ return std::move(video_sources_[i]);
+ }
+
+ PeerConnectionFactoryInterface* pc_factory() {
+ return wrapper_->pc_factory();
+ }
+ PeerConnectionInterface* pc() { return wrapper_->pc(); }
+ MockPeerConnectionObserver* observer() { return wrapper_->observer(); }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateOffer() {
+ return wrapper_->CreateOffer();
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswer() {
+ return wrapper_->CreateAnswer();
+ }
+
+ bool SetLocalDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out = nullptr) {
+ return wrapper_->SetLocalDescription(std::move(desc), error_out);
+ }
+
+ bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out = nullptr) {
+ return wrapper_->SetRemoteDescription(std::move(desc), error_out);
+ }
+
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ cricket::MediaType media_type,
+ const RtpTransceiverInit& init) {
+ return wrapper_->AddTransceiver(media_type, init);
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids = {}) {
+ return wrapper_->AddTrack(track, stream_ids);
+ }
+
+ rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const std::string& label) {
+ return wrapper_->CreateDataChannel(label);
+ }
+
+ PeerConnectionInterface::SignalingState signaling_state() {
+ return wrapper_->signaling_state();
+ }
+
+ bool IsIceGatheringDone() { return wrapper_->IsIceGatheringDone(); }
+
+ bool IsIceConnected() { return wrapper_->IsIceConnected(); }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStats() {
+ return wrapper_->GetStats();
}
void DetachAecDump() {
@@ -49,13 +102,16 @@ class TestPeer final : public PeerConnectionWrapper {
rtc::scoped_refptr<PeerConnectionInterface> pc,
std::unique_ptr<MockPeerConnectionObserver> observer,
std::unique_ptr<Params> params,
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>
- video_generators,
- rtc::scoped_refptr<AudioProcessing> audio_processing);
+ std::vector<PeerConfigurerImpl::VideoSource> video_sources,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
+ std::unique_ptr<rtc::Thread> worker_thread);
private:
+ // Keeps ownership of worker thread. It has to be destroyed after |wrapper_|.
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ std::unique_ptr<PeerConnectionWrapper> wrapper_;
std::unique_ptr<Params> params_;
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>> video_generators_;
+ std::vector<PeerConfigurerImpl::VideoSource> video_sources_;
rtc::scoped_refptr<AudioProcessing> audio_processing_;
std::vector<std::unique_ptr<IceCandidateInterface>> remote_ice_candidates_;
diff --git a/test/pc/e2e/test_peer_factory.cc b/test/pc/e2e/test_peer_factory.cc
index 009c446a90..634a37e95b 100644
--- a/test/pc/e2e/test_peer_factory.cc
+++ b/test/pc/e2e/test_peer_factory.cc
@@ -12,7 +12,10 @@
#include <utility>
#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/create_time_controller.h"
+#include "api/test/time_controller.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "media/engine/webrtc_media_engine.h"
@@ -42,17 +45,19 @@ constexpr int kDefaultSamplingFrequencyInHz = 48000;
// and |pc_dependencies| if they are omitted. Also setup required
// dependencies, that won't be specially provided by factory and will be just
// transferred to peer connection creation code.
-void SetMandatoryEntities(InjectableComponents* components) {
+void SetMandatoryEntities(InjectableComponents* components,
+ TimeController& time_controller) {
RTC_DCHECK(components->pcf_dependencies);
RTC_DCHECK(components->pc_dependencies);
// Setup required peer connection factory dependencies.
if (components->pcf_dependencies->task_queue_factory == nullptr) {
components->pcf_dependencies->task_queue_factory =
- CreateDefaultTaskQueueFactory();
+ time_controller.CreateTaskQueueFactory();
}
if (components->pcf_dependencies->call_factory == nullptr) {
- components->pcf_dependencies->call_factory = webrtc::CreateCallFactory();
+ components->pcf_dependencies->call_factory =
+ CreateTimeControllerBasedCallFactory(&time_controller);
}
if (components->pcf_dependencies->event_log_factory == nullptr) {
components->pcf_dependencies->event_log_factory =
@@ -172,6 +177,7 @@ std::unique_ptr<cricket::MediaEngineInterface> CreateMediaEngine(
}
void WrapVideoEncoderFactory(
+ absl::string_view peer_name,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
PeerConnectionFactoryComponents* pcf_dependencies,
@@ -184,11 +190,12 @@ void WrapVideoEncoderFactory(
}
pcf_dependencies->video_encoder_factory =
video_analyzer_helper->WrapVideoEncoderFactory(
- std::move(video_encoder_factory), bitrate_multiplier,
+ peer_name, std::move(video_encoder_factory), bitrate_multiplier,
std::move(stream_required_spatial_index));
}
void WrapVideoDecoderFactory(
+ absl::string_view peer_name,
PeerConnectionFactoryComponents* pcf_dependencies,
VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) {
std::unique_ptr<VideoDecoderFactory> video_decoder_factory;
@@ -199,7 +206,7 @@ void WrapVideoDecoderFactory(
}
pcf_dependencies->video_decoder_factory =
video_analyzer_helper->WrapVideoDecoderFactory(
- std::move(video_decoder_factory));
+ peer_name, std::move(video_decoder_factory));
}
// Creates PeerConnectionFactoryDependencies objects, providing entities
@@ -208,10 +215,12 @@ PeerConnectionFactoryDependencies CreatePCFDependencies(
std::unique_ptr<PeerConnectionFactoryComponents> pcf_dependencies,
std::unique_ptr<cricket::MediaEngineInterface> media_engine,
rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
rtc::Thread* network_thread) {
PeerConnectionFactoryDependencies pcf_deps;
- pcf_deps.network_thread = network_thread;
pcf_deps.signaling_thread = signaling_thread;
+ pcf_deps.worker_thread = worker_thread;
+ pcf_deps.network_thread = network_thread;
pcf_deps.media_engine = std::move(media_engine);
pcf_deps.call_factory = std::move(pcf_dependencies->call_factory);
@@ -226,10 +235,6 @@ PeerConnectionFactoryDependencies CreatePCFDependencies(
pcf_deps.network_controller_factory =
std::move(pcf_dependencies->network_controller_factory);
}
- if (pcf_dependencies->media_transport_factory != nullptr) {
- pcf_deps.media_transport_factory =
- std::move(pcf_dependencies->media_transport_factory);
- }
if (pcf_dependencies->neteq_factory != nullptr) {
pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory);
}
@@ -281,21 +286,21 @@ absl::optional<RemotePeerAudioConfig> RemotePeerAudioConfig::Create(
}
std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
- std::unique_ptr<InjectableComponents> components,
- std::unique_ptr<Params> params,
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>
- video_generators,
+ std::unique_ptr<PeerConfigurerImpl> configurer,
std::unique_ptr<MockPeerConnectionObserver> observer,
- VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
- rtc::Thread* signaling_thread,
absl::optional<RemotePeerAudioConfig> remote_audio_config,
double bitrate_multiplier,
- absl::optional<EchoEmulationConfig> echo_emulation_config,
- rtc::TaskQueue* task_queue) {
+ absl::optional<PeerConnectionE2EQualityTestFixture::EchoEmulationConfig>
+ echo_emulation_config) {
+ std::unique_ptr<InjectableComponents> components =
+ configurer->ReleaseComponents();
+ std::unique_ptr<Params> params = configurer->ReleaseParams();
+ std::vector<PeerConfigurerImpl::VideoSource> video_sources =
+ configurer->ReleaseVideoSources();
RTC_DCHECK(components);
RTC_DCHECK(params);
- RTC_DCHECK_EQ(params->video_configs.size(), video_generators.size());
- SetMandatoryEntities(components.get());
+ RTC_DCHECK_EQ(params->video_configs.size(), video_sources.size());
+ SetMandatoryEntities(components.get(), time_controller_);
params->rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan;
// Create peer connection factory.
@@ -303,24 +308,28 @@ std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
webrtc::AudioProcessingBuilder().Create();
if (params->aec_dump_path && audio_processing) {
audio_processing->CreateAndAttachAecDump(*params->aec_dump_path, -1,
- task_queue);
+ task_queue_);
}
rtc::scoped_refptr<AudioDeviceModule> audio_device_module =
CreateAudioDeviceModule(
params->audio_config, remote_audio_config, echo_emulation_config,
components->pcf_dependencies->task_queue_factory.get());
WrapVideoEncoderFactory(
- bitrate_multiplier,
+ params->name.value(), bitrate_multiplier,
CalculateRequiredSpatialIndexPerStream(params->video_configs),
- components->pcf_dependencies.get(), video_analyzer_helper);
- WrapVideoDecoderFactory(components->pcf_dependencies.get(),
- video_analyzer_helper);
+ components->pcf_dependencies.get(), video_analyzer_helper_);
+ WrapVideoDecoderFactory(params->name.value(),
+ components->pcf_dependencies.get(),
+ video_analyzer_helper_);
std::unique_ptr<cricket::MediaEngineInterface> media_engine =
CreateMediaEngine(components->pcf_dependencies.get(), audio_device_module,
audio_processing);
+
+ std::unique_ptr<rtc::Thread> worker_thread =
+ time_controller_.CreateThread("worker_thread");
PeerConnectionFactoryDependencies pcf_deps = CreatePCFDependencies(
std::move(components->pcf_dependencies), std::move(media_engine),
- signaling_thread, components->network_thread);
+ signaling_thread_, worker_thread.get(), components->network_thread);
rtc::scoped_refptr<PeerConnectionFactoryInterface> peer_connection_factory =
CreateModularPeerConnectionFactory(std::move(pcf_deps));
@@ -330,28 +339,12 @@ std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
rtc::scoped_refptr<PeerConnectionInterface> peer_connection =
peer_connection_factory->CreatePeerConnection(params->rtc_configuration,
std::move(pc_deps));
- peer_connection->SetBitrate(params->bitrate_params);
+ peer_connection->SetBitrate(params->bitrate_settings);
return absl::WrapUnique(new TestPeer(
peer_connection_factory, peer_connection, std::move(observer),
- std::move(params), std::move(video_generators), audio_processing));
-}
-
-std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
- std::unique_ptr<PeerConfigurerImpl> configurer,
- std::unique_ptr<MockPeerConnectionObserver> observer,
- VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
- rtc::Thread* signaling_thread,
- absl::optional<RemotePeerAudioConfig> remote_audio_config,
- double bitrate_multiplier,
- absl::optional<PeerConnectionE2EQualityTestFixture::EchoEmulationConfig>
- echo_emulation_config,
- rtc::TaskQueue* task_queue) {
- return CreateTestPeer(
- configurer->ReleaseComponents(), configurer->ReleaseParams(),
- configurer->ReleaseVideoGenerators(), std::move(observer),
- video_analyzer_helper, signaling_thread, remote_audio_config,
- bitrate_multiplier, echo_emulation_config, task_queue);
+ std::move(params), std::move(video_sources), audio_processing,
+ std::move(worker_thread)));
}
} // namespace webrtc_pc_e2e
diff --git a/test/pc/e2e/test_peer_factory.h b/test/pc/e2e/test_peer_factory.h
index 8f6b56e60b..df33406270 100644
--- a/test/pc/e2e/test_peer_factory.h
+++ b/test/pc/e2e/test_peer_factory.h
@@ -19,6 +19,7 @@
#include "absl/strings/string_view.h"
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/time_controller.h"
#include "modules/audio_device/include/test_audio_device.h"
#include "rtc_base/task_queue.h"
#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
@@ -44,43 +45,40 @@ struct RemotePeerAudioConfig {
class TestPeerFactory {
public:
+ // Creates a test peer factory.
+ // |signaling_thread| will be used as a signaling thread for all peers created
+ // by this factory.
+ // |time_controller| will be used to create required threads, task queue
+ // factories and call factory.
+ // |video_analyzer_helper| will be used to setup video quality analysis for
+ // created peers.
+ // |task_queue| will be used for AEC dump if it is requested.
+ TestPeerFactory(rtc::Thread* signaling_thread,
+ TimeController& time_controller,
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
+ rtc::TaskQueue* task_queue)
+ : signaling_thread_(signaling_thread),
+ time_controller_(time_controller),
+ video_analyzer_helper_(video_analyzer_helper),
+ task_queue_(task_queue) {}
+
// Setups all components, that should be provided to WebRTC
// PeerConnectionFactory and PeerConnection creation methods,
// also will setup dependencies, that are required for media analyzers
// injection.
- //
- // |signaling_thread| will be provided by test fixture implementation.
- // |params| - describes current peer parameters, like current peer video
- // streams and audio streams
- static std::unique_ptr<TestPeer> CreateTestPeer(
- std::unique_ptr<InjectableComponents> components,
- std::unique_ptr<Params> params,
- std::vector<std::unique_ptr<test::FrameGeneratorInterface>>
- video_generators,
- std::unique_ptr<MockPeerConnectionObserver> observer,
- VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
- rtc::Thread* signaling_thread,
- absl::optional<RemotePeerAudioConfig> remote_audio_config,
- double bitrate_multiplier,
- absl::optional<PeerConnectionE2EQualityTestFixture::EchoEmulationConfig>
- echo_emulation_config,
- rtc::TaskQueue* task_queue);
- // Setups all components, that should be provided to WebRTC
- // PeerConnectionFactory and PeerConnection creation methods,
- // also will setup dependencies, that are required for media analyzers
- // injection.
- //
- // |signaling_thread| will be provided by test fixture implementation.
- static std::unique_ptr<TestPeer> CreateTestPeer(
+ std::unique_ptr<TestPeer> CreateTestPeer(
std::unique_ptr<PeerConfigurerImpl> configurer,
std::unique_ptr<MockPeerConnectionObserver> observer,
- VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
- rtc::Thread* signaling_thread,
absl::optional<RemotePeerAudioConfig> remote_audio_config,
double bitrate_multiplier,
absl::optional<PeerConnectionE2EQualityTestFixture::EchoEmulationConfig>
- echo_emulation_config,
- rtc::TaskQueue* task_queue);
+ echo_emulation_config);
+
+ private:
+ rtc::Thread* signaling_thread_;
+ TimeController& time_controller_;
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper_;
+ rtc::TaskQueue* task_queue_;
};
} // namespace webrtc_pc_e2e
diff --git a/test/peer_scenario/BUILD.gn b/test/peer_scenario/BUILD.gn
index d702cf539f..bdc77b70c8 100644
--- a/test/peer_scenario/BUILD.gn
+++ b/test/peer_scenario/BUILD.gn
@@ -52,6 +52,8 @@ if (rtc_include_tests) {
"../network:emulated_network",
"../scenario",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/memory",
]
diff --git a/test/rtp_header_parser.cc b/test/rtp_header_parser.cc
index 713e64d83c..45686acb4c 100644
--- a/test/rtp_header_parser.cc
+++ b/test/rtp_header_parser.cc
@@ -13,7 +13,7 @@
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -34,9 +34,8 @@ class RtpHeaderParserImpl : public RtpHeaderParser {
bool DeregisterRtpHeaderExtension(RtpExtension extension) override;
private:
- rtc::CriticalSection critical_section_;
- RtpHeaderExtensionMap rtp_header_extension_map_
- RTC_GUARDED_BY(critical_section_);
+ mutable Mutex mutex_;
+ RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(mutex_);
};
std::unique_ptr<RtpHeaderParser> RtpHeaderParser::CreateForTest() {
@@ -68,7 +67,7 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
RtpHeaderExtensionMap map;
{
- rtc::CritScope cs(&critical_section_);
+ MutexLock lock(&mutex_);
map = rtp_header_extension_map_;
}
@@ -79,24 +78,24 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
return true;
}
bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RtpExtension extension) {
- rtc::CritScope cs(&critical_section_);
+ MutexLock lock(&mutex_);
return rtp_header_extension_map_.RegisterByUri(extension.id, extension.uri);
}
bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RTPExtensionType type,
uint8_t id) {
- rtc::CritScope cs(&critical_section_);
+ MutexLock lock(&mutex_);
return rtp_header_extension_map_.RegisterByType(id, type);
}
bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RtpExtension extension) {
- rtc::CritScope cs(&critical_section_);
+ MutexLock lock(&mutex_);
return rtp_header_extension_map_.Deregister(
rtp_header_extension_map_.GetType(extension.id));
}
bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RTPExtensionType type) {
- rtc::CritScope cs(&critical_section_);
+ MutexLock lock(&mutex_);
return rtp_header_extension_map_.Deregister(type) == 0;
}
} // namespace webrtc
diff --git a/test/rtp_rtcp_observer.h b/test/rtp_rtcp_observer.h
index 3bfa475f73..036f5cdc20 100644
--- a/test/rtp_rtcp_observer.h
+++ b/test/rtp_rtcp_observer.h
@@ -18,7 +18,6 @@
#include "api/test/simulated_network.h"
#include "call/simulated_packet_receiver.h"
#include "call/video_send_stream.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/field_trial.h"
#include "test/direct_transport.h"
diff --git a/test/run_loop_unittest.cc b/test/run_loop_unittest.cc
index a356cc265a..160aba0716 100644
--- a/test/run_loop_unittest.cc
+++ b/test/run_loop_unittest.cc
@@ -17,7 +17,6 @@
namespace webrtc {
TEST(RunLoopTest, TaskQueueOnThread) {
- EXPECT_EQ(TaskQueueBase::Current(), nullptr);
test::RunLoop loop;
EXPECT_EQ(TaskQueueBase::Current(), loop.task_queue());
EXPECT_TRUE(loop.task_queue()->IsCurrent());
diff --git a/test/scenario/BUILD.gn b/test/scenario/BUILD.gn
index e2e5f8cef2..3ae13367cd 100644
--- a/test/scenario/BUILD.gn
+++ b/test/scenario/BUILD.gn
@@ -133,6 +133,7 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:safe_minmax",
"../../rtc_base:task_queue_for_test",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
"../../system_wrappers",
@@ -141,6 +142,8 @@ if (rtc_include_tests) {
"../logging:log_writer",
"../network:emulated_network",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/memory",
diff --git a/test/scenario/call_client.cc b/test/scenario/call_client.cc
index fb888df694..0107497252 100644
--- a/test/scenario/call_client.cc
+++ b/test/scenario/call_client.cc
@@ -54,7 +54,8 @@ Call* CreateCall(TimeController* time_controller,
RtcEventLog* event_log,
CallClientConfig config,
LoggingNetworkControllerFactory* network_controller_factory,
- rtc::scoped_refptr<AudioState> audio_state) {
+ rtc::scoped_refptr<AudioState> audio_state,
+ rtc::scoped_refptr<SharedModuleThread> call_thread) {
CallConfig call_config(event_log);
call_config.bitrate_config.max_bitrate_bps =
config.transport.rates.max_rate.bps_or(-1);
@@ -67,7 +68,7 @@ Call* CreateCall(TimeController* time_controller,
call_config.audio_state = audio_state;
call_config.trials = config.field_trials;
return Call::Create(call_config, time_controller->GetClock(),
- time_controller->CreateProcessThread("CallModules"),
+ std::move(call_thread),
time_controller->CreateProcessThread("Pacer"));
}
@@ -213,9 +214,14 @@ CallClient::CallClient(
event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(),
log_writer_factory_.get());
fake_audio_setup_ = InitAudio(time_controller_);
+ RTC_DCHECK(!module_thread_);
+ module_thread_ = SharedModuleThread::Create(
+ time_controller_->CreateProcessThread("CallThread"),
+ [this]() { module_thread_ = nullptr; });
+
call_.reset(CreateCall(time_controller_, event_log_.get(), config,
&network_controller_factory_,
- fake_audio_setup_.audio_state));
+ fake_audio_setup_.audio_state, module_thread_));
transport_ = std::make_unique<NetworkNodeTransport>(clock_, call_.get());
});
}
@@ -223,6 +229,7 @@ CallClient::CallClient(
CallClient::~CallClient() {
SendTask([&] {
call_.reset();
+ RTC_DCHECK(!module_thread_); // Should be set to null in the lambda above.
fake_audio_setup_ = {};
rtc::Event done;
event_log_->StopLogging([&done] { done.Set(); });
diff --git a/test/scenario/call_client.h b/test/scenario/call_client.h
index 33fa2765cb..80814eb1b3 100644
--- a/test/scenario/call_client.h
+++ b/test/scenario/call_client.h
@@ -157,6 +157,8 @@ class CallClient : public EmulatedNetworkReceiverInterface {
// Defined last so it's destroyed first.
TaskQueueForTest task_queue_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_;
+
const FieldTrialBasedConfig field_trials_;
};
diff --git a/test/scenario/network_node.cc b/test/scenario/network_node.cc
index aa576dcf53..702789fe73 100644
--- a/test/scenario/network_node.cc
+++ b/test/scenario/network_node.cc
@@ -86,7 +86,7 @@ bool NetworkNodeTransport::SendRtp(const uint8_t* packet,
sent_packet.info.packet_type = rtc::PacketType::kData;
sender_call_->OnSentPacket(sent_packet);
- rtc::CritScope crit(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!endpoint_)
return false;
rtc::CopyOnWriteBuffer buffer(packet, length);
@@ -97,7 +97,7 @@ bool NetworkNodeTransport::SendRtp(const uint8_t* packet,
bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) {
rtc::CopyOnWriteBuffer buffer(packet, length);
- rtc::CritScope crit(&crit_sect_);
+ MutexLock lock(&mutex_);
if (!endpoint_)
return false;
endpoint_->SendPacket(local_address_, remote_address_, buffer,
@@ -121,7 +121,7 @@ void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint,
{
// Only IPv4 address is supported.
RTC_CHECK_EQ(receiver_address.family(), AF_INET);
- rtc::CritScope crit(&crit_sect_);
+ MutexLock lock(&mutex_);
endpoint_ = endpoint;
local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0);
remote_address_ = receiver_address;
@@ -134,7 +134,7 @@ void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint,
}
void NetworkNodeTransport::Disconnect() {
- rtc::CritScope crit(&crit_sect_);
+ MutexLock lock(&mutex_);
current_network_route_.connected = false;
sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged(
kDummyTransportName, current_network_route_);
diff --git a/test/scenario/network_node.h b/test/scenario/network_node.h
index b3d093b84e..ea8eb35daf 100644
--- a/test/scenario/network_node.h
+++ b/test/scenario/network_node.h
@@ -22,6 +22,7 @@
#include "call/simulated_network.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "test/network/network_emulation.h"
#include "test/scenario/column_printer.h"
@@ -65,19 +66,19 @@ class NetworkNodeTransport : public Transport {
void Disconnect();
DataSize packet_overhead() {
- rtc::CritScope crit(&crit_sect_);
+ MutexLock lock(&mutex_);
return packet_overhead_;
}
private:
- rtc::CriticalSection crit_sect_;
+ Mutex mutex_;
Clock* const sender_clock_;
Call* const sender_call_;
- EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(crit_sect_) = nullptr;
- rtc::SocketAddress local_address_ RTC_GUARDED_BY(crit_sect_);
- rtc::SocketAddress remote_address_ RTC_GUARDED_BY(crit_sect_);
- DataSize packet_overhead_ RTC_GUARDED_BY(crit_sect_) = DataSize::Zero();
- rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(crit_sect_);
+ EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(mutex_) = nullptr;
+ rtc::SocketAddress local_address_ RTC_GUARDED_BY(mutex_);
+ rtc::SocketAddress remote_address_ RTC_GUARDED_BY(mutex_);
+ DataSize packet_overhead_ RTC_GUARDED_BY(mutex_) = DataSize::Zero();
+ rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_);
};
} // namespace test
} // namespace webrtc
diff --git a/test/scenario/scenario_unittest.cc b/test/scenario/scenario_unittest.cc
index 839e6a375e..7c05ea39dd 100644
--- a/test/scenario/scenario_unittest.cc
+++ b/test/scenario/scenario_unittest.cc
@@ -11,6 +11,7 @@
#include <atomic>
+#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/logging/memory_log_writer.h"
#include "test/scenario/stats_collection.h"
@@ -119,7 +120,8 @@ TEST(ScenarioTest, MAYBE_RealTimeEncoding) {
}
// Regression tests based on previous runs.
EXPECT_LT(analyzer.stats().lost_count, 2);
- EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 10);
+ // This far below expected but ensures that we get something.
+ EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 10);
}
TEST(ScenarioTest, SimTimeFakeing) {
@@ -140,5 +142,49 @@ TEST(ScenarioTest, WritesToRtcEventLog) {
EXPECT_GE(storage.logs().at("alice.rtc.dat").size(), 1u);
}
+TEST(ScenarioTest,
+ RetransmitsVideoPacketsInAudioAndVideoCallWithSendSideBweAndLoss) {
+ // Make sure audio packets are included in transport feedback.
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-SendSideBwe/Enabled/WebRTC-Audio-ABWENoTWCC/Disabled/");
+
+ Scenario s;
+ CallClientConfig call_client_config;
+ call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+ auto* alice = s.CreateClient("alice", call_client_config);
+ auto* bob = s.CreateClient("bob", call_client_config);
+ NetworkSimulationConfig network_config;
+ // Add some loss and delay.
+ network_config.delay = TimeDelta::Millis(200);
+ network_config.loss_rate = 0.05;
+ auto alice_net = s.CreateSimulationNode(network_config);
+ auto bob_net = s.CreateSimulationNode(network_config);
+ auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+
+ // First add an audio stream, then a video stream.
+ // Needed to make sure audio RTP module is selected first when sending
+ // transport feedback message.
+ AudioStreamConfig audio_stream_config;
+ audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6);
+ audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64);
+ audio_stream_config.encoder.allocate_bitrate = true;
+ audio_stream_config.stream.in_bandwidth_estimation = true;
+ s.CreateAudioStream(route->forward(), audio_stream_config);
+ s.CreateAudioStream(route->reverse(), audio_stream_config);
+
+ VideoStreamConfig video_stream_config;
+ auto video = s.CreateVideoStream(route->forward(), video_stream_config);
+ s.CreateVideoStream(route->reverse(), video_stream_config);
+
+ // Run for 10 seconds.
+ s.RunFor(TimeDelta::Seconds(10));
+ // Make sure retransmissions have happened.
+ int retransmit_packets = 0;
+ for (const auto& substream : video->send()->GetStats().substreams) {
+ retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
+ }
+ EXPECT_GT(retransmit_packets, 0);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc
index 4bea740074..cad466ecfd 100644
--- a/test/scenario/video_stream.cc
+++ b/test/scenario/video_stream.cc
@@ -373,7 +373,7 @@ SendVideoStream::SendVideoStream(CallClient* sender,
case Encoder::Implementation::kFake:
encoder_factory_ =
std::make_unique<FunctionVideoEncoderFactory>([this]() {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
std::unique_ptr<FakeEncoder> encoder;
if (config_.encoder.codec == Codec::kVideoCodecVP8) {
encoder = std::make_unique<test::FakeVp8Encoder>(sender_->clock_);
@@ -452,7 +452,7 @@ void SendVideoStream::Stop() {
void SendVideoStream::UpdateConfig(
std::function<void(VideoStreamConfig*)> modifier) {
sender_->SendTask([&] {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
VideoStreamConfig prior_config = config_;
modifier(&config_);
if (prior_config.encoder.fake.max_rate != config_.encoder.fake.max_rate) {
@@ -473,7 +473,7 @@ void SendVideoStream::UpdateConfig(
void SendVideoStream::UpdateActiveLayers(std::vector<bool> active_layers) {
sender_->task_queue_.PostTask([=] {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
if (config_.encoder.codec ==
VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) {
send_stream_->UpdateActiveSimulcastLayers(active_layers);
diff --git a/test/scenario/video_stream.h b/test/scenario/video_stream.h
index f0b99db57a..96b6d49f63 100644
--- a/test/scenario/video_stream.h
+++ b/test/scenario/video_stream.h
@@ -14,6 +14,7 @@
#include <vector>
#include "rtc_base/constructor_magic.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/fake_encoder.h"
#include "test/fake_videorenderer.h"
#include "test/frame_generator_capturer.h"
@@ -53,14 +54,14 @@ class SendVideoStream {
Transport* send_transport,
VideoFrameMatcher* matcher);
- rtc::CriticalSection crit_;
+ Mutex mutex_;
std::vector<uint32_t> ssrcs_;
std::vector<uint32_t> rtx_ssrcs_;
VideoSendStream* send_stream_ = nullptr;
CallClient* const sender_;
- VideoStreamConfig config_ RTC_GUARDED_BY(crit_);
+ VideoStreamConfig config_ RTC_GUARDED_BY(mutex_);
std::unique_ptr<VideoEncoderFactory> encoder_factory_;
- std::vector<test::FakeEncoder*> fake_encoders_ RTC_GUARDED_BY(crit_);
+ std::vector<test::FakeEncoder*> fake_encoders_ RTC_GUARDED_BY(mutex_);
std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
std::unique_ptr<FrameGeneratorCapturer> video_capturer_;
std::unique_ptr<ForwardingCapturedFrameTap> frame_tap_;
diff --git a/test/scenario/video_stream_unittest.cc b/test/scenario/video_stream_unittest.cc
index 1f2cad7e8c..873ef639ba 100644
--- a/test/scenario/video_stream_unittest.cc
+++ b/test/scenario/video_stream_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <atomic>
+#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/scenario/scenario.h"
@@ -169,5 +170,98 @@ TEST(VideoStreamTest, SendsFecWithFlexFec) {
VideoSendStream::Stats video_stats = video->send()->GetStats();
EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
}
+
+TEST(VideoStreamTest, SendsFecWithDeferredFlexFec) {
+ ScopedFieldTrials trial("WebRTC-DeferredFecGeneration/Enabled/");
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+ c->loss_rate = 0.1;
+ c->delay = TimeDelta::Millis(100);
+ })},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->stream.use_flexfec = true;
+ });
+ s.RunFor(TimeDelta::Seconds(5));
+ VideoSendStream::Stats video_stats = video->send()->GetStats();
+ EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
+}
+
+TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) {
+ // Declared before scenario to avoid use after free.
+ std::atomic<size_t> num_qvga_frames_(0);
+ std::atomic<size_t> num_vga_frames_(0);
+
+ Scenario s;
+ // Link has enough capacity for VGA.
+ NetworkSimulationConfig net_conf;
+ net_conf.bandwidth = DataRate::KilobitsPerSec(800);
+ net_conf.delay = TimeDelta::Millis(50);
+ auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+ c->transport.rates.start_rate = DataRate::KilobitsPerSec(800);
+ });
+ auto send_net = {s.CreateSimulationNode(net_conf)};
+ auto ret_net = {s.CreateSimulationNode(net_conf)};
+ auto* route = s.CreateRoutes(
+ client, send_net, s.CreateClient("return", CallClientConfig()), ret_net);
+
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) {
+ if (info.decoded->width() == 640) {
+ ++num_vga_frames_;
+ } else if (info.decoded->width() == 320) {
+ ++num_qvga_frames_;
+ } else {
+ ADD_FAILURE() << "Unexpected resolution: " << info.decoded->width();
+ }
+ }};
+ c->source.framerate = 30;
+ // The resolution must be high enough to allow smaller layers to be
+ // created.
+ c->source.generator.width = 640;
+ c->source.generator.height = 480;
+ c->encoder.implementation = CodecImpl::kSoftware;
+ c->encoder.codec = Codec::kVideoCodecVP9;
+ // Enable SVC.
+ c->encoder.layers.spatial = 2;
+ });
+
+ // Run for a few seconds, until streams have stabilized,
+ // check that we are sending VGA.
+ s.RunFor(TimeDelta::Seconds(5));
+ EXPECT_GT(num_vga_frames_, 0u);
+
+ // Trigger cross traffic, run until we have seen 3 consecutive
+ // seconds with no VGA frames due to reduced available bandwidth.
+ auto cross_traffic =
+ s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig());
+
+ int num_seconds_without_vga = 0;
+ int num_iterations = 0;
+ do {
+ ASSERT_LE(++num_iterations, 100);
+ num_qvga_frames_ = 0;
+ num_vga_frames_ = 0;
+ s.RunFor(TimeDelta::Seconds(1));
+ if (num_qvga_frames_ > 0 && num_vga_frames_ == 0) {
+ ++num_seconds_without_vga;
+ } else {
+ num_seconds_without_vga = 0;
+ }
+ } while (num_seconds_without_vga < 3);
+
+ // Stop cross traffic, make sure we recover and get VGA frames agian.
+ s.net()->StopCrossTraffic(cross_traffic);
+ num_qvga_frames_ = 0;
+ num_vga_frames_ = 0;
+
+ s.RunFor(TimeDelta::Seconds(40));
+ EXPECT_GT(num_qvga_frames_, 0u);
+ EXPECT_GT(num_vga_frames_, 0u);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/test/test_main.cc b/test/test_main.cc
index 8555d5e6da..5046979548 100644
--- a/test/test_main.cc
+++ b/test/test_main.cc
@@ -16,11 +16,10 @@
int main(int argc, char* argv[]) {
// Initialize the symbolizer to get a human-readable stack trace
- // TODO(crbug.com/1050976): Breaks iossim tests, re-enable when fixed.
- // absl::InitializeSymbolizer(argv[0]);
+ absl::InitializeSymbolizer(argv[0]);
- // absl::FailureSignalHandlerOptions options;
- // absl::InstallFailureSignalHandler(options);
+ absl::FailureSignalHandlerOptions options;
+ absl::InstallFailureSignalHandler(options);
std::unique_ptr<webrtc::TestMain> main = webrtc::TestMain::Create();
int err_code = main->Init(&argc, argv);
diff --git a/test/test_main_lib.cc b/test/test_main_lib.cc
index 15318b49e1..f5e02341f3 100644
--- a/test/test_main_lib.cc
+++ b/test/test_main_lib.cc
@@ -17,6 +17,7 @@
#include "absl/flags/flag.h"
#include "absl/flags/parse.h"
#include "absl/memory/memory.h"
+#include "absl/strings/match.h"
#include "absl/types/optional.h"
#include "rtc_base/checks.h"
#include "rtc_base/event_tracer.h"
@@ -100,6 +101,62 @@ namespace {
class TestMainImpl : public TestMain {
public:
+ // In order to set up a fresh rtc::Thread state for each test and avoid
+ // accidentally carrying over pending tasks that might be sent from one test
+ // and executed while another test is running, we inject a TestListener
+ // that sets up a new rtc::Thread instance for the main thread, per test.
+ class TestListener : public ::testing::EmptyTestEventListener {
+ public:
+ TestListener() = default;
+
+ private:
+ bool IsDeathTest(const char* test_case_name, const char* test_name) {
+ // Workaround to avoid wrapping the main thread when we run death tests.
+ // The approach we take for detecting death tests is essentially the same
+ // as gtest does internally. Gtest does this:
+ //
+ // static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
+ // ::testing::internal::UnitTestOptions::MatchesFilter(
+ // test_case_name, kDeathTestCaseFilter);
+ //
+ // Our approach is a little more straight forward.
+ if (absl::EndsWith(test_case_name, "DeathTest"))
+ return true;
+
+ return absl::EndsWith(test_name, "DeathTest");
+ }
+
+ void OnTestStart(const ::testing::TestInfo& test_info) override {
+ if (!IsDeathTest(test_info.test_suite_name(), test_info.name())) {
+ // Ensure that main thread gets wrapped as an rtc::Thread.
+ // TODO(bugs.webrtc.org/9714): It might be better to avoid wrapping the
+ // main thread, or leave it to individual tests that need it. But as
+ // long as we have automatic thread wrapping, we need this to avoid that
+ // some other random thread (which one depending on which tests are run)
+ // gets automatically wrapped.
+ thread_ = rtc::Thread::CreateWithSocketServer();
+ thread_->WrapCurrent();
+ RTC_DCHECK_EQ(rtc::Thread::Current(), thread_.get());
+ } else {
+ RTC_LOG(LS_INFO) << "No thread auto wrap for death test.";
+ }
+ }
+
+ void OnTestEnd(const ::testing::TestInfo& test_info) override {
+ // Terminate the message loop. Note that if the test failed to clean
+ // up pending messages, this may execute part of the test. Ideally we
+ // should print a warning message here, or even fail the test if it leaks.
+ if (thread_) {
+ thread_->Quit(); // Signal quit.
+ thread_->Run(); // Flush + process Quit signal.
+ thread_->UnwrapCurrent();
+ thread_ = nullptr;
+ }
+ }
+
+ std::unique_ptr<rtc::Thread> thread_;
+ };
+
int Init(int* argc, char* argv[]) override {
::testing::InitGoogleMock(argc, argv);
absl::ParseCommandLine(*argc, argv);
@@ -134,14 +191,7 @@ class TestMainImpl : public TestMain {
rtc::InitializeSSL();
rtc::SSLStreamAdapter::EnableTimeCallbackForTesting();
- // Ensure that main thread gets wrapped as an rtc::Thread.
- // TODO(bugs.webrt.org/9714): It might be better to avoid wrapping the main
- // thread, or leave it to individual tests that need it. But as long as we
- // have automatic thread wrapping, we need this to avoid that some other
- // random thread (which one depending on which tests are run) gets
- // automatically wrapped.
- rtc::ThreadManager::Instance()->WrapCurrentThread();
- RTC_CHECK(rtc::Thread::Current());
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestListener());
return 0;
}
diff --git a/test/test_video_capturer.cc b/test/test_video_capturer.cc
index c0d575dc5e..9ce4aa0637 100644
--- a/test/test_video_capturer.cc
+++ b/test/test_video_capturer.cc
@@ -84,7 +84,7 @@ void TestVideoCapturer::UpdateVideoAdapter() {
}
VideoFrame TestVideoCapturer::MaybePreprocess(const VideoFrame& frame) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
if (preprocessor_ != nullptr) {
return preprocessor_->Preprocess(frame);
} else {
diff --git a/test/test_video_capturer.h b/test/test_video_capturer.h
index 114767a43e..dff529cb15 100644
--- a/test/test_video_capturer.h
+++ b/test/test_video_capturer.h
@@ -18,7 +18,7 @@
#include "api/video/video_source_interface.h"
#include "media/base/video_adapter.h"
#include "media/base/video_broadcaster.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace test {
@@ -38,7 +38,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface<VideoFrame> {
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
void SetFramePreprocessor(std::unique_ptr<FramePreprocessor> preprocessor) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
preprocessor_ = std::move(preprocessor);
}
@@ -50,7 +50,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface<VideoFrame> {
void UpdateVideoAdapter();
VideoFrame MaybePreprocess(const VideoFrame& frame);
- rtc::CriticalSection lock_;
+ Mutex lock_;
std::unique_ptr<FramePreprocessor> preprocessor_ RTC_GUARDED_BY(lock_);
rtc::VideoBroadcaster broadcaster_;
cricket::VideoAdapter video_adapter_;
diff --git a/test/testsupport/ivf_video_frame_generator.cc b/test/testsupport/ivf_video_frame_generator.cc
index 81155f80ff..fe836763fa 100644
--- a/test/testsupport/ivf_video_frame_generator.cc
+++ b/test/testsupport/ivf_video_frame_generator.cc
@@ -53,7 +53,7 @@ IvfVideoFrameGenerator::IvfVideoFrameGenerator(const std::string& file_name)
WEBRTC_VIDEO_CODEC_OK);
}
IvfVideoFrameGenerator::~IvfVideoFrameGenerator() {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
if (!file_reader_) {
return;
}
@@ -62,7 +62,7 @@ IvfVideoFrameGenerator::~IvfVideoFrameGenerator() {
// Reset decoder to prevent it from async access to |this|.
video_decoder_.reset();
{
- rtc::CritScope frame_crit(&frame_decode_lock_);
+ MutexLock frame_lock(&frame_decode_lock_);
next_frame_ = absl::nullopt;
// Set event in case another thread is waiting on it.
next_frame_decoded_.Set();
@@ -70,7 +70,7 @@ IvfVideoFrameGenerator::~IvfVideoFrameGenerator() {
}
FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
next_frame_decoded_.Reset();
RTC_CHECK(file_reader_);
if (!file_reader_->HasMoreFrames()) {
@@ -86,7 +86,7 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() {
RTC_CHECK(decoded) << "Failed to decode next frame in "
<< kMaxNextFrameWaitTemeoutMs << "ms. Can't continue";
- rtc::CritScope frame_crit(&frame_decode_lock_);
+ MutexLock frame_lock(&frame_decode_lock_);
rtc::scoped_refptr<VideoFrameBuffer> buffer =
next_frame_->video_frame_buffer();
if (width_ != static_cast<size_t>(buffer->width()) ||
@@ -102,7 +102,7 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() {
}
void IvfVideoFrameGenerator::ChangeResolution(size_t width, size_t height) {
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
width_ = width;
height_ = height;
}
@@ -126,7 +126,7 @@ void IvfVideoFrameGenerator::DecodedCallback::Decoded(
}
void IvfVideoFrameGenerator::OnFrameDecoded(const VideoFrame& decoded_frame) {
- rtc::CritScope crit(&frame_decode_lock_);
+ MutexLock lock(&frame_decode_lock_);
next_frame_ = decoded_frame;
next_frame_decoded_.Set();
}
diff --git a/test/testsupport/ivf_video_frame_generator.h b/test/testsupport/ivf_video_frame_generator.h
index 913d882766..32ba21ed26 100644
--- a/test/testsupport/ivf_video_frame_generator.h
+++ b/test/testsupport/ivf_video_frame_generator.h
@@ -20,8 +20,8 @@
#include "api/video/video_frame.h"
#include "api/video_codecs/video_decoder.h"
#include "modules/video_coding/utility/ivf_file_reader.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
namespace webrtc {
@@ -71,11 +71,11 @@ class IvfVideoFrameGenerator : public FrameGeneratorInterface {
// FrameGenerator is injected into PeerConnection via some scoped_ref object
// and it can happen that the last pointer will be destroyed on the different
// thread comparing to the one from which frames were read.
- rtc::CriticalSection lock_;
+ Mutex lock_;
// This lock is used to sync between sending and receiving frame from decoder.
// We can't reuse |lock_| because then generator can be destroyed between
// frame was sent to decoder and decoder callback was invoked.
- rtc::CriticalSection frame_decode_lock_;
+ Mutex frame_decode_lock_;
rtc::Event next_frame_decoded_;
absl::optional<VideoFrame> next_frame_ RTC_GUARDED_BY(frame_decode_lock_);
diff --git a/test/testsupport/ivf_video_frame_generator_unittest.cc b/test/testsupport/ivf_video_frame_generator_unittest.cc
index 0c364dbb1d..0295fab331 100644
--- a/test/testsupport/ivf_video_frame_generator_unittest.cc
+++ b/test/testsupport/ivf_video_frame_generator_unittest.cc
@@ -25,7 +25,6 @@
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "test/gtest.h"
#include "test/testsupport/file_utils.h"
@@ -34,6 +33,8 @@
#if defined(WEBRTC_USE_H264)
#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "rtc_base/synchronization/mutex.h"
+
#endif
namespace webrtc {
@@ -71,7 +72,7 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback {
const RTPFragmentationHeader* fragmentation) override {
EXPECT_TRUE(file_writer_->WriteFrame(encoded_image, video_codec_type_));
- rtc::CritScope crit(&lock_);
+ MutexLock lock(&lock_);
received_frames_count_++;
RTC_CHECK_LE(received_frames_count_, expected_frames_count_);
if (received_frames_count_ == expected_frames_count_) {
@@ -89,7 +90,7 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback {
const VideoCodecType video_codec_type_;
const int expected_frames_count_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
int received_frames_count_ RTC_GUARDED_BY(lock_) = 0;
rtc::Event expected_frames_count_received_;
};
diff --git a/test/testsupport/mock/mock_frame_reader.h b/test/testsupport/mock/mock_frame_reader.h
index 8da3695d23..bda6b1ad2d 100644
--- a/test/testsupport/mock/mock_frame_reader.h
+++ b/test/testsupport/mock/mock_frame_reader.h
@@ -19,11 +19,11 @@ namespace test {
class MockFrameReader : public FrameReader {
public:
- MOCK_METHOD0(Init, bool());
- MOCK_METHOD0(ReadFrame, rtc::scoped_refptr<I420Buffer>());
- MOCK_METHOD0(Close, void());
- MOCK_METHOD0(FrameLength, size_t());
- MOCK_METHOD0(NumberOfFrames, int());
+ MOCK_METHOD(bool, Init, (), (override));
+ MOCK_METHOD(rtc::scoped_refptr<I420Buffer>, ReadFrame, (), (override));
+ MOCK_METHOD(void, Close, (), (override));
+ MOCK_METHOD(size_t, FrameLength, (), (override));
+ MOCK_METHOD(int, NumberOfFrames, (), (override));
};
} // namespace test
diff --git a/test/testsupport/perf_test.cc b/test/testsupport/perf_test.cc
index 310c7e36a5..3a436749fb 100644
--- a/test/testsupport/perf_test.cc
+++ b/test/testsupport/perf_test.cc
@@ -18,7 +18,7 @@
#include <vector>
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/testsupport/perf_test_histogram_writer.h"
namespace webrtc {
@@ -60,7 +60,7 @@ class PlottableCounterPrinter {
PlottableCounterPrinter() : output_(stdout) {}
void SetOutput(FILE* output) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
output_ = output;
}
@@ -68,14 +68,14 @@ class PlottableCounterPrinter {
const std::string& trace_name,
const webrtc::SamplesStatsCounter& counter,
const std::string& units) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
plottable_counters_.push_back({graph_name, trace_name, counter, units});
}
void Print(const std::vector<std::string>& desired_graphs_raw) const {
std::set<std::string> desired_graphs(desired_graphs_raw.begin(),
desired_graphs_raw.end());
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
for (auto& counter : plottable_counters_) {
if (!desired_graphs.empty()) {
auto it = desired_graphs.find(counter.graph_name);
@@ -108,9 +108,9 @@ class PlottableCounterPrinter {
}
private:
- rtc::CriticalSection crit_;
- std::vector<PlottableCounter> plottable_counters_ RTC_GUARDED_BY(&crit_);
- FILE* output_ RTC_GUARDED_BY(&crit_);
+ mutable Mutex mutex_;
+ std::vector<PlottableCounter> plottable_counters_ RTC_GUARDED_BY(&mutex_);
+ FILE* output_ RTC_GUARDED_BY(&mutex_);
};
PlottableCounterPrinter& GetPlottableCounterPrinter() {
@@ -123,7 +123,7 @@ class ResultsLinePrinter {
ResultsLinePrinter() : output_(stdout) {}
void SetOutput(FILE* output) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
output_ = output;
}
@@ -177,7 +177,7 @@ class ResultsLinePrinter {
const std::string& suffix,
const std::string& units,
bool important) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// <*>RESULT <graph_name>: <trace_name>= <value> <units>
// <*>RESULT <graph_name>: <trace_name>= {<mean>, <std deviation>} <units>
// <*>RESULT <graph_name>: <trace_name>= [<value>,value,value,...,] <units>
@@ -186,8 +186,8 @@ class ResultsLinePrinter {
values.c_str(), suffix.c_str(), units.c_str());
}
- rtc::CriticalSection crit_;
- FILE* output_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ FILE* output_ RTC_GUARDED_BY(&mutex_);
};
ResultsLinePrinter& GetResultsLinePrinter() {
diff --git a/test/testsupport/perf_test.h b/test/testsupport/perf_test.h
index b0a5607d20..4bb6773336 100644
--- a/test/testsupport/perf_test.h
+++ b/test/testsupport/perf_test.h
@@ -15,7 +15,6 @@
#include <string>
#include <vector>
-#include "absl/flags/flag.h"
#include "api/array_view.h"
#include "rtc_base/numerics/samples_stats_counter.h"
diff --git a/test/testsupport/perf_test_histogram_writer.cc b/test/testsupport/perf_test_histogram_writer.cc
index ad70d6801c..a4f86dc5f0 100644
--- a/test/testsupport/perf_test_histogram_writer.cc
+++ b/test/testsupport/perf_test_histogram_writer.cc
@@ -15,8 +15,8 @@
#include <map>
#include <memory>
-#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
#include "third_party/catapult/tracing/tracing/value/diagnostics/reserved_infos.h"
#include "third_party/catapult/tracing/tracing/value/histogram.h"
@@ -33,9 +33,9 @@ std::string AsJsonString(const std::string string) {
class PerfTestHistogramWriter : public PerfTestResultWriter {
public:
- PerfTestHistogramWriter() : crit_() {}
+ PerfTestHistogramWriter() : mutex_() {}
void ClearResults() override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
histograms_.clear();
}
@@ -75,7 +75,7 @@ class PerfTestHistogramWriter : public PerfTestResultWriter {
std::string Serialize() const override {
proto::HistogramSet histogram_set;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
for (const auto& histogram : histograms_) {
std::unique_ptr<proto::Histogram> proto = histogram.second->toProto();
histogram_set.mutable_histograms()->AddAllocated(proto.release());
@@ -108,7 +108,7 @@ class PerfTestHistogramWriter : public PerfTestResultWriter {
// parlance). There should be several histograms with the same measurement
// if they're for different stories.
std::string measurement_and_story = graph_name + trace_name;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (histograms_.count(measurement_and_story) == 0) {
proto::UnitAndDirection unit = ParseUnit(units, improve_direction);
std::unique_ptr<catapult::HistogramBuilder> builder =
@@ -182,9 +182,9 @@ class PerfTestHistogramWriter : public PerfTestResultWriter {
}
private:
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
std::map<std::string, std::unique_ptr<catapult::HistogramBuilder>> histograms_
- RTC_GUARDED_BY(&crit_);
+ RTC_GUARDED_BY(&mutex_);
};
} // namespace
diff --git a/test/testsupport/resources_dir_flag.cc b/test/testsupport/resources_dir_flag.cc
index a6ab3b537b..87a449a401 100644
--- a/test/testsupport/resources_dir_flag.cc
+++ b/test/testsupport/resources_dir_flag.cc
@@ -10,6 +10,8 @@
#include "test/testsupport/resources_dir_flag.h"
+#include "absl/flags/flag.h"
+
ABSL_FLAG(std::string,
resources_dir,
"",
diff --git a/test/testsupport/resources_dir_flag.h b/test/testsupport/resources_dir_flag.h
index 055cc82546..7d6f192d9b 100644
--- a/test/testsupport/resources_dir_flag.h
+++ b/test/testsupport/resources_dir_flag.h
@@ -13,7 +13,7 @@
#ifndef TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
#define TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
-#include "absl/flags/flag.h"
+#include "absl/flags/declare.h"
ABSL_DECLARE_FLAG(std::string, resources_dir);
diff --git a/test/testsupport/test_artifacts_unittest.cc b/test/testsupport/test_artifacts_unittest.cc
index 98de9e4bb8..fb577610fb 100644
--- a/test/testsupport/test_artifacts_unittest.cc
+++ b/test/testsupport/test_artifacts_unittest.cc
@@ -14,6 +14,7 @@
#include <string>
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "rtc_base/system/file_wrapper.h"
#include "test/gtest.h"
diff --git a/test/testsupport/video_frame_writer.h b/test/testsupport/video_frame_writer.h
index db1d453775..f4af378b12 100644
--- a/test/testsupport/video_frame_writer.h
+++ b/test/testsupport/video_frame_writer.h
@@ -16,7 +16,6 @@
#include "api/video/video_frame.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/critical_section.h"
#include "test/testsupport/frame_writer.h"
namespace webrtc {
diff --git a/test/time_controller/BUILD.gn b/test/time_controller/BUILD.gn
index 7f77f0afec..ac396b9997 100644
--- a/test/time_controller/BUILD.gn
+++ b/test/time_controller/BUILD.gn
@@ -37,12 +37,13 @@ rtc_library("time_controller") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:rtc_event",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/synchronization:yield_policy",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (rtc_include_tests) {
diff --git a/test/time_controller/simulated_process_thread.cc b/test/time_controller/simulated_process_thread.cc
index df90f54ed6..e001841ac0 100644
--- a/test/time_controller/simulated_process_thread.cc
+++ b/test/time_controller/simulated_process_thread.cc
@@ -39,7 +39,7 @@ SimulatedProcessThread::~SimulatedProcessThread() {
void SimulatedProcessThread::RunReady(Timestamp at_time) {
CurrentTaskQueueSetter set_current(this);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
std::vector<Module*> ready_modules;
for (auto it = delayed_modules_.begin();
it != delayed_modules_.end() && it->first <= at_time;
@@ -63,10 +63,10 @@ void SimulatedProcessThread::RunReady(Timestamp at_time) {
while (!queue_.empty()) {
std::unique_ptr<QueuedTask> task = std::move(queue_.front());
queue_.pop_front();
- lock_.Leave();
+ lock_.Unlock();
bool should_delete = task->Run();
RTC_CHECK(should_delete);
- lock_.Enter();
+ lock_.Lock();
}
RTC_DCHECK(queue_.empty());
if (!delayed_modules_.empty()) {
@@ -81,7 +81,7 @@ void SimulatedProcessThread::RunReady(Timestamp at_time) {
void SimulatedProcessThread::Start() {
std::vector<Module*> starting;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (process_thread_running_)
return;
process_thread_running_ = true;
@@ -91,7 +91,7 @@ void SimulatedProcessThread::Start() {
module->ProcessThreadAttached(this);
Timestamp at_time = handler_->CurrentTime();
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (auto& module : starting)
delayed_modules_[GetNextTime(module, at_time)].push_back(module);
@@ -107,7 +107,7 @@ void SimulatedProcessThread::Start() {
void SimulatedProcessThread::Stop() {
std::vector<Module*> stopping;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
process_thread_running_ = false;
for (auto& delayed : delayed_modules_) {
@@ -123,7 +123,7 @@ void SimulatedProcessThread::Stop() {
}
void SimulatedProcessThread::WakeUp(Module* module) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (auto it = delayed_modules_.begin(); it != delayed_modules_.end(); ++it) {
if (RemoveByValue(&it->second, module))
break;
@@ -136,7 +136,7 @@ void SimulatedProcessThread::WakeUp(Module* module) {
void SimulatedProcessThread::RegisterModule(Module* module,
const rtc::Location& from) {
module->ProcessThreadAttached(this);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (!process_thread_running_) {
stopped_modules_.push_back(module);
} else {
@@ -149,7 +149,7 @@ void SimulatedProcessThread::RegisterModule(Module* module,
void SimulatedProcessThread::DeRegisterModule(Module* module) {
bool modules_running;
{
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (!process_thread_running_) {
RemoveByValue(&stopped_modules_, module);
} else {
@@ -165,14 +165,14 @@ void SimulatedProcessThread::DeRegisterModule(Module* module) {
}
void SimulatedProcessThread::PostTask(std::unique_ptr<QueuedTask> task) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
queue_.emplace_back(std::move(task));
next_run_time_ = Timestamp::MinusInfinity();
}
void SimulatedProcessThread::PostDelayedTask(std::unique_ptr<QueuedTask> task,
uint32_t milliseconds) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
Timestamp target_time =
handler_->CurrentTime() + TimeDelta::Millis(milliseconds);
delayed_tasks_[target_time].push_back(std::move(task));
diff --git a/test/time_controller/simulated_process_thread.h b/test/time_controller/simulated_process_thread.h
index 6026826d2f..54d5db7df8 100644
--- a/test/time_controller/simulated_process_thread.h
+++ b/test/time_controller/simulated_process_thread.h
@@ -16,6 +16,7 @@
#include <memory>
#include <vector>
+#include "rtc_base/synchronization/mutex.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
@@ -29,7 +30,7 @@ class SimulatedProcessThread : public ProcessThread,
void RunReady(Timestamp at_time) override;
Timestamp GetNextRunTime() const override {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
return next_run_time_;
}
@@ -55,7 +56,7 @@ class SimulatedProcessThread : public ProcessThread,
sim_time_impl::SimulatedTimeControllerImpl* const handler_;
// Using char* to be debugger friendly.
char* name_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity();
std::deque<std::unique_ptr<QueuedTask>> queue_;
diff --git a/test/time_controller/simulated_task_queue.cc b/test/time_controller/simulated_task_queue.cc
index 6bc96c73b9..da675af81e 100644
--- a/test/time_controller/simulated_task_queue.cc
+++ b/test/time_controller/simulated_task_queue.cc
@@ -27,16 +27,22 @@ SimulatedTaskQueue::~SimulatedTaskQueue() {
}
void SimulatedTaskQueue::Delete() {
+ // Need to destroy the tasks outside of the lock because task destruction
+ // can lead to re-entry in SimulatedTaskQueue via custom destructors.
+ std::deque<std::unique_ptr<QueuedTask>> ready_tasks;
+ std::map<Timestamp, std::vector<std::unique_ptr<QueuedTask>>> delayed_tasks;
{
- rtc::CritScope lock(&lock_);
- ready_tasks_.clear();
- delayed_tasks_.clear();
+ MutexLock lock(&lock_);
+ ready_tasks_.swap(ready_tasks);
+ delayed_tasks_.swap(delayed_tasks);
}
+ ready_tasks.clear();
+ delayed_tasks.clear();
delete this;
}
void SimulatedTaskQueue::RunReady(Timestamp at_time) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (auto it = delayed_tasks_.begin();
it != delayed_tasks_.end() && it->first <= at_time;
it = delayed_tasks_.erase(it)) {
@@ -48,14 +54,14 @@ void SimulatedTaskQueue::RunReady(Timestamp at_time) {
while (!ready_tasks_.empty()) {
std::unique_ptr<QueuedTask> ready = std::move(ready_tasks_.front());
ready_tasks_.pop_front();
- lock_.Leave();
+ lock_.Unlock();
bool delete_task = ready->Run();
if (delete_task) {
ready.reset();
} else {
ready.release();
}
- lock_.Enter();
+ lock_.Lock();
}
if (!delayed_tasks_.empty()) {
next_run_time_ = delayed_tasks_.begin()->first;
@@ -65,14 +71,14 @@ void SimulatedTaskQueue::RunReady(Timestamp at_time) {
}
void SimulatedTaskQueue::PostTask(std::unique_ptr<QueuedTask> task) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
ready_tasks_.emplace_back(std::move(task));
next_run_time_ = Timestamp::MinusInfinity();
}
void SimulatedTaskQueue::PostDelayedTask(std::unique_ptr<QueuedTask> task,
uint32_t milliseconds) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
Timestamp target_time =
handler_->CurrentTime() + TimeDelta::Millis(milliseconds);
delayed_tasks_[target_time].push_back(std::move(task));
diff --git a/test/time_controller/simulated_task_queue.h b/test/time_controller/simulated_task_queue.h
index 940117c85b..5035f799fc 100644
--- a/test/time_controller/simulated_task_queue.h
+++ b/test/time_controller/simulated_task_queue.h
@@ -15,6 +15,7 @@
#include <memory>
#include <vector>
+#include "rtc_base/synchronization/mutex.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
@@ -30,7 +31,7 @@ class SimulatedTaskQueue : public TaskQueueBase,
void RunReady(Timestamp at_time) override;
Timestamp GetNextRunTime() const override {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
return next_run_time_;
}
TaskQueueBase* GetAsTaskQueue() override { return this; }
@@ -46,7 +47,7 @@ class SimulatedTaskQueue : public TaskQueueBase,
// Using char* to be debugger friendly.
char* name_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
std::deque<std::unique_ptr<QueuedTask>> ready_tasks_ RTC_GUARDED_BY(lock_);
std::map<Timestamp, std::vector<std::unique_ptr<QueuedTask>>> delayed_tasks_
diff --git a/test/time_controller/simulated_thread.cc b/test/time_controller/simulated_thread.cc
index 8d1637c352..807126467a 100644
--- a/test/time_controller/simulated_thread.cc
+++ b/test/time_controller/simulated_thread.cc
@@ -59,7 +59,7 @@ void SimulatedThread::RunReady(Timestamp at_time) {
CurrentThreadSetter set_current(this);
ProcessMessages(0);
int delay_ms = GetDelay();
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
if (delay_ms == kForever) {
next_run_time_ = Timestamp::PlusInfinity();
} else {
@@ -95,7 +95,7 @@ void SimulatedThread::Post(const rtc::Location& posted_from,
rtc::MessageData* pdata,
bool time_sensitive) {
rtc::Thread::Post(posted_from, phandler, id, pdata, time_sensitive);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
next_run_time_ = Timestamp::MinusInfinity();
}
@@ -105,7 +105,7 @@ void SimulatedThread::PostDelayed(const rtc::Location& posted_from,
uint32_t id,
rtc::MessageData* pdata) {
rtc::Thread::PostDelayed(posted_from, delay_ms, phandler, id, pdata);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
next_run_time_ =
std::min(next_run_time_, Timestamp::Millis(rtc::TimeMillis() + delay_ms));
}
@@ -116,7 +116,7 @@ void SimulatedThread::PostAt(const rtc::Location& posted_from,
uint32_t id,
rtc::MessageData* pdata) {
rtc::Thread::PostAt(posted_from, target_time_ms, phandler, id, pdata);
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
next_run_time_ = std::min(next_run_time_, Timestamp::Millis(target_time_ms));
}
diff --git a/test/time_controller/simulated_thread.h b/test/time_controller/simulated_thread.h
index fd3969670a..b6c1e6e265 100644
--- a/test/time_controller/simulated_thread.h
+++ b/test/time_controller/simulated_thread.h
@@ -12,6 +12,7 @@
#include <memory>
+#include "rtc_base/synchronization/mutex.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
@@ -28,7 +29,7 @@ class SimulatedThread : public rtc::Thread,
void RunReady(Timestamp at_time) override;
Timestamp GetNextRunTime() const override {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
return next_run_time_;
}
@@ -61,7 +62,7 @@ class SimulatedThread : public rtc::Thread,
sim_time_impl::SimulatedTimeControllerImpl* const handler_;
// Using char* to be debugger friendly.
char* name_;
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity();
};
diff --git a/test/time_controller/simulated_time_controller.cc b/test/time_controller/simulated_time_controller.cc
index a81083b4fb..aba8c6600e 100644
--- a/test/time_controller/simulated_time_controller.cc
+++ b/test/time_controller/simulated_time_controller.cc
@@ -57,7 +57,6 @@ SimulatedTimeControllerImpl::CreateTaskQueue(
std::unique_ptr<ProcessThread> SimulatedTimeControllerImpl::CreateProcessThread(
const char* thread_name) {
- rtc::CritScope lock(&lock_);
auto process_thread =
std::make_unique<SimulatedProcessThread>(this, thread_name);
Register(process_thread.get());
@@ -96,7 +95,7 @@ void SimulatedTimeControllerImpl::RunReadyRunners() {
// Using a dummy thread rather than nullptr to avoid implicit thread creation
// by Thread::Current().
SimulatedThread::CurrentThreadSetter set_current(dummy_thread_.get());
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
RTC_DCHECK_EQ(rtc::CurrentThreadId(), thread_id_);
Timestamp current_time = CurrentTime();
// Clearing |ready_runners_| in case this is a recursive call:
@@ -117,23 +116,25 @@ void SimulatedTimeControllerImpl::RunReadyRunners() {
while (!ready_runners_.empty()) {
auto* runner = ready_runners_.front();
ready_runners_.pop_front();
+ lock_.Unlock();
// Note that the RunReady function might indirectly cause a call to
- // Unregister() which will recursively grab |lock_| again to remove items
- // from |ready_runners_|.
+ // Unregister() which will grab |lock_| again to remove items from
+ // |ready_runners_|.
runner->RunReady(current_time);
+ lock_.Lock();
}
}
}
Timestamp SimulatedTimeControllerImpl::CurrentTime() const {
- rtc::CritScope lock(&time_lock_);
+ MutexLock lock(&time_lock_);
return current_time_;
}
Timestamp SimulatedTimeControllerImpl::NextRunTime() const {
Timestamp current_time = CurrentTime();
Timestamp next_time = Timestamp::PlusInfinity();
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
for (auto* runner : runners_) {
Timestamp next_run_time = runner->GetNextRunTime();
if (next_run_time <= current_time)
@@ -144,18 +145,18 @@ Timestamp SimulatedTimeControllerImpl::NextRunTime() const {
}
void SimulatedTimeControllerImpl::AdvanceTime(Timestamp target_time) {
- rtc::CritScope time_lock(&time_lock_);
+ MutexLock time_lock(&time_lock_);
RTC_DCHECK_GE(target_time, current_time_);
current_time_ = target_time;
}
void SimulatedTimeControllerImpl::Register(SimulatedSequenceRunner* runner) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
runners_.push_back(runner);
}
void SimulatedTimeControllerImpl::Unregister(SimulatedSequenceRunner* runner) {
- rtc::CritScope lock(&lock_);
+ MutexLock lock(&lock_);
bool removed = RemoveByValue(&runners_, runner);
RTC_CHECK(removed);
RemoveByValue(&ready_runners_, runner);
@@ -169,6 +170,7 @@ void SimulatedTimeControllerImpl::StartYield(TaskQueueBase* yielding_from) {
void SimulatedTimeControllerImpl::StopYield(TaskQueueBase* yielding_from) {
yielded_.erase(yielding_from);
}
+
} // namespace sim_time_impl
GlobalSimulatedTimeController::GlobalSimulatedTimeController(
diff --git a/test/time_controller/simulated_time_controller.h b/test/time_controller/simulated_time_controller.h
index 758f90989e..6c6dbfab9d 100644
--- a/test/time_controller/simulated_time_controller.h
+++ b/test/time_controller/simulated_time_controller.h
@@ -21,9 +21,9 @@
#include "api/units/timestamp.h"
#include "modules/include/module.h"
#include "modules/utility/include/process_thread.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/yield_policy.h"
#include "rtc_base/thread_checker.h"
@@ -52,32 +52,34 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory,
std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateTaskQueue(
absl::string_view name,
- Priority priority) const override;
+ Priority priority) const RTC_LOCKS_EXCLUDED(time_lock_) override;
// Implements the YieldInterface by running ready tasks on all task queues,
// except that if this method is called from a task, the task queue running
// that task is skipped.
- void YieldExecution() override;
+ void YieldExecution() RTC_LOCKS_EXCLUDED(time_lock_, lock_) override;
// Create process thread with the name |thread_name|.
- std::unique_ptr<ProcessThread> CreateProcessThread(const char* thread_name);
+ std::unique_ptr<ProcessThread> CreateProcessThread(const char* thread_name)
+ RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Create thread using provided |socket_server|.
std::unique_ptr<rtc::Thread> CreateThread(
const std::string& name,
- std::unique_ptr<rtc::SocketServer> socket_server);
+ std::unique_ptr<rtc::SocketServer> socket_server)
+ RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Runs all runners in |runners_| that has tasks or modules ready for
// execution.
- void RunReadyRunners();
+ void RunReadyRunners() RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Return |current_time_|.
- Timestamp CurrentTime() const;
+ Timestamp CurrentTime() const RTC_LOCKS_EXCLUDED(time_lock_);
// Return min of runner->GetNextRunTime() for runner in |runners_|.
- Timestamp NextRunTime() const;
+ Timestamp NextRunTime() const RTC_LOCKS_EXCLUDED(lock_);
// Set |current_time_| to |target_time|.
- void AdvanceTime(Timestamp target_time);
+ void AdvanceTime(Timestamp target_time) RTC_LOCKS_EXCLUDED(time_lock_);
// Adds |runner| to |runners_|.
- void Register(SimulatedSequenceRunner* runner);
+ void Register(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
// Removes |runner| from |runners_|.
- void Unregister(SimulatedSequenceRunner* runner);
+ void Unregister(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
// Indicates that |yielding_from| is not ready to run.
void StartYield(TaskQueueBase* yielding_from);
@@ -87,9 +89,9 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory,
private:
const rtc::PlatformThreadId thread_id_;
const std::unique_ptr<rtc::Thread> dummy_thread_ = rtc::Thread::Create();
- rtc::CriticalSection time_lock_;
+ mutable Mutex time_lock_;
Timestamp current_time_ RTC_GUARDED_BY(time_lock_);
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
std::vector<SimulatedSequenceRunner*> runners_ RTC_GUARDED_BY(lock_);
// Used in RunReadyRunners() to keep track of ready runners that are to be
// processed in a round robin fashion. the reason it's a member is so that
diff --git a/third_party/libaom/source/libaom/aom/aom_codec.h b/third_party/libaom/source/libaom/aom/aom_codec.h
new file mode 100644
index 0000000000..7268ef9b38
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aom_codec.h
@@ -0,0 +1 @@
+#include <aom/aom_codec.h>
diff --git a/third_party/libaom/source/libaom/aom/aom_decoder.h b/third_party/libaom/source/libaom/aom/aom_decoder.h
new file mode 100644
index 0000000000..b3b05f15ba
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aom_decoder.h
@@ -0,0 +1 @@
+#include <aom/aom_decoder.h>
diff --git a/third_party/libaom/source/libaom/aom/aom_encoder.h b/third_party/libaom/source/libaom/aom/aom_encoder.h
new file mode 100644
index 0000000000..0acfe16341
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aom_encoder.h
@@ -0,0 +1 @@
+#include <aom/aom_encoder.h>
diff --git a/third_party/libaom/source/libaom/aom/aomcx.h b/third_party/libaom/source/libaom/aom/aomcx.h
new file mode 100644
index 0000000000..7b01eee839
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aomcx.h
@@ -0,0 +1,12 @@
+#pragma once
+
+#include <aom/aomcx.h>
+
+#include <stdlib.h>
+
+aom_codec_iface_t *aom_codec_av1_cx(void) {
+ // external/libaom doesn't include encoder sources, which makes this function
+ // not available. Defined here to make it build, but ensure it fails fast if
+ // called in runtime.
+ abort();
+}
diff --git a/third_party/libaom/source/libaom/aom/aomdx.h b/third_party/libaom/source/libaom/aom/aomdx.h
new file mode 100644
index 0000000000..8f7ff065d3
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aomdx.h
@@ -0,0 +1 @@
+#include <aom/aomdx.h>
diff --git a/third_party/libsrtp/crypto/include/auth.h b/third_party/libsrtp/crypto/include/auth.h
new file mode 100644
index 0000000000..5d10300653
--- /dev/null
+++ b/third_party/libsrtp/crypto/include/auth.h
@@ -0,0 +1 @@
+#include <auth.h>
diff --git a/third_party/libsrtp/crypto/include/crypto_types.h b/third_party/libsrtp/crypto/include/crypto_types.h
new file mode 100644
index 0000000000..1919b948b2
--- /dev/null
+++ b/third_party/libsrtp/crypto/include/crypto_types.h
@@ -0,0 +1 @@
+#include <crypto_types.h>
diff --git a/third_party/libsrtp/include/srtp.h b/third_party/libsrtp/include/srtp.h
new file mode 100644
index 0000000000..d06cc1cc23
--- /dev/null
+++ b/third_party/libsrtp/include/srtp.h
@@ -0,0 +1 @@
+#include <srtp.h>
diff --git a/third_party/libsrtp/include/srtp_priv.h b/third_party/libsrtp/include/srtp_priv.h
new file mode 100644
index 0000000000..537f267d2a
--- /dev/null
+++ b/third_party/libsrtp/include/srtp_priv.h
@@ -0,0 +1 @@
+#include <srtp_priv.h>
diff --git a/third_party/libyuv/include/libyuv.h b/third_party/libyuv/include/libyuv.h
new file mode 100644
index 0000000000..a5cf1a1463
--- /dev/null
+++ b/third_party/libyuv/include/libyuv.h
@@ -0,0 +1 @@
+#include <libyuv.h>
diff --git a/third_party/libyuv/include/libyuv/compare.h b/third_party/libyuv/include/libyuv/compare.h
new file mode 100644
index 0000000000..4824a65daa
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/compare.h
@@ -0,0 +1 @@
+#include <libyuv/compare.h>
diff --git a/third_party/libyuv/include/libyuv/convert.h b/third_party/libyuv/include/libyuv/convert.h
new file mode 100644
index 0000000000..c29c3d89e8
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/convert.h
@@ -0,0 +1 @@
+#include <libyuv/convert.h>
diff --git a/third_party/libyuv/include/libyuv/convert_argb.h b/third_party/libyuv/include/libyuv/convert_argb.h
new file mode 100644
index 0000000000..333b20d819
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/convert_argb.h
@@ -0,0 +1 @@
+#include <libyuv/convert_argb.h>
diff --git a/third_party/libyuv/include/libyuv/convert_from.h b/third_party/libyuv/include/libyuv/convert_from.h
new file mode 100644
index 0000000000..eaf3c30268
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/convert_from.h
@@ -0,0 +1 @@
+#include <libyuv/convert_from.h>
diff --git a/third_party/libyuv/include/libyuv/planar_functions.h b/third_party/libyuv/include/libyuv/planar_functions.h
new file mode 100644
index 0000000000..773117898f
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/planar_functions.h
@@ -0,0 +1 @@
+#include <libyuv/planar_functions.h>
diff --git a/third_party/libyuv/include/libyuv/rotate_argb.h b/third_party/libyuv/include/libyuv/rotate_argb.h
new file mode 100644
index 0000000000..607a75c20f
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/rotate_argb.h
@@ -0,0 +1 @@
+#include <libyuv/rotate_argb.h>
diff --git a/third_party/libyuv/include/libyuv/scale.h b/third_party/libyuv/include/libyuv/scale.h
new file mode 100644
index 0000000000..2cb77398e6
--- /dev/null
+++ b/third_party/libyuv/include/libyuv/scale.h
@@ -0,0 +1 @@
+#include <libyuv/scale.h>
diff --git a/third_party/pffft/src/pffft.h b/third_party/pffft/src/pffft.h
new file mode 100644
index 0000000000..656762f3d3
--- /dev/null
+++ b/third_party/pffft/src/pffft.h
@@ -0,0 +1 @@
+#include <pffft.h>
diff --git a/third_party/rnnoise/src/rnn_activations.h b/third_party/rnnoise/src/rnn_activations.h
new file mode 100644
index 0000000000..dbc9fcd2ba
--- /dev/null
+++ b/third_party/rnnoise/src/rnn_activations.h
@@ -0,0 +1 @@
+#include <rnn_activations.h>
diff --git a/third_party/rnnoise/src/rnn_vad_weights.h b/third_party/rnnoise/src/rnn_vad_weights.h
new file mode 100644
index 0000000000..2943d00dc1
--- /dev/null
+++ b/third_party/rnnoise/src/rnn_vad_weights.h
@@ -0,0 +1 @@
+#include <rnn_vad_weights.h>
diff --git a/tools_webrtc/autoroller/roll_deps.py b/tools_webrtc/autoroller/roll_deps.py
index 77dd7d8d73..0c8df5b262 100755
--- a/tools_webrtc/autoroller/roll_deps.py
+++ b/tools_webrtc/autoroller/roll_deps.py
@@ -97,6 +97,10 @@ class RollError(Exception):
pass
+def StrExpansion():
+ return lambda str_value: str_value
+
+
def VarLookup(local_scope):
return lambda var_name: local_scope['vars'][var_name]
@@ -104,6 +108,7 @@ def VarLookup(local_scope):
def ParseDepsDict(deps_content):
local_scope = {}
global_scope = {
+ 'Str': StrExpansion(),
'Var': VarLookup(local_scope),
'deps_os': {},
}
@@ -598,11 +603,11 @@ def _UploadCL(commit_queue_mode):
- 1: Run trybots but do not submit to CQ.
- 0: Skip CQ, upload only.
"""
- cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks', '--send-mail']
- cmd.extend(['--cc', NOTIFY_EMAIL])
+ cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks']
if commit_queue_mode >= 2:
logging.info('Sending the CL to the CQ...')
cmd.extend(['--use-commit-queue'])
+ cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL])
elif commit_queue_mode >= 1:
logging.info('Starting CQ dry run...')
cmd.extend(['--cq-dry-run'])
diff --git a/tools_webrtc/autoroller/unittests/roll_deps_test.py b/tools_webrtc/autoroller/unittests/roll_deps_test.py
index 8d68bddc35..8f6d57efea 100755
--- a/tools_webrtc/autoroller/unittests/roll_deps_test.py
+++ b/tools_webrtc/autoroller/unittests/roll_deps_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@@ -24,8 +24,6 @@ from roll_deps import CalculateChangedDeps, FindAddedDeps, \
GetMatchingDepsEntries, ParseDepsDict, ParseLocalDepsFile, UpdateDepsFile, \
ChromiumRevisionUpdate
-SRC_DIR = os.path.join(PARENT_DIR, os.pardir, os.pardir)
-sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock'))
import mock
TEST_DATA_VARS = {
diff --git a/tools_webrtc/get_landmines.py b/tools_webrtc/get_landmines.py
index ba8ac9c1bf..399fb0ad72 100755
--- a/tools_webrtc/get_landmines.py
+++ b/tools_webrtc/get_landmines.py
@@ -43,6 +43,7 @@ def print_landmines(): # pylint: disable=invalid-name
'https://codereview.webrtc.org/2786603002')
print ('Clobber due to Win Debug linking errors in '
'https://codereview.webrtc.org/2832063003/')
+ print 'Clobber win x86 bots (issues with isolated files).'
if host_os() == 'mac':
print 'Clobber due to iOS compile errors (crbug.com/694721)'
print 'Clobber to unblock https://codereview.webrtc.org/2709573003'
diff --git a/tools_webrtc/libs/generate_licenses.py b/tools_webrtc/libs/generate_licenses.py
index 04d655cba0..0e5a9ef279 100755
--- a/tools_webrtc/libs/generate_licenses.py
+++ b/tools_webrtc/libs/generate_licenses.py
@@ -78,6 +78,7 @@ LIB_TO_LICENSES_DICT = {
# Compile time dependencies, no license needed:
'yasm': [],
'ow2_asm': [],
+ 'jdk': [],
}
# Third_party library _regex_ to licences mapping. Keys are regular expression
diff --git a/tools_webrtc/libs/generate_licenses_test.py b/tools_webrtc/libs/generate_licenses_test.py
index 03f14459c2..fa05eaa12f 100755
--- a/tools_webrtc/libs/generate_licenses_test.py
+++ b/tools_webrtc/libs/generate_licenses_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython
# pylint: disable=relative-import,protected-access,unused-argument
# Copyright 2017 The WebRTC project authors. All Rights Reserved.
@@ -9,13 +9,6 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-import os
-import sys
-
-SRC = os.path.abspath(
- os.path.join(os.path.dirname((__file__)), os.pardir, os.pardir))
-sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
-
import unittest
import mock
diff --git a/tools_webrtc/mb/mb_config.pyl b/tools_webrtc/mb/mb_config.pyl
index 059c724194..20755df9ee 100644
--- a/tools_webrtc/mb/mb_config.pyl
+++ b/tools_webrtc/mb/mb_config.pyl
@@ -108,7 +108,7 @@
# build anything).
# TODO(http://crbug.com/1029452): Nuke these and isolate on builder
# instead?
- 'Perf Android32 (K Nexus5)': 'release_bot_x64',
+ 'Perf Android32 (M Nexus5)': 'release_bot_x64',
'Perf Android32 (M AOSP Nexus6)': 'release_bot_x64',
'Perf Android64 (M Nexus5X)': 'release_bot_x64',
'Perf Android64 (O Pixel2)': 'release_bot_x64',
@@ -404,13 +404,16 @@
'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'ios_use_goma_rbe'
],
'ios_internal_debug_bot_arm64': [
- 'ios', 'debug_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'debug_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_internal_release_bot_arm64': [
- 'ios', 'release_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'release_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_internal_pure_release_bot_arm64': [
- 'ios', 'pure_release_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'pure_release_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_debug_bot_x64': [
'ios', 'debug_bot', 'x64', 'ios_use_goma_rbe'
@@ -506,6 +509,10 @@
'gn_args': 'use_goma=true',
},
+ 'ios_code_signing_identity_description': {
+ 'gn_args': 'ios_code_signing_identity_description="Apple Development"',
+ },
+
'ios_use_goma_rbe': {
'gn_args': 'ios_use_goma_rbe=true',
},
diff --git a/tools_webrtc/mb/mb_unittest.py b/tools_webrtc/mb/mb_unittest.py
index c1e477c104..0cf93bb7f0 100755
--- a/tools_webrtc/mb/mb_unittest.py
+++ b/tools_webrtc/mb/mb_unittest.py
@@ -243,7 +243,7 @@ class UnitTest(unittest.TestCase):
mbw=mbw, ret=0)
self.assertEqual(
mbw.files['/fake_src/out/Debug/args.gn'],
- 'import("//build/args/bots/fake_master/fake_args_bot.gn")\n')
+ 'import("//build/args/bots/fake_master/fake_args_bot.gn")\n\n')
def test_gen_fails(self):
diff --git a/tools_webrtc/msan/blacklist.txt b/tools_webrtc/msan/suppressions.txt
index 3c9c9b202b..ce8b14292e 100644
--- a/tools_webrtc/msan/blacklist.txt
+++ b/tools_webrtc/msan/suppressions.txt
@@ -10,3 +10,6 @@
# Uninit in zlib. http://crbug.com/116277
fun:*MOZ_Z_deflate*
+# Uninit in H264. http://crbug.com/webrtc/11702
+src:*/third_party/openh264/src/codec/processing/src/vaacalc/vaacalcfuncs.cpp
+
diff --git a/tools_webrtc/sancov/README b/tools_webrtc/sancov/README
deleted file mode 100644
index c9b43e7ae0..0000000000
--- a/tools_webrtc/sancov/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Blacklist for llvm's sancov
-
-See http://clang.llvm.org/docs/SanitizerCoverage.html .
-
-Example usage:
-> cd out/Debug
-> UBSAN_OPTIONS="coverage=1" ./peerconnection_unittests
-> sancov -html-report -blacklist ../../tools/sancov/blacklist.txt \
-> peerconnection_unittests peerconnection_unittests.123.sancov
diff --git a/tools_webrtc/sancov/blacklist.txt b/tools_webrtc/sancov/blacklist.txt
deleted file mode 100644
index 7043a18ef2..0000000000
--- a/tools_webrtc/sancov/blacklist.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-#############################################################################
-# sancov blacklist.
-# Please think twice before you add or remove these rules.
-
-#############################################################################
-# no coverage report for third party
-src:*/chromium/*
-src:*/third_party/*
-
-# OpenH264 triggers some errors that are out of our control.
-src:*/third_party/ffmpeg/libavcodec/*
-src:*/third_party/openh264/*
-
-#############################################################################
-# Ignore system libraries.
-src:*/usr/*
-
-#############################################################################
-# Ignore test source.
-src:*/test/*
-src:*_unittest.cc
diff --git a/tools_webrtc/ubsan/blacklist.txt b/tools_webrtc/ubsan/suppressions.txt
index 50b66e915a..50b66e915a 100644
--- a/tools_webrtc/ubsan/blacklist.txt
+++ b/tools_webrtc/ubsan/suppressions.txt
diff --git a/tools_webrtc/ubsan/vptr_blacklist.txt b/tools_webrtc/ubsan/vptr_suppressions.txt
index 739de36659..739de36659 100644
--- a/tools_webrtc/ubsan/vptr_blacklist.txt
+++ b/tools_webrtc/ubsan/vptr_suppressions.txt
diff --git a/video/BUILD.gn b/video/BUILD.gn
index bfc3b0695c..97ed9655a8 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -34,6 +34,8 @@ rtc_library("video") {
"rtp_streams_synchronizer2.h",
"rtp_video_stream_receiver.cc",
"rtp_video_stream_receiver.h",
+ "rtp_video_stream_receiver2.cc",
+ "rtp_video_stream_receiver2.h",
"rtp_video_stream_receiver_frame_transformer_delegate.cc",
"rtp_video_stream_receiver_frame_transformer_delegate.h",
"send_delay_stats.cc",
@@ -77,7 +79,6 @@ rtc_library("video") {
"../api/crypto:options",
"../api/rtc_event_log",
"../api/task_queue",
- "../api/transport/media:media_transport_interface",
"../api/units:timestamp",
"../api/video:encoded_image",
"../api/video:recordable_encoded_frame",
@@ -111,6 +112,7 @@ rtc_library("video") {
"../modules/video_coding:nack_module",
"../modules/video_coding:video_codec_interface",
"../modules/video_coding:video_coding_utility",
+ "../modules/video_coding/deprecated:nack_module",
"../modules/video_processing",
"../rtc_base:checks",
"../rtc_base:rate_limiter",
@@ -126,6 +128,7 @@ rtc_library("video") {
"../rtc_base/experiments:min_video_bitrate_experiment",
"../rtc_base/experiments:quality_scaling_experiment",
"../rtc_base/experiments:rate_control_settings",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:thread_registry",
"../rtc_base/task_utils:pending_task_safety_flag",
@@ -135,6 +138,8 @@ rtc_library("video") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
@@ -165,9 +170,10 @@ rtc_library("video_stream_decoder_impl") {
"../modules/video_coding",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
+ "../rtc_base/synchronization:mutex",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("frame_dumping_decoder") {
@@ -209,6 +215,7 @@ rtc_library("video_stream_encoder_impl") {
deps = [
"../api:rtp_parameters",
+ "../api/adaptation:resource_adaptation_api",
"../api/task_queue:task_queue",
"../api/units:data_rate",
"../api/video:encoded_image",
@@ -245,11 +252,14 @@ rtc_library("video_stream_encoder_impl") {
"../rtc_base/experiments:quality_scaler_settings",
"../rtc_base/experiments:quality_scaling_experiment",
"../rtc_base/experiments:rate_control_settings",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:repeating_task",
"../system_wrappers",
"../system_wrappers:field_trial",
"adaptation:video_adaptation",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -291,7 +301,6 @@ if (rtc_include_tests) {
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_bitrate_allocator_factory",
"../api/video:video_frame",
@@ -320,6 +329,7 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_tests_utils",
"../rtc_base:rtc_numerics",
"../rtc_base:task_queue_for_test",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/task_utils:repeating_task",
"../system_wrappers",
"../test:fake_video_codecs",
@@ -333,6 +343,8 @@ if (rtc_include_tests) {
"../test:test_support_test_artifacts",
"../test:video_test_common",
"../test:video_test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
@@ -363,8 +375,11 @@ if (rtc_include_tests) {
"../test:test_common",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -381,6 +396,7 @@ if (rtc_include_tests) {
"../api:network_emulation_manager_api",
"../api:peer_connection_quality_test_fixture_api",
"../api:simulated_network_api",
+ "../api:time_controller",
"../call:simulated_network",
"../media:rtc_vp9_profile",
"../modules/video_coding:webrtc_vp9",
@@ -415,8 +431,11 @@ if (rtc_include_tests) {
"../test:test_renderer",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -485,6 +504,7 @@ if (rtc_include_tests) {
"//testing/gtest",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -528,12 +548,14 @@ if (rtc_include_tests) {
"receive_statistics_proxy2_unittest.cc",
"receive_statistics_proxy_unittest.cc",
"report_block_stats_unittest.cc",
+ "rtp_video_stream_receiver2_unittest.cc",
"rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc",
"rtp_video_stream_receiver_unittest.cc",
"send_delay_stats_unittest.cc",
"send_statistics_proxy_unittest.cc",
"stats_counter_unittest.cc",
"stream_synchronization_unittest.cc",
+ "video_receive_stream2_unittest.cc",
"video_receive_stream_unittest.cc",
"video_send_stream_impl_unittest.cc",
"video_send_stream_tests.cc",
@@ -559,6 +581,7 @@ if (rtc_include_tests) {
"../api:scoped_refptr",
"../api:simulated_network_api",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/crypto:options",
"../api/rtc_event_log",
"../api/task_queue",
@@ -614,6 +637,7 @@ if (rtc_include_tests) {
"../modules/video_coding:webrtc_vp9",
"../rtc_base",
"../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
"../rtc_base:rate_limiter",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_base_tests_utils",
@@ -621,6 +645,7 @@ if (rtc_include_tests) {
"../rtc_base:rtc_task_queue",
"../rtc_base:task_queue_for_test",
"../rtc_base/experiments:alr_experiment",
+ "../rtc_base/synchronization:mutex",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers",
@@ -643,6 +668,8 @@ if (rtc_include_tests) {
"../test/time_controller",
"adaptation:video_adaptation",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/video/adaptation/BUILD.gn b/video/adaptation/BUILD.gn
index 122458631c..12e111bbc9 100644
--- a/video/adaptation/BUILD.gn
+++ b/video/adaptation/BUILD.gn
@@ -14,15 +14,22 @@ rtc_library("video_adaptation") {
"encode_usage_resource.h",
"overuse_frame_detector.cc",
"overuse_frame_detector.h",
+ "quality_rampup_experiment_helper.cc",
+ "quality_rampup_experiment_helper.h",
"quality_scaler_resource.cc",
"quality_scaler_resource.h",
+ "video_stream_encoder_resource.cc",
+ "video_stream_encoder_resource.h",
"video_stream_encoder_resource_manager.cc",
"video_stream_encoder_resource_manager.h",
]
deps = [
"../../api:rtp_parameters",
+ "../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
"../../api/task_queue:task_queue",
+ "../../api/units:data_rate",
"../../api/video:video_adaptation",
"../../api/video:video_frame",
"../../api/video:video_stream_encoder",
@@ -35,15 +42,20 @@ rtc_library("video_adaptation") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_event",
"../../rtc_base:rtc_numerics",
+ "../../rtc_base:rtc_task_queue",
"../../rtc_base:timeutils",
"../../rtc_base/experiments:balanced_degradation_settings",
"../../rtc_base/experiments:field_trial_parser",
"../../rtc_base/experiments:quality_rampup_experiment",
"../../rtc_base/experiments:quality_scaler_settings",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
+ "../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers:field_trial",
"../../system_wrappers:system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -57,10 +69,13 @@ if (rtc_include_tests) {
defines = []
sources = [
"overuse_frame_detector_unittest.cc",
- "video_stream_encoder_resource_manager_unittest.cc",
+ "quality_scaler_resource_unittest.cc",
]
deps = [
":video_adaptation",
+ "../../api:scoped_refptr",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/task_queue:task_queue",
"../../api/video:encoded_image",
"../../api/video:video_adaptation",
"../../api/video:video_frame_i420",
@@ -73,12 +88,13 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:rtc_event",
"../../rtc_base:rtc_numerics",
+ "../../rtc_base:rtc_task_queue",
"../../rtc_base:task_queue_for_test",
"../../test:field_trial",
"//test:rtc_expect_death",
"//test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/video/adaptation/encode_usage_resource.cc b/video/adaptation/encode_usage_resource.cc
index 7a42878aa9..d6f2334fa1 100644
--- a/video/adaptation/encode_usage_resource.cc
+++ b/video/adaptation/encode_usage_resource.cc
@@ -15,18 +15,35 @@
#include "api/video/video_adaptation_reason.h"
#include "rtc_base/checks.h"
+#include "rtc_base/ref_counted_object.h"
namespace webrtc {
+// static
+rtc::scoped_refptr<EncodeUsageResource> EncodeUsageResource::Create(
+ std::unique_ptr<OveruseFrameDetector> overuse_detector) {
+ return new rtc::RefCountedObject<EncodeUsageResource>(
+ std::move(overuse_detector));
+}
+
EncodeUsageResource::EncodeUsageResource(
std::unique_ptr<OveruseFrameDetector> overuse_detector)
- : overuse_detector_(std::move(overuse_detector)),
+ : VideoStreamEncoderResource("EncoderUsageResource"),
+ overuse_detector_(std::move(overuse_detector)),
is_started_(false),
target_frame_rate_(absl::nullopt) {
RTC_DCHECK(overuse_detector_);
}
+EncodeUsageResource::~EncodeUsageResource() {}
+
+bool EncodeUsageResource::is_started() const {
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ return is_started_;
+}
+
void EncodeUsageResource::StartCheckForOveruse(CpuOveruseOptions options) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
RTC_DCHECK(!is_started_);
overuse_detector_->StartCheckForOveruse(TaskQueueBase::Current(),
std::move(options), this);
@@ -35,12 +52,14 @@ void EncodeUsageResource::StartCheckForOveruse(CpuOveruseOptions options) {
}
void EncodeUsageResource::StopCheckForOveruse() {
+ RTC_DCHECK_RUN_ON(encoder_queue());
overuse_detector_->StopCheckForOveruse();
is_started_ = false;
}
void EncodeUsageResource::SetTargetFrameRate(
absl::optional<double> target_frame_rate) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
if (target_frame_rate == target_frame_rate_)
return;
target_frame_rate_ = target_frame_rate;
@@ -50,6 +69,7 @@ void EncodeUsageResource::SetTargetFrameRate(
void EncodeUsageResource::OnEncodeStarted(const VideoFrame& cropped_frame,
int64_t time_when_first_seen_us) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
// TODO(hbos): Rename FrameCaptured() to something more appropriate (e.g.
// "OnEncodeStarted"?) or revise usage.
overuse_detector_->FrameCaptured(cropped_frame, time_when_first_seen_us);
@@ -60,6 +80,7 @@ void EncodeUsageResource::OnEncodeCompleted(
int64_t time_sent_in_us,
int64_t capture_time_us,
absl::optional<int> encode_duration_us) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
// TODO(hbos): Rename FrameSent() to something more appropriate (e.g.
// "OnEncodeCompleted"?).
overuse_detector_->FrameSent(timestamp, time_sent_in_us, capture_time_us,
@@ -67,14 +88,29 @@ void EncodeUsageResource::OnEncodeCompleted(
}
void EncodeUsageResource::AdaptUp() {
- OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<EncodeUsageResource>(this)] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
+ });
}
void EncodeUsageResource::AdaptDown() {
- OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<EncodeUsageResource>(this)] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
+ });
}
int EncodeUsageResource::TargetFrameRateAsInt() {
+ RTC_DCHECK_RUN_ON(encoder_queue());
return target_frame_rate_.has_value()
? static_cast<int>(target_frame_rate_.value())
: std::numeric_limits<int>::max();
diff --git a/video/adaptation/encode_usage_resource.h b/video/adaptation/encode_usage_resource.h
index 7147569ffb..257988fa12 100644
--- a/video/adaptation/encode_usage_resource.h
+++ b/video/adaptation/encode_usage_resource.h
@@ -15,9 +15,12 @@
#include <string>
#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_reason.h"
-#include "call/adaptation/resource.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_queue.h"
#include "video/adaptation/overuse_frame_detector.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
@@ -26,13 +29,17 @@ namespace webrtc {
// indirectly by usage in the ResourceAdaptationProcessor (which is only tested
// because of its usage in VideoStreamEncoder); all tests are currently in
// video_stream_encoder_unittest.cc.
-class EncodeUsageResource : public Resource,
+class EncodeUsageResource : public VideoStreamEncoderResource,
public OveruseFrameDetectorObserverInterface {
public:
+ static rtc::scoped_refptr<EncodeUsageResource> Create(
+ std::unique_ptr<OveruseFrameDetector> overuse_detector);
+
explicit EncodeUsageResource(
std::unique_ptr<OveruseFrameDetector> overuse_detector);
+ ~EncodeUsageResource() override;
- bool is_started() const { return is_started_; }
+ bool is_started() const;
void StartCheckForOveruse(CpuOveruseOptions options);
void StopCheckForOveruse();
@@ -49,14 +56,13 @@ class EncodeUsageResource : public Resource,
void AdaptUp() override;
void AdaptDown() override;
- std::string name() const override { return "EncoderUsageResource"; }
-
private:
int TargetFrameRateAsInt();
- const std::unique_ptr<OveruseFrameDetector> overuse_detector_;
- bool is_started_;
- absl::optional<double> target_frame_rate_;
+ const std::unique_ptr<OveruseFrameDetector> overuse_detector_
+ RTC_GUARDED_BY(encoder_queue());
+ bool is_started_ RTC_GUARDED_BY(encoder_queue());
+ absl::optional<double> target_frame_rate_ RTC_GUARDED_BY(encoder_queue());
};
} // namespace webrtc
diff --git a/video/adaptation/overuse_frame_detector_unittest.cc b/video/adaptation/overuse_frame_detector_unittest.cc
index bb34224b02..d4bf910faa 100644
--- a/video/adaptation/overuse_frame_detector_unittest.cc
+++ b/video/adaptation/overuse_frame_detector_unittest.cc
@@ -41,8 +41,8 @@ class MockCpuOveruseObserver : public OveruseFrameDetectorObserverInterface {
MockCpuOveruseObserver() {}
virtual ~MockCpuOveruseObserver() {}
- MOCK_METHOD0(AdaptUp, void());
- MOCK_METHOD0(AdaptDown, void());
+ MOCK_METHOD(void, AdaptUp, (), (override));
+ MOCK_METHOD(void, AdaptDown, (), (override));
};
class CpuOveruseObserverImpl : public OveruseFrameDetectorObserverInterface {
diff --git a/video/adaptation/quality_rampup_experiment_helper.cc b/video/adaptation/quality_rampup_experiment_helper.cc
new file mode 100644
index 0000000000..6d82503fc6
--- /dev/null
+++ b/video/adaptation/quality_rampup_experiment_helper.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/adaptation/quality_rampup_experiment_helper.h"
+
+#include <memory>
+#include <utility>
+
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+QualityRampUpExperimentHelper::QualityRampUpExperimentHelper(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock,
+ QualityRampupExperiment experiment)
+ : experiment_listener_(experiment_listener),
+ clock_(clock),
+ quality_rampup_experiment_(std::move(experiment)),
+ cpu_adapted_(false),
+ qp_resolution_adaptations_(0) {
+ RTC_DCHECK(experiment_listener_);
+ RTC_DCHECK(clock_);
+}
+
+std::unique_ptr<QualityRampUpExperimentHelper>
+QualityRampUpExperimentHelper::CreateIfEnabled(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock) {
+ QualityRampupExperiment experiment = QualityRampupExperiment::ParseSettings();
+ if (experiment.Enabled()) {
+ return std::unique_ptr<QualityRampUpExperimentHelper>(
+ new QualityRampUpExperimentHelper(experiment_listener, clock,
+ experiment));
+ }
+ return nullptr;
+}
+
+void QualityRampUpExperimentHelper::PerformQualityRampupExperiment(
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
+ DataRate bandwidth,
+ DataRate encoder_target_bitrate,
+ DataRate max_bitrate,
+ int pixels) {
+ if (!quality_scaler_resource->is_started())
+ return;
+
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ quality_rampup_experiment_.SetMaxBitrate(pixels, max_bitrate.kbps());
+
+ bool try_quality_rampup = false;
+ if (quality_rampup_experiment_.BwHigh(now_ms, bandwidth.kbps())) {
+ // Verify that encoder is at max bitrate and the QP is low.
+ if (encoder_target_bitrate == max_bitrate &&
+ quality_scaler_resource->QpFastFilterLow()) {
+ try_quality_rampup = true;
+ }
+ }
+ if (try_quality_rampup && qp_resolution_adaptations_ > 0 && !cpu_adapted_) {
+ experiment_listener_->OnQualityRampUp();
+ }
+}
+
+void QualityRampUpExperimentHelper::cpu_adapted(bool cpu_adapted) {
+ cpu_adapted_ = cpu_adapted;
+}
+
+void QualityRampUpExperimentHelper::qp_resolution_adaptations(
+ int qp_resolution_adaptations) {
+ qp_resolution_adaptations_ = qp_resolution_adaptations;
+}
+
+} // namespace webrtc
diff --git a/video/adaptation/quality_rampup_experiment_helper.h b/video/adaptation/quality_rampup_experiment_helper.h
new file mode 100644
index 0000000000..81be982e7c
--- /dev/null
+++ b/video/adaptation/quality_rampup_experiment_helper.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
+#define VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/units/data_rate.h"
+#include "rtc_base/experiments/quality_rampup_experiment.h"
+#include "system_wrappers/include/clock.h"
+#include "video/adaptation/quality_scaler_resource.h"
+
+namespace webrtc {
+
+class QualityRampUpExperimentListener {
+ public:
+ virtual ~QualityRampUpExperimentListener() = default;
+ virtual void OnQualityRampUp() = 0;
+};
+
+// Helper class for orchestrating the WebRTC-Video-QualityRampupSettings
+// experiment.
+class QualityRampUpExperimentHelper {
+ public:
+ // Returns a QualityRampUpExperimentHelper if the experiment is enabled,
+ // an nullptr otherwise.
+ static std::unique_ptr<QualityRampUpExperimentHelper> CreateIfEnabled(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock);
+
+ QualityRampUpExperimentHelper(const QualityRampUpExperimentHelper&) = delete;
+ QualityRampUpExperimentHelper& operator=(
+ const QualityRampUpExperimentHelper&) = delete;
+
+ void cpu_adapted(bool cpu_adapted);
+ void qp_resolution_adaptations(int qp_adaptations);
+
+ void PerformQualityRampupExperiment(
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
+ DataRate bandwidth,
+ DataRate encoder_target_bitrate,
+ DataRate max_bitrate,
+ int pixels);
+
+ private:
+ QualityRampUpExperimentHelper(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock,
+ QualityRampupExperiment experiment);
+ QualityRampUpExperimentListener* const experiment_listener_;
+ Clock* clock_;
+ QualityRampupExperiment quality_rampup_experiment_;
+ bool cpu_adapted_;
+ int qp_resolution_adaptations_;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
diff --git a/video/adaptation/quality_scaler_resource.cc b/video/adaptation/quality_scaler_resource.cc
index ca317e5a8c..c26c2e193d 100644
--- a/video/adaptation/quality_scaler_resource.cc
+++ b/video/adaptation/quality_scaler_resource.cc
@@ -13,57 +13,107 @@
#include <utility>
#include "rtc_base/experiments/balanced_degradation_settings.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/time_utils.h"
namespace webrtc {
+namespace {
+
+const int64_t kUnderuseDueToDisabledCooldownMs = 1000;
+
+} // namespace
+
+// static
+rtc::scoped_refptr<QualityScalerResource> QualityScalerResource::Create(
+ DegradationPreferenceProvider* degradation_preference_provider) {
+ return new rtc::RefCountedObject<QualityScalerResource>(
+ degradation_preference_provider);
+}
+
QualityScalerResource::QualityScalerResource(
- ResourceAdaptationProcessorInterface* adaptation_processor)
- : adaptation_processor_(adaptation_processor),
+ DegradationPreferenceProvider* degradation_preference_provider)
+ : VideoStreamEncoderResource("QualityScalerResource"),
quality_scaler_(nullptr),
- pending_qp_usage_callback_(nullptr) {}
+ last_underuse_due_to_disabled_timestamp_ms_(absl::nullopt),
+ num_handled_callbacks_(0),
+ pending_callbacks_(),
+ degradation_preference_provider_(degradation_preference_provider),
+ clear_qp_samples_(false) {
+ RTC_CHECK(degradation_preference_provider_);
+}
+
+QualityScalerResource::~QualityScalerResource() {
+ RTC_DCHECK(!quality_scaler_);
+ RTC_DCHECK(pending_callbacks_.empty());
+}
bool QualityScalerResource::is_started() const {
+ RTC_DCHECK_RUN_ON(encoder_queue());
return quality_scaler_.get();
}
void QualityScalerResource::StartCheckForOveruse(
VideoEncoder::QpThresholds qp_thresholds) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
RTC_DCHECK(!is_started());
quality_scaler_ =
std::make_unique<QualityScaler>(this, std::move(qp_thresholds));
}
void QualityScalerResource::StopCheckForOveruse() {
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ // Ensure we have no pending callbacks. This makes it safe to destroy the
+ // QualityScaler and even task queues with tasks in-flight.
+ AbortPendingCallbacks();
quality_scaler_.reset();
}
void QualityScalerResource::SetQpThresholds(
VideoEncoder::QpThresholds qp_thresholds) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
RTC_DCHECK(is_started());
quality_scaler_->SetQpThresholds(std::move(qp_thresholds));
}
bool QualityScalerResource::QpFastFilterLow() {
+ RTC_DCHECK_RUN_ON(encoder_queue());
RTC_DCHECK(is_started());
return quality_scaler_->QpFastFilterLow();
}
void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image,
int64_t time_sent_in_us) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
if (quality_scaler_ && encoded_image.qp_ >= 0) {
quality_scaler_->ReportQp(encoded_image.qp_, time_sent_in_us);
} else if (!quality_scaler_) {
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
// TODO(webrtc:11553): this is a workaround to ensure that all quality
// scaler imposed limitations are removed once qualty scaler is disabled
// mid call.
// Instead it should be done at a higher layer in the same way for all
// resources.
- OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
+ int64_t timestamp_ms = rtc::TimeMillis();
+ if (!last_underuse_due_to_disabled_timestamp_ms_.has_value() ||
+ timestamp_ms - last_underuse_due_to_disabled_timestamp_ms_.value() >=
+ kUnderuseDueToDisabledCooldownMs) {
+ last_underuse_due_to_disabled_timestamp_ms_ = timestamp_ms;
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<QualityScalerResource>(this)] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->OnResourceUsageStateMeasured(
+ ResourceUsageState::kUnderuse);
+ });
+ }
}
}
void QualityScalerResource::OnFrameDropped(
EncodedImageCallback::DropReason reason) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
if (!quality_scaler_)
return;
switch (reason) {
@@ -78,36 +128,49 @@ void QualityScalerResource::OnFrameDropped(
void QualityScalerResource::OnReportQpUsageHigh(
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback) {
- RTC_DCHECK(!pending_qp_usage_callback_);
- pending_qp_usage_callback_ = std::move(callback);
- // If this triggers adaptation, OnAdaptationApplied() is called by the
- // processor where we determine if QP should be cleared and we invoke and null
- // the |pending_qp_usage_callback_|.
- OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
- // If |pending_qp_usage_callback_| has not been nulled yet then we did not
- // just trigger an adaptation and should not clear the QP samples.
- if (pending_qp_usage_callback_) {
- pending_qp_usage_callback_->OnQpUsageHandled(false);
- pending_qp_usage_callback_ = nullptr;
- }
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ size_t callback_id = QueuePendingCallback(callback);
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
+ callback_id] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->clear_qp_samples_ = false;
+ // If this OnResourceUsageStateMeasured() triggers an adaptation,
+ // OnAdaptationApplied() will occur between this line and the next. This
+ // allows modifying |clear_qp_samples_| based on the adaptation.
+ this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
+ this_ref->HandlePendingCallback(callback_id,
+ this_ref->clear_qp_samples_);
+ });
}
void QualityScalerResource::OnReportQpUsageLow(
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback) {
- RTC_DCHECK(!pending_qp_usage_callback_);
- OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
- callback->OnQpUsageHandled(true);
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ size_t callback_id = QueuePendingCallback(callback);
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
+ callback_id] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
+ this_ref->HandlePendingCallback(callback_id, true);
+ });
}
void QualityScalerResource::OnAdaptationApplied(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) {
+ rtc::scoped_refptr<Resource> reason_resource) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue());
// We only clear QP samples on adaptations triggered by the QualityScaler.
- if (!pending_qp_usage_callback_)
+ if (reason_resource != this)
return;
- bool clear_qp_samples = true;
+ clear_qp_samples_ = true;
// If we're in "balanced" and the frame rate before and after adaptation did
// not differ that much, don't clear the QP samples and instead check for QP
// again in a short amount of time. This may trigger adapting down again soon.
@@ -119,7 +182,7 @@ void QualityScalerResource::OnAdaptationApplied(
// interval whose delay is calculated based on events such as these. Now there
// is much dependency on a specific OnReportQpUsageHigh() event and "balanced"
// but adaptations happening might not align with QualityScaler's CheckQpTask.
- if (adaptation_processor_->effective_degradation_preference() ==
+ if (degradation_preference_provider_->degradation_preference() ==
DegradationPreference::BALANCED &&
DidDecreaseFrameRate(restrictions_before, restrictions_after)) {
absl::optional<int> min_diff = BalancedDegradationSettings().MinFpsDiff(
@@ -128,12 +191,50 @@ void QualityScalerResource::OnAdaptationApplied(
int fps_diff = input_state.frames_per_second() -
restrictions_after.max_frame_rate().value();
if (fps_diff < min_diff.value()) {
- clear_qp_samples = false;
+ clear_qp_samples_ = false;
}
}
}
- pending_qp_usage_callback_->OnQpUsageHandled(clear_qp_samples);
- pending_qp_usage_callback_ = nullptr;
+}
+
+size_t QualityScalerResource::QueuePendingCallback(
+ rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback) {
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ pending_callbacks_.push(callback);
+ // The ID of a callback is its sequence number (1, 2, 3...).
+ return num_handled_callbacks_ + pending_callbacks_.size();
+}
+
+void QualityScalerResource::HandlePendingCallback(size_t callback_id,
+ bool clear_qp_samples) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+ // Reference counting guarantees that this object is still alive by the time
+ // the task is executed.
+ encoder_queue()->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
+ callback_id, clear_qp_samples] {
+ RTC_DCHECK_RUN_ON(this_ref->encoder_queue());
+ if (this_ref->num_handled_callbacks_ >= callback_id) {
+ // The callback with this ID has already been handled.
+ // This happens if AbortPendingCallbacks() is called while the task is
+ // in flight.
+ return;
+ }
+ RTC_DCHECK(!this_ref->pending_callbacks_.empty());
+ this_ref->pending_callbacks_.front()->OnQpUsageHandled(
+ clear_qp_samples);
+ ++this_ref->num_handled_callbacks_;
+ this_ref->pending_callbacks_.pop();
+ }));
+}
+
+void QualityScalerResource::AbortPendingCallbacks() {
+ RTC_DCHECK_RUN_ON(encoder_queue());
+ while (!pending_callbacks_.empty()) {
+ pending_callbacks_.front()->OnQpUsageHandled(false);
+ ++num_handled_callbacks_;
+ pending_callbacks_.pop();
+ }
}
} // namespace webrtc
diff --git a/video/adaptation/quality_scaler_resource.h b/video/adaptation/quality_scaler_resource.h
index 30796c3db9..286413132a 100644
--- a/video/adaptation/quality_scaler_resource.h
+++ b/video/adaptation/quality_scaler_resource.h
@@ -12,26 +12,34 @@
#define VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_
#include <memory>
+#include <queue>
#include <string>
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video_codecs/video_encoder.h"
-#include "call/adaptation/resource.h"
+#include "call/adaptation/adaptation_listener.h"
+#include "call/adaptation/degradation_preference_provider.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "modules/video_coding/utility/quality_scaler.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_queue.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
// Handles interaction with the QualityScaler.
-// TODO(hbos): Add unittests specific to this class, it is currently only tested
-// indirectly by usage in the ResourceAdaptationProcessor (which is only tested
-// because of its usage in VideoStreamEncoder); all tests are currently in
-// video_stream_encoder_unittest.cc.
-class QualityScalerResource : public Resource,
+class QualityScalerResource : public VideoStreamEncoderResource,
+ public AdaptationListener,
public QualityScalerQpUsageHandlerInterface {
public:
+ static rtc::scoped_refptr<QualityScalerResource> Create(
+ DegradationPreferenceProvider* degradation_preference_provider);
+
explicit QualityScalerResource(
- ResourceAdaptationProcessorInterface* adaptation_processor);
+ DegradationPreferenceProvider* degradation_preference_provider);
+ ~QualityScalerResource() override;
bool is_started() const;
@@ -52,19 +60,40 @@ class QualityScalerResource : public Resource,
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback)
override;
- std::string name() const override { return "QualityScalerResource"; }
-
- // Resource implementation.
- void OnAdaptationApplied(const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) override;
+ // AdaptationListener implementation.
+ void OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) override;
private:
- ResourceAdaptationProcessorInterface* const adaptation_processor_;
- std::unique_ptr<QualityScaler> quality_scaler_;
- rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface>
- pending_qp_usage_callback_;
+ size_t QueuePendingCallback(
+ rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface>
+ callback);
+ void HandlePendingCallback(size_t callback_id, bool clear_qp_samples);
+ void AbortPendingCallbacks();
+
+ // Members accessed on the encoder queue.
+ std::unique_ptr<QualityScaler> quality_scaler_
+ RTC_GUARDED_BY(encoder_queue());
+ // The timestamp of the last time we reported underuse because this resource
+ // was disabled in order to prevent getting stuck with QP adaptations. Used to
+ // make sure underuse reporting is not too spammy.
+ absl::optional<int64_t> last_underuse_due_to_disabled_timestamp_ms_
+ RTC_GUARDED_BY(encoder_queue());
+ // Every OnReportQpUsageHigh/Low() operation has a callback that MUST be
+ // invoked on the encoder_queue(). Because usage measurements are reported on
+ // the encoder_queue() but handled by the processor on the the
+ // resource_adaptation_queue_(), handling a measurement entails a task queue
+ // "ping" round-trip. Multiple callbacks in-flight is thus possible.
+ size_t num_handled_callbacks_ RTC_GUARDED_BY(encoder_queue());
+ std::queue<rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface>>
+ pending_callbacks_ RTC_GUARDED_BY(encoder_queue());
+ DegradationPreferenceProvider* const degradation_preference_provider_;
+
+ // Members accessed on the adaptation queue.
+ bool clear_qp_samples_ RTC_GUARDED_BY(resource_adaptation_queue());
};
} // namespace webrtc
diff --git a/video/adaptation/quality_scaler_resource_unittest.cc b/video/adaptation/quality_scaler_resource_unittest.cc
new file mode 100644
index 0000000000..0dca7327fd
--- /dev/null
+++ b/video/adaptation/quality_scaler_resource_unittest.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/adaptation/quality_scaler_resource.h"
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "rtc_base/event.h"
+#include "rtc_base/task_queue.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+const int kDefaultTimeout = 5000;
+
+class FakeQualityScalerQpUsageHandlerCallback
+ : public QualityScalerQpUsageHandlerCallbackInterface {
+ public:
+ explicit FakeQualityScalerQpUsageHandlerCallback(
+ rtc::TaskQueue* encoder_queue)
+ : QualityScalerQpUsageHandlerCallbackInterface(),
+ encoder_queue_(encoder_queue),
+ is_handled_(false),
+ qp_usage_handled_event_(true /* manual_reset */, false),
+ clear_qp_samples_result_(absl::nullopt) {}
+ ~FakeQualityScalerQpUsageHandlerCallback() override {
+ RTC_DCHECK(is_handled_)
+ << "The callback was destroyed without being invoked.";
+ }
+
+ void OnQpUsageHandled(bool clear_qp_samples) override {
+ ASSERT_TRUE(encoder_queue_->IsCurrent());
+ RTC_DCHECK(!is_handled_);
+ clear_qp_samples_result_ = clear_qp_samples;
+ is_handled_ = true;
+ qp_usage_handled_event_.Set();
+ }
+
+ bool is_handled() const { return is_handled_; }
+ rtc::Event* qp_usage_handled_event() { return &qp_usage_handled_event_; }
+ absl::optional<bool> clear_qp_samples_result() const {
+ return clear_qp_samples_result_;
+ }
+
+ private:
+ rtc::TaskQueue* const encoder_queue_;
+ bool is_handled_;
+ rtc::Event qp_usage_handled_event_;
+ absl::optional<bool> clear_qp_samples_result_;
+};
+
+class FakeDegradationPreferenceProvider : public DegradationPreferenceProvider {
+ public:
+ ~FakeDegradationPreferenceProvider() override {}
+
+ DegradationPreference degradation_preference() const override {
+ return DegradationPreference::MAINTAIN_FRAMERATE;
+ }
+};
+
+} // namespace
+
+class QualityScalerResourceTest : public ::testing::Test {
+ public:
+ QualityScalerResourceTest()
+ : task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ resource_adaptation_queue_(task_queue_factory_->CreateTaskQueue(
+ "ResourceAdaptationQueue",
+ TaskQueueFactory::Priority::NORMAL)),
+ encoder_queue_(task_queue_factory_->CreateTaskQueue(
+ "EncoderQueue",
+ TaskQueueFactory::Priority::NORMAL)),
+ degradation_preference_provider_(),
+ quality_scaler_resource_(
+ QualityScalerResource::Create(&degradation_preference_provider_)) {
+ quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_.Get());
+ quality_scaler_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_.Get());
+ rtc::Event event;
+ encoder_queue_.PostTask([this, &event] {
+ quality_scaler_resource_->StartCheckForOveruse(
+ VideoEncoder::QpThresholds());
+ event.Set();
+ });
+ event.Wait(kDefaultTimeout);
+ }
+
+ ~QualityScalerResourceTest() {
+ rtc::Event event;
+ encoder_queue_.PostTask([this, &event] {
+ quality_scaler_resource_->StopCheckForOveruse();
+ event.Set();
+ });
+ event.Wait(kDefaultTimeout);
+ }
+
+ protected:
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ rtc::TaskQueue resource_adaptation_queue_;
+ rtc::TaskQueue encoder_queue_;
+ FakeDegradationPreferenceProvider degradation_preference_provider_;
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
+};
+
+TEST_F(QualityScalerResourceTest, ReportQpHigh) {
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ encoder_queue_.PostTask([this, callback] {
+ quality_scaler_resource_->OnReportQpUsageHigh(callback);
+ });
+ callback->qp_usage_handled_event()->Wait(kDefaultTimeout);
+}
+
+TEST_F(QualityScalerResourceTest, ReportQpLow) {
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ encoder_queue_.PostTask([this, callback] {
+ quality_scaler_resource_->OnReportQpUsageLow(callback);
+ });
+ callback->qp_usage_handled_event()->Wait(kDefaultTimeout);
+}
+
+TEST_F(QualityScalerResourceTest, MultipleCallbacksInFlight) {
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback1 =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback2 =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback3 =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ encoder_queue_.PostTask([this, callback1, callback2, callback3] {
+ quality_scaler_resource_->OnReportQpUsageHigh(callback1);
+ quality_scaler_resource_->OnReportQpUsageLow(callback2);
+ quality_scaler_resource_->OnReportQpUsageHigh(callback3);
+ });
+ callback1->qp_usage_handled_event()->Wait(kDefaultTimeout);
+ callback2->qp_usage_handled_event()->Wait(kDefaultTimeout);
+ callback3->qp_usage_handled_event()->Wait(kDefaultTimeout);
+}
+
+TEST_F(QualityScalerResourceTest, AbortPendingCallbacksAndStartAgain) {
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback1 =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback2 =
+ new FakeQualityScalerQpUsageHandlerCallback(&encoder_queue_);
+ encoder_queue_.PostTask([this, callback1, callback2] {
+ quality_scaler_resource_->OnReportQpUsageHigh(callback1);
+ quality_scaler_resource_->StopCheckForOveruse();
+ EXPECT_TRUE(callback1->qp_usage_handled_event()->Wait(0));
+ quality_scaler_resource_->StartCheckForOveruse(
+ VideoEncoder::QpThresholds());
+ quality_scaler_resource_->OnReportQpUsageHigh(callback2);
+ });
+ callback1->qp_usage_handled_event()->Wait(kDefaultTimeout);
+ callback2->qp_usage_handled_event()->Wait(kDefaultTimeout);
+}
+
+} // namespace webrtc
diff --git a/video/adaptation/video_stream_encoder_resource.cc b/video/adaptation/video_stream_encoder_resource.cc
new file mode 100644
index 0000000000..df76df48ac
--- /dev/null
+++ b/video/adaptation/video_stream_encoder_resource.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/adaptation/video_stream_encoder_resource.h"
+
+#include <algorithm>
+#include <utility>
+
+namespace webrtc {
+
+VideoStreamEncoderResource::VideoStreamEncoderResource(std::string name)
+ : lock_(),
+ name_(std::move(name)),
+ encoder_queue_(nullptr),
+ resource_adaptation_queue_(nullptr),
+ listener_(nullptr) {}
+
+VideoStreamEncoderResource::~VideoStreamEncoderResource() {
+ RTC_DCHECK(!listener_)
+ << "There is a listener depending on a VideoStreamEncoderResource being "
+ << "destroyed.";
+}
+
+void VideoStreamEncoderResource::RegisterEncoderTaskQueue(
+ TaskQueueBase* encoder_queue) {
+ RTC_DCHECK(!encoder_queue_);
+ RTC_DCHECK(encoder_queue);
+ encoder_queue_ = encoder_queue;
+}
+
+void VideoStreamEncoderResource::RegisterAdaptationTaskQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResource::UnregisterAdaptationTaskQueue() {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(resource_adaptation_queue_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ resource_adaptation_queue_ = nullptr;
+}
+
+void VideoStreamEncoderResource::SetResourceListener(
+ ResourceListener* listener) {
+ // If you want to change listener you need to unregister the old listener by
+ // setting it to null first.
+ MutexLock crit(&listener_lock_);
+ RTC_DCHECK(!listener_ || !listener) << "A listener is already set";
+ listener_ = listener;
+}
+
+std::string VideoStreamEncoderResource::Name() const {
+ return name_;
+}
+
+void VideoStreamEncoderResource::OnResourceUsageStateMeasured(
+ ResourceUsageState usage_state) {
+ MutexLock crit(&listener_lock_);
+ if (listener_) {
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
+}
+
+TaskQueueBase* VideoStreamEncoderResource::encoder_queue() const {
+ return encoder_queue_;
+}
+
+TaskQueueBase* VideoStreamEncoderResource::resource_adaptation_queue() const {
+ MutexLock lock(&lock_);
+ RTC_DCHECK(resource_adaptation_queue_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ return resource_adaptation_queue_;
+}
+
+} // namespace webrtc
diff --git a/video/adaptation/video_stream_encoder_resource.h b/video/adaptation/video_stream_encoder_resource.h
new file mode 100644
index 0000000000..08994c135d
--- /dev/null
+++ b/video/adaptation/video_stream_encoder_resource.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
+#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/task_queue/task_queue_base.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+
+namespace webrtc {
+
+class VideoStreamEncoderResource : public Resource {
+ public:
+ ~VideoStreamEncoderResource() override;
+
+ // Registering task queues must be performed as part of initialization.
+ void RegisterEncoderTaskQueue(TaskQueueBase* encoder_queue);
+
+ // Resource implementation.
+ std::string Name() const override;
+ void SetResourceListener(ResourceListener* listener) override;
+
+ // Provides a pointer to the adaptation task queue. After this call, all
+ // methods defined in this interface, including
+ // UnregisterAdaptationTaskQueue() MUST be invoked on the adaptation task
+ // queue. Registering the adaptation task queue may, however, happen off the
+ // adaptation task queue.
+ void RegisterAdaptationTaskQueue(TaskQueueBase* resource_adaptation_queue);
+ // Signals that the adaptation task queue is no longer safe to use. No
+ // assumptions must be made as to whether or not tasks in-flight will run.
+ void UnregisterAdaptationTaskQueue();
+
+ protected:
+ explicit VideoStreamEncoderResource(std::string name);
+
+ void OnResourceUsageStateMeasured(ResourceUsageState usage_state);
+
+ // The caller is responsible for ensuring the task queue is still valid.
+ TaskQueueBase* encoder_queue() const;
+ // Validity of returned pointer is ensured by only allowing this method to be
+ // called on the adaptation task queue. Designed for use with RTC_GUARDED_BY.
+ // For posting from a different queue, use
+ // MaybePostTaskToResourceAdaptationQueue() instead, which only posts if the
+ // task queue is currently registered.
+ TaskQueueBase* resource_adaptation_queue() const;
+ template <typename Closure>
+ void MaybePostTaskToResourceAdaptationQueue(Closure&& closure) {
+ MutexLock lock(&lock_);
+ if (!resource_adaptation_queue_)
+ return;
+ resource_adaptation_queue_->PostTask(ToQueuedTask(closure));
+ }
+
+ private:
+ mutable Mutex lock_;
+ const std::string name_;
+ // Treated as const after initialization.
+ TaskQueueBase* encoder_queue_;
+ TaskQueueBase* resource_adaptation_queue_ RTC_GUARDED_BY(lock_);
+ mutable Mutex listener_lock_;
+ ResourceListener* listener_ RTC_GUARDED_BY(listener_lock_);
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
diff --git a/video/adaptation/video_stream_encoder_resource_manager.cc b/video/adaptation/video_stream_encoder_resource_manager.cc
index b8179d0f71..dc19e1c787 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.cc
+++ b/video/adaptation/video_stream_encoder_resource_manager.cc
@@ -10,22 +10,21 @@
#include "video/adaptation/video_stream_encoder_resource_manager.h"
-#include <algorithm>
#include <cmath>
#include <limits>
#include <memory>
-#include <string>
#include <utility>
#include "absl/algorithm/container.h"
#include "absl/base/macros.h"
+#include "api/adaptation/resource.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video/video_source_interface.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/video_source_restrictions.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/ref_counted_object.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
@@ -55,20 +54,12 @@ std::string ToString(VideoAdaptationReason reason) {
}
}
-VideoAdaptationReason OtherReason(VideoAdaptationReason reason) {
- switch (reason) {
- case VideoAdaptationReason::kQuality:
- return VideoAdaptationReason::kCpu;
- case VideoAdaptationReason::kCpu:
- return VideoAdaptationReason::kQuality;
- }
-}
-
} // namespace
class VideoStreamEncoderResourceManager::InitialFrameDropper {
public:
- explicit InitialFrameDropper(QualityScalerResource* quality_scaler_resource)
+ explicit InitialFrameDropper(
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource)
: quality_scaler_resource_(quality_scaler_resource),
quality_scaler_settings_(QualityScalerSettings::ParseFromFieldTrials()),
has_seen_first_bwe_drop_(false),
@@ -128,7 +119,7 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper {
// achieve desired bitrate.
static const int kMaxInitialFramedrop = 4;
- const QualityScalerResource* quality_scaler_resource_;
+ const rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
const QualityScalerSettings quality_scaler_settings_;
bool has_seen_first_bwe_drop_;
DataRate set_start_bitrate_;
@@ -137,56 +128,60 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper {
int initial_framedrop_;
};
-VideoStreamEncoderResourceManager::PreventAdaptUpDueToActiveCounts::
- PreventAdaptUpDueToActiveCounts(VideoStreamEncoderResourceManager* manager)
- : manager_(manager) {}
+VideoStreamEncoderResourceManager::BitrateConstraint::BitrateConstraint(
+ VideoStreamEncoderResourceManager* manager)
+ : manager_(manager),
+ resource_adaptation_queue_(nullptr),
+ encoder_settings_(absl::nullopt),
+ encoder_target_bitrate_bps_(absl::nullopt) {}
+
+void VideoStreamEncoderResourceManager::BitrateConstraint::SetAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResourceManager::BitrateConstraint::
+ OnEncoderSettingsUpdated(absl::optional<EncoderSettings> encoder_settings) {
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BitrateConstraint>(this),
+ encoder_settings] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
+ this_ref->encoder_settings_ = std::move(encoder_settings);
+ }));
+}
+
+void VideoStreamEncoderResourceManager::BitrateConstraint::
+ OnEncoderTargetBitrateUpdated(
+ absl::optional<uint32_t> encoder_target_bitrate_bps) {
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BitrateConstraint>(this),
+ encoder_target_bitrate_bps] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
+ this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps;
+ }));
+}
-bool VideoStreamEncoderResourceManager::PreventAdaptUpDueToActiveCounts::
+bool VideoStreamEncoderResourceManager::BitrateConstraint::
IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const {
- VideoAdaptationReason reason =
- manager_->GetReasonFromResource(reason_resource);
- // We can't adapt up if we're already at the highest setting.
- // Note that this only includes counts relevant to the current degradation
- // preference. e.g. we previously adapted resolution, now prefer adpating fps,
- // only count the fps adaptations and not the previous resolution adaptations.
- // TODO(hbos): Why would the reason matter? If a particular resource doesn't
- // want us to go up it should prevent us from doing so itself rather than to
- // have this catch-all reason- and stats-based approach.
- int num_downgrades = FilterVideoAdaptationCountersByDegradationPreference(
- manager_->active_counts_[reason],
- manager_->effective_degradation_preference_)
- .Total();
- RTC_DCHECK_GE(num_downgrades, 0);
- return num_downgrades > 0;
-}
-
-VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::
- PreventIncreaseResolutionDueToBitrateResource(
- VideoStreamEncoderResourceManager* manager)
- : manager_(manager) {}
-
-bool VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const {
+ rtc::scoped_refptr<Resource> reason_resource) const {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
VideoAdaptationReason reason =
manager_->GetReasonFromResource(reason_resource);
// If increasing resolution due to kQuality, make sure bitrate limits are not
// violated.
- // TODO(hbos): Why are we allowing violating bitrate constraints if adapting
- // due to CPU? Shouldn't this condition be checked regardless of reason?
+ // TODO(https://crbug.com/webrtc/11771): Why are we allowing violating bitrate
+ // constraints if adapting due to CPU? Shouldn't this condition be checked
+ // regardless of reason?
if (reason == VideoAdaptationReason::kQuality &&
DidIncreaseResolution(restrictions_before, restrictions_after)) {
- uint32_t bitrate_bps = manager_->encoder_target_bitrate_bps_.value_or(0);
+ uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0);
absl::optional<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
- manager_->encoder_settings_.has_value()
- ? manager_->encoder_settings_->encoder_info()
+ encoder_settings_.has_value()
+ ? encoder_settings_->encoder_info()
.GetEncoderBitrateLimitsForResolution(
// Need some sort of expected resulting pixels to be used
// instead of unrestricted.
@@ -203,28 +198,54 @@ bool VideoStreamEncoderResourceManager::
return true;
}
-VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
- PreventAdaptUpInBalancedResource(VideoStreamEncoderResourceManager* manager)
- : manager_(manager) {}
+VideoStreamEncoderResourceManager::BalancedConstraint::BalancedConstraint(
+ VideoStreamEncoderResourceManager* manager,
+ DegradationPreferenceProvider* degradation_preference_provider)
+ : manager_(manager),
+ resource_adaptation_queue_(nullptr),
+ encoder_target_bitrate_bps_(absl::nullopt),
+ degradation_preference_provider_(degradation_preference_provider) {
+ RTC_DCHECK(manager_);
+ RTC_DCHECK(degradation_preference_provider_);
+}
-bool VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
+void VideoStreamEncoderResourceManager::BalancedConstraint::SetAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResourceManager::BalancedConstraint::
+ OnEncoderTargetBitrateUpdated(
+ absl::optional<uint32_t> encoder_target_bitrate_bps) {
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BalancedConstraint>(this),
+ encoder_target_bitrate_bps] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
+ this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps;
+ }));
+}
+
+bool VideoStreamEncoderResourceManager::BalancedConstraint::
IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const {
+ rtc::scoped_refptr<Resource> reason_resource) const {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
VideoAdaptationReason reason =
manager_->GetReasonFromResource(reason_resource);
// Don't adapt if BalancedDegradationSettings applies and determines this will
// exceed bitrate constraints.
- // TODO(hbos): Why are we allowing violating balanced settings if adapting due
- // CPU? Shouldn't this condition be checked regardless of reason?
+ // TODO(https://crbug.com/webrtc/11771): Why are we allowing violating
+ // balanced settings if adapting due CPU? Shouldn't this condition be checked
+ // regardless of reason?
if (reason == VideoAdaptationReason::kQuality &&
- manager_->effective_degradation_preference_ ==
+ degradation_preference_provider_->degradation_preference() ==
DegradationPreference::BALANCED &&
!manager_->balanced_settings_.CanAdaptUp(
input_state.video_codec_type(),
input_state.frame_size_pixels().value(),
- manager_->encoder_target_bitrate_bps_.value_or(0))) {
+ encoder_target_bitrate_bps_.value_or(0))) {
return false;
}
if (reason == VideoAdaptationReason::kQuality &&
@@ -232,7 +253,7 @@ bool VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
!manager_->balanced_settings_.CanAdaptUpResolution(
input_state.video_codec_type(),
input_state.frame_size_pixels().value(),
- manager_->encoder_target_bitrate_bps_.value_or(0))) {
+ encoder_target_bitrate_bps_.value_or(0))) {
return false;
}
return true;
@@ -240,178 +261,265 @@ bool VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
VideoStreamInputStateProvider* input_state_provider,
- ResourceAdaptationProcessorInterface* adaptation_processor,
VideoStreamEncoderObserver* encoder_stats_observer,
Clock* clock,
bool experiment_cpu_load_estimator,
- std::unique_ptr<OveruseFrameDetector> overuse_detector)
- : prevent_adapt_up_due_to_active_counts_(this),
- prevent_increase_resolution_due_to_bitrate_resource_(this),
- prevent_adapt_up_in_balanced_resource_(this),
- encode_usage_resource_(std::move(overuse_detector)),
- quality_scaler_resource_(adaptation_processor),
+ std::unique_ptr<OveruseFrameDetector> overuse_detector,
+ DegradationPreferenceProvider* degradation_preference_provider)
+ : degradation_preference_provider_(degradation_preference_provider),
+ bitrate_constraint_(new rtc::RefCountedObject<BitrateConstraint>(this)),
+ balanced_constraint_(new rtc::RefCountedObject<BalancedConstraint>(
+ this,
+ degradation_preference_provider_)),
+ encode_usage_resource_(
+ EncodeUsageResource::Create(std::move(overuse_detector))),
+ quality_scaler_resource_(
+ QualityScalerResource::Create(degradation_preference_provider_)),
+ encoder_queue_(nullptr),
+ resource_adaptation_queue_(nullptr),
input_state_provider_(input_state_provider),
- adaptation_processor_(adaptation_processor),
+ adaptation_processor_(nullptr),
encoder_stats_observer_(encoder_stats_observer),
degradation_preference_(DegradationPreference::DISABLED),
- effective_degradation_preference_(DegradationPreference::DISABLED),
video_source_restrictions_(),
clock_(clock),
experiment_cpu_load_estimator_(experiment_cpu_load_estimator),
initial_frame_dropper_(
- std::make_unique<InitialFrameDropper>(&quality_scaler_resource_)),
+ std::make_unique<InitialFrameDropper>(quality_scaler_resource_)),
quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()),
encoder_target_bitrate_bps_(absl::nullopt),
- quality_rampup_done_(false),
- quality_rampup_experiment_(QualityRampupExperiment::ParseSettings()),
- encoder_settings_(absl::nullopt),
- active_counts_() {
- RTC_DCHECK(encoder_stats_observer_);
- MapResourceToReason(&prevent_adapt_up_due_to_active_counts_,
- VideoAdaptationReason::kQuality);
- MapResourceToReason(&prevent_increase_resolution_due_to_bitrate_resource_,
- VideoAdaptationReason::kQuality);
- MapResourceToReason(&prevent_adapt_up_in_balanced_resource_,
- VideoAdaptationReason::kQuality);
- MapResourceToReason(&encode_usage_resource_, VideoAdaptationReason::kCpu);
- MapResourceToReason(&quality_scaler_resource_,
+ quality_rampup_experiment_(
+ QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)),
+ encoder_settings_(absl::nullopt) {
+ RTC_CHECK(degradation_preference_provider_);
+ RTC_CHECK(encoder_stats_observer_);
+ MapResourceToReason(encode_usage_resource_, VideoAdaptationReason::kCpu);
+ MapResourceToReason(quality_scaler_resource_,
VideoAdaptationReason::kQuality);
}
-VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() {
- RTC_DCHECK(!encode_usage_resource_.is_started());
+VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() {}
+
+void VideoStreamEncoderResourceManager::Initialize(
+ rtc::TaskQueue* encoder_queue,
+ rtc::TaskQueue* resource_adaptation_queue) {
+ RTC_DCHECK(!encoder_queue_);
+ RTC_DCHECK(encoder_queue);
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ encoder_queue_ = encoder_queue;
+ resource_adaptation_queue_ = resource_adaptation_queue;
+ bitrate_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get());
+ balanced_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get());
+ encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get());
+ encode_usage_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_->Get());
+ quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get());
+ quality_scaler_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_->Get());
+}
+
+void VideoStreamEncoderResourceManager::SetAdaptationProcessor(
+ ResourceAdaptationProcessorInterface* adaptation_processor,
+ VideoStreamAdapter* stream_adapter) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ adaptation_processor_ = adaptation_processor;
+ stream_adapter_ = stream_adapter;
}
void VideoStreamEncoderResourceManager::SetDegradationPreferences(
- DegradationPreference degradation_preference,
- DegradationPreference effective_degradation_preference) {
+ DegradationPreference degradation_preference) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
degradation_preference_ = degradation_preference;
- effective_degradation_preference_ = effective_degradation_preference;
UpdateStatsAdaptationSettings();
}
+DegradationPreference
+VideoStreamEncoderResourceManager::degradation_preference() const {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ return degradation_preference_;
+}
+
void VideoStreamEncoderResourceManager::StartEncodeUsageResource() {
- RTC_DCHECK(!encode_usage_resource_.is_started());
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ RTC_DCHECK(!encode_usage_resource_->is_started());
RTC_DCHECK(encoder_settings_.has_value());
- encode_usage_resource_.StartCheckForOveruse(GetCpuOveruseOptions());
+ encode_usage_resource_->StartCheckForOveruse(GetCpuOveruseOptions());
}
void VideoStreamEncoderResourceManager::StopManagedResources() {
- encode_usage_resource_.StopCheckForOveruse();
- quality_scaler_resource_.StopCheckForOveruse();
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ encode_usage_resource_->StopCheckForOveruse();
+ quality_scaler_resource_->StopCheckForOveruse();
}
void VideoStreamEncoderResourceManager::MapResourceToReason(
- Resource* resource,
+ rtc::scoped_refptr<Resource> resource,
VideoAdaptationReason reason) {
+ MutexLock lock(&resource_lock_);
RTC_DCHECK(resource);
RTC_DCHECK(absl::c_find_if(resources_,
[resource](const ResourceAndReason& r) {
return r.resource == resource;
}) == resources_.end())
- << "Resource " << resource->name() << " already was inserted";
+ << "Resource " << resource->Name() << " already was inserted";
resources_.emplace_back(resource, reason);
}
-std::vector<Resource*> VideoStreamEncoderResourceManager::MappedResources()
- const {
- std::vector<Resource*> resources;
+std::vector<rtc::scoped_refptr<Resource>>
+VideoStreamEncoderResourceManager::MappedResources() const {
+ MutexLock lock(&resource_lock_);
+ std::vector<rtc::scoped_refptr<Resource>> resources;
for (auto const& resource_and_reason : resources_) {
resources.push_back(resource_and_reason.resource);
}
return resources;
}
-QualityScalerResource*
+std::vector<AdaptationConstraint*>
+VideoStreamEncoderResourceManager::AdaptationConstraints() const {
+ return {bitrate_constraint_, balanced_constraint_};
+}
+
+std::vector<AdaptationListener*>
+VideoStreamEncoderResourceManager::AdaptationListeners() const {
+ return {quality_scaler_resource_};
+}
+
+rtc::scoped_refptr<QualityScalerResource>
VideoStreamEncoderResourceManager::quality_scaler_resource_for_testing() {
- return &quality_scaler_resource_;
+ MutexLock lock(&resource_lock_);
+ return quality_scaler_resource_;
}
void VideoStreamEncoderResourceManager::SetEncoderSettings(
EncoderSettings encoder_settings) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
encoder_settings_ = std::move(encoder_settings);
-
- quality_rampup_experiment_.SetMaxBitrate(
- LastInputFrameSizeOrDefault(),
- encoder_settings_->video_codec().maxBitrate);
+ bitrate_constraint_->OnEncoderSettingsUpdated(encoder_settings_);
MaybeUpdateTargetFrameRate();
}
void VideoStreamEncoderResourceManager::SetStartBitrate(
DataRate start_bitrate) {
- if (!start_bitrate.IsZero())
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ if (!start_bitrate.IsZero()) {
encoder_target_bitrate_bps_ = start_bitrate.bps();
+ bitrate_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ balanced_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ }
initial_frame_dropper_->SetStartBitrate(start_bitrate,
clock_->TimeInMicroseconds());
}
void VideoStreamEncoderResourceManager::SetTargetBitrate(
DataRate target_bitrate) {
- if (!target_bitrate.IsZero())
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ if (!target_bitrate.IsZero()) {
encoder_target_bitrate_bps_ = target_bitrate.bps();
+ bitrate_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ balanced_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ }
initial_frame_dropper_->SetTargetBitrate(target_bitrate,
clock_->TimeInMilliseconds());
}
void VideoStreamEncoderResourceManager::SetEncoderRates(
const VideoEncoder::RateControlParameters& encoder_rates) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
encoder_rates_ = encoder_rates;
}
void VideoStreamEncoderResourceManager::OnFrameDroppedDueToSize() {
- adaptation_processor_->TriggerAdaptationDueToFrameDroppedDueToSize(
- quality_scaler_resource_);
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ // The VideoStreamEncoder makes the manager outlive the adaptation queue. This
+ // means that if the task gets executed, |this| has not been freed yet.
+ // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives
+ // the adaptation queue, add logic to prevent use-after-free on |this|.
+ resource_adaptation_queue_->PostTask([this] {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (!adaptation_processor_) {
+ // The processor nulled before this task had a chance to execute. This
+ // happens if the processor is destroyed. No action needed.
+ return;
+ }
+ Adaptation reduce_resolution = stream_adapter_->GetAdaptDownResolution();
+ if (reduce_resolution.status() == Adaptation::Status::kValid) {
+ stream_adapter_->ApplyAdaptation(reduce_resolution,
+ quality_scaler_resource_);
+ }
+ });
initial_frame_dropper_->OnFrameDroppedDueToSize();
}
void VideoStreamEncoderResourceManager::OnEncodeStarted(
const VideoFrame& cropped_frame,
int64_t time_when_first_seen_us) {
- encode_usage_resource_.OnEncodeStarted(cropped_frame,
- time_when_first_seen_us);
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ encode_usage_resource_->OnEncodeStarted(cropped_frame,
+ time_when_first_seen_us);
}
void VideoStreamEncoderResourceManager::OnEncodeCompleted(
const EncodedImage& encoded_image,
int64_t time_sent_in_us,
absl::optional<int> encode_duration_us) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
// Inform |encode_usage_resource_| of the encode completed event.
uint32_t timestamp = encoded_image.Timestamp();
int64_t capture_time_us =
encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec;
- encode_usage_resource_.OnEncodeCompleted(timestamp, time_sent_in_us,
- capture_time_us, encode_duration_us);
+ encode_usage_resource_->OnEncodeCompleted(
+ timestamp, time_sent_in_us, capture_time_us, encode_duration_us);
// Inform |quality_scaler_resource_| of the encode completed event.
- quality_scaler_resource_.OnEncodeCompleted(encoded_image, time_sent_in_us);
+ quality_scaler_resource_->OnEncodeCompleted(encoded_image, time_sent_in_us);
}
void VideoStreamEncoderResourceManager::OnFrameDropped(
EncodedImageCallback::DropReason reason) {
- quality_scaler_resource_.OnFrameDropped(reason);
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ quality_scaler_resource_->OnFrameDropped(reason);
}
bool VideoStreamEncoderResourceManager::DropInitialFrames() const {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
return initial_frame_dropper_->DropInitialFrames();
}
void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
initial_frame_dropper_->OnMaybeEncodeFrame();
- MaybePerformQualityRampupExperiment();
+ if (quality_rampup_experiment_) {
+ DataRate bandwidth = encoder_rates_.has_value()
+ ? encoder_rates_->bandwidth_allocation
+ : DataRate::Zero();
+ quality_rampup_experiment_->PerformQualityRampupExperiment(
+ quality_scaler_resource_, bandwidth,
+ DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)),
+ DataRate::KilobitsPerSec(encoder_settings_->video_codec().maxBitrate),
+ LastInputFrameSizeOrDefault());
+ }
}
void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings(
absl::optional<VideoEncoder::QpThresholds> qp_thresholds) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
if (qp_thresholds.has_value()) {
- quality_scaler_resource_.StopCheckForOveruse();
- quality_scaler_resource_.StartCheckForOveruse(qp_thresholds.value());
+ quality_scaler_resource_->StopCheckForOveruse();
+ quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value());
} else {
- quality_scaler_resource_.StopCheckForOveruse();
+ quality_scaler_resource_->StopCheckForOveruse();
}
initial_frame_dropper_->OnQualityScalerSettingsUpdated();
}
void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
const VideoEncoder::EncoderInfo& encoder_info) {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
const auto scaling_settings = encoder_info.scaling_settings;
const bool quality_scaling_allowed =
IsResolutionScalingEnabled(degradation_preference_) &&
@@ -420,7 +528,7 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
// TODO(https://crbug.com/webrtc/11222): Should this move to
// QualityScalerResource?
if (quality_scaling_allowed) {
- if (!quality_scaler_resource_.is_started()) {
+ if (!quality_scaler_resource_->is_started()) {
// Quality scaler has not already been configured.
// Use experimental thresholds if available.
@@ -439,26 +547,27 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
// Set the qp-thresholds to the balanced settings if balanced mode.
if (degradation_preference_ == DegradationPreference::BALANCED &&
- quality_scaler_resource_.is_started()) {
+ quality_scaler_resource_->is_started()) {
absl::optional<VideoEncoder::QpThresholds> thresholds =
balanced_settings_.GetQpThresholds(
GetVideoCodecTypeOrGeneric(encoder_settings_),
LastInputFrameSizeOrDefault());
if (thresholds) {
- quality_scaler_resource_.SetQpThresholds(*thresholds);
+ quality_scaler_resource_->SetQpThresholds(*thresholds);
}
}
UpdateStatsAdaptationSettings();
}
VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource(
- const Resource& resource) const {
+ rtc::scoped_refptr<Resource> resource) const {
+ MutexLock lock(&resource_lock_);
const auto& registered_resource =
absl::c_find_if(resources_, [&resource](const ResourceAndReason& r) {
- return r.resource == &resource;
+ return r.resource == resource;
});
RTC_DCHECK(registered_resource != resources_.end())
- << resource.name() << " not found.";
+ << resource->Name() << " not found.";
return registered_resource->reason;
}
@@ -468,6 +577,7 @@ VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource(
// remotely cope with the load right now.
CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions()
const {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
// This is already ensured by the only caller of this method:
// StartResourceAdaptation().
RTC_DCHECK(encoder_settings_.has_value());
@@ -485,6 +595,7 @@ CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions()
}
int VideoStreamEncoderResourceManager::LastInputFrameSizeOrDefault() const {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
return input_state_provider_->InputState().frame_size_pixels().value_or(
kDefaultInputPixelsWidth * kDefaultInputPixelsHeight);
}
@@ -492,35 +603,70 @@ int VideoStreamEncoderResourceManager::LastInputFrameSizeOrDefault() const {
void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) {
- video_source_restrictions_ = restrictions;
- VideoAdaptationCounters previous_adaptation_counters =
- active_counts_[VideoAdaptationReason::kQuality] +
- active_counts_[VideoAdaptationReason::kCpu];
- int adaptation_counters_total_abs_diff = std::abs(
- adaptation_counters.Total() - previous_adaptation_counters.Total());
- if (reason) {
- // A resource signal triggered this adaptation. The adaptation counters have
- // to be updated every time the adaptation counter is incremented or
- // decremented due to a resource.
- RTC_DCHECK_EQ(adaptation_counters_total_abs_diff, 1);
- VideoAdaptationReason reason_type = GetReasonFromResource(*reason);
- UpdateAdaptationStats(adaptation_counters, reason_type);
- } else if (adaptation_counters.Total() == 0) {
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback.
+ if (!reason && adaptation_counters.Total() == 0) {
// Adaptation was manually reset - clear the per-reason counters too.
- ResetActiveCounts();
encoder_stats_observer_->ClearAdaptationStats();
- } else {
- // If a reason did not increase or decrease the Total() by 1 and the
- // restrictions were not just reset, the adaptation counters MUST not have
- // been modified and there is nothing to do stats-wise.
- RTC_DCHECK_EQ(adaptation_counters_total_abs_diff, 0);
}
- RTC_LOG(LS_INFO) << ActiveCountsToString();
- MaybeUpdateTargetFrameRate();
+
+ // The VideoStreamEncoder makes the manager outlive the encoder queue. This
+ // means that if the task gets executed, |this| has not been freed yet.
+ encoder_queue_->PostTask([this, restrictions] {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ video_source_restrictions_ = FilterRestrictionsByDegradationPreference(
+ restrictions, degradation_preference_);
+ MaybeUpdateTargetFrameRate();
+ });
+}
+
+void VideoStreamEncoderResourceManager::OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (!resource) {
+ encoder_stats_observer_->ClearAdaptationStats();
+ return;
+ }
+
+ std::map<VideoAdaptationReason, VideoAdaptationCounters> limitations;
+ for (auto& resource_counter : resource_limitations) {
+ std::map<VideoAdaptationReason, VideoAdaptationCounters>::iterator it;
+ bool inserted;
+ std::tie(it, inserted) = limitations.emplace(
+ GetReasonFromResource(resource_counter.first), resource_counter.second);
+ if (!inserted && it->second.Total() < resource_counter.second.Total()) {
+ it->second = resource_counter.second;
+ }
+ }
+
+ VideoAdaptationReason adaptation_reason = GetReasonFromResource(resource);
+ encoder_stats_observer_->OnAdaptationChanged(
+ adaptation_reason, limitations[VideoAdaptationReason::kCpu],
+ limitations[VideoAdaptationReason::kQuality]);
+
+ encoder_queue_->PostTask(ToQueuedTask(
+ [cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0,
+ qp_resolution_adaptations =
+ limitations.at(VideoAdaptationReason::kQuality)
+ .resolution_adaptations,
+ this]() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ if (quality_rampup_experiment_) {
+ quality_rampup_experiment_->cpu_adapted(cpu_limited);
+ quality_rampup_experiment_->qp_resolution_adaptations(
+ qp_resolution_adaptations);
+ }
+ }));
+
+ RTC_LOG(LS_INFO) << ActiveCountsToString(limitations);
}
void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
absl::optional<double> codec_max_frame_rate =
encoder_settings_.has_value()
? absl::optional<double>(
@@ -537,152 +683,36 @@ void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() {
codec_max_frame_rate.value() < target_frame_rate.value())) {
target_frame_rate = codec_max_frame_rate;
}
- encode_usage_resource_.SetTargetFrameRate(target_frame_rate);
-}
-
-void VideoStreamEncoderResourceManager::OnAdaptationCountChanged(
- const VideoAdaptationCounters& adaptation_count,
- VideoAdaptationCounters* active_count,
- VideoAdaptationCounters* other_active) {
- RTC_DCHECK(active_count);
- RTC_DCHECK(other_active);
- const int active_total = active_count->Total();
- const int other_total = other_active->Total();
- const VideoAdaptationCounters prev_total = *active_count + *other_active;
- const int delta_resolution_adaptations =
- adaptation_count.resolution_adaptations -
- prev_total.resolution_adaptations;
- const int delta_fps_adaptations =
- adaptation_count.fps_adaptations - prev_total.fps_adaptations;
-
- RTC_DCHECK_EQ(
- std::abs(delta_resolution_adaptations) + std::abs(delta_fps_adaptations),
- 1)
- << "Adaptation took more than one step!";
-
- if (delta_resolution_adaptations > 0) {
- ++active_count->resolution_adaptations;
- } else if (delta_resolution_adaptations < 0) {
- if (active_count->resolution_adaptations == 0) {
- RTC_DCHECK_GT(active_count->fps_adaptations, 0) << "No downgrades left";
- RTC_DCHECK_GT(other_active->resolution_adaptations, 0)
- << "No resolution adaptation to borrow from";
- // Lend an fps adaptation to other and take one resolution adaptation.
- --active_count->fps_adaptations;
- ++other_active->fps_adaptations;
- --other_active->resolution_adaptations;
- } else {
- --active_count->resolution_adaptations;
- }
- }
- if (delta_fps_adaptations > 0) {
- ++active_count->fps_adaptations;
- } else if (delta_fps_adaptations < 0) {
- if (active_count->fps_adaptations == 0) {
- RTC_DCHECK_GT(active_count->resolution_adaptations, 0)
- << "No downgrades left";
- RTC_DCHECK_GT(other_active->fps_adaptations, 0)
- << "No fps adaptation to borrow from";
- // Lend a resolution adaptation to other and take one fps adaptation.
- --active_count->resolution_adaptations;
- ++other_active->resolution_adaptations;
- --other_active->fps_adaptations;
- } else {
- --active_count->fps_adaptations;
- }
- }
-
- RTC_DCHECK(*active_count + *other_active == adaptation_count);
- RTC_DCHECK_EQ(other_active->Total(), other_total);
- RTC_DCHECK_EQ(
- active_count->Total(),
- active_total + delta_resolution_adaptations + delta_fps_adaptations);
- RTC_DCHECK_GE(active_count->resolution_adaptations, 0);
- RTC_DCHECK_GE(active_count->fps_adaptations, 0);
- RTC_DCHECK_GE(other_active->resolution_adaptations, 0);
- RTC_DCHECK_GE(other_active->fps_adaptations, 0);
-}
-
-void VideoStreamEncoderResourceManager::UpdateAdaptationStats(
- const VideoAdaptationCounters& total_counts,
- VideoAdaptationReason reason) {
- // Update active counts
- VideoAdaptationCounters& active_count = active_counts_[reason];
- VideoAdaptationCounters& other_active = active_counts_[OtherReason(reason)];
-
- OnAdaptationCountChanged(total_counts, &active_count, &other_active);
-
- encoder_stats_observer_->OnAdaptationChanged(
- reason, active_counts_[VideoAdaptationReason::kCpu],
- active_counts_[VideoAdaptationReason::kQuality]);
+ encode_usage_resource_->SetTargetFrameRate(target_frame_rate);
}
void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
VideoStreamEncoderObserver::AdaptationSettings cpu_settings(
IsResolutionScalingEnabled(degradation_preference_),
IsFramerateScalingEnabled(degradation_preference_));
VideoStreamEncoderObserver::AdaptationSettings quality_settings =
- quality_scaler_resource_.is_started()
+ quality_scaler_resource_->is_started()
? cpu_settings
: VideoStreamEncoderObserver::AdaptationSettings();
encoder_stats_observer_->UpdateAdaptationSettings(cpu_settings,
quality_settings);
}
-void VideoStreamEncoderResourceManager::MaybePerformQualityRampupExperiment() {
- if (!quality_scaler_resource_.is_started())
- return;
-
- if (quality_rampup_done_)
- return;
-
- int64_t now_ms = clock_->TimeInMilliseconds();
- uint32_t bw_kbps = encoder_rates_.has_value()
- ? encoder_rates_.value().bandwidth_allocation.kbps()
- : 0;
-
- bool try_quality_rampup = false;
- if (quality_rampup_experiment_.BwHigh(now_ms, bw_kbps)) {
- // Verify that encoder is at max bitrate and the QP is low.
- if (encoder_settings_ &&
- encoder_target_bitrate_bps_.value_or(0) ==
- encoder_settings_->video_codec().maxBitrate * 1000 &&
- quality_scaler_resource_.QpFastFilterLow()) {
- try_quality_rampup = true;
- }
- }
- // TODO(https://crbug.com/webrtc/11392): See if we can rely on the total
- // counts or the stats, and not the active counts.
- const VideoAdaptationCounters& qp_counts =
- active_counts_[VideoAdaptationReason::kQuality];
- const VideoAdaptationCounters& cpu_counts =
- active_counts_[VideoAdaptationReason::kCpu];
- if (try_quality_rampup && qp_counts.resolution_adaptations > 0 &&
- cpu_counts.Total() == 0) {
- RTC_LOG(LS_INFO) << "Reset quality limitations.";
- adaptation_processor_->ResetVideoSourceRestrictions();
- quality_rampup_done_ = true;
- }
-}
-
-void VideoStreamEncoderResourceManager::ResetActiveCounts() {
- active_counts_.clear();
- active_counts_[VideoAdaptationReason::kCpu] = VideoAdaptationCounters();
- active_counts_[VideoAdaptationReason::kQuality] = VideoAdaptationCounters();
-}
-
-std::string VideoStreamEncoderResourceManager::ActiveCountsToString() const {
- RTC_DCHECK_EQ(2, active_counts_.size());
+// static
+std::string VideoStreamEncoderResourceManager::ActiveCountsToString(
+ const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
+ active_counts) {
rtc::StringBuilder ss;
ss << "Downgrade counts: fps: {";
- for (auto& reason_count : active_counts_) {
+ for (auto& reason_count : active_counts) {
ss << ToString(reason_count.first) << ":";
ss << reason_count.second.fps_adaptations;
}
ss << "}, resolution {";
- for (auto& reason_count : active_counts_) {
+ for (auto& reason_count : active_counts) {
ss << ToString(reason_count.first) << ":";
ss << reason_count.second.resolution_adaptations;
}
@@ -690,4 +720,22 @@ std::string VideoStreamEncoderResourceManager::ActiveCountsToString() const {
return ss.Release();
}
+
+void VideoStreamEncoderResourceManager::OnQualityRampUp() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ // The VideoStreamEncoder makes the manager outlive the adaptation queue.
+ // This means that if the task gets executed, |this| has not been freed yet.
+ // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives
+ // the adaptation queue, add logic to prevent use-after-free on |this|.
+ resource_adaptation_queue_->PostTask([this] {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (!stream_adapter_) {
+ // The processor nulled before this task had a chance to execute. This
+ // happens if the processor is destroyed. No action needed.
+ return;
+ }
+ stream_adapter_->ClearRestrictions();
+ });
+ quality_rampup_experiment_.reset();
+}
} // namespace webrtc
diff --git a/video/adaptation/video_stream_encoder_resource_manager.h b/video/adaptation/video_stream_encoder_resource_manager.h
index d0e5455529..742d87bf1c 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.h
+++ b/video/adaptation/video_stream_encoder_resource_manager.h
@@ -11,6 +11,7 @@
#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_
#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_
+#include <atomic>
#include <map>
#include <memory>
#include <string>
@@ -19,7 +20,10 @@
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_counters.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video/video_frame.h"
@@ -28,17 +32,20 @@
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_config.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_stream_adapter.h"
#include "call/adaptation/video_stream_input_state_provider.h"
-#include "rtc_base/experiments/quality_rampup_experiment.h"
#include "rtc_base/experiments/quality_scaler_settings.h"
+#include "rtc_base/ref_count.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue.h"
#include "system_wrappers/include/clock.h"
#include "video/adaptation/encode_usage_resource.h"
#include "video/adaptation/overuse_frame_detector.h"
+#include "video/adaptation/quality_rampup_experiment_helper.h"
#include "video/adaptation/quality_scaler_resource.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
@@ -57,20 +64,30 @@ extern const int kDefaultInputPixelsHeight;
// The manager is also involved with various mitigations not part of the
// ResourceAdaptationProcessor code such as the inital frame dropping.
class VideoStreamEncoderResourceManager
- : public ResourceAdaptationProcessorListener {
+ : public VideoSourceRestrictionsListener,
+ public ResourceLimitationsListener,
+ public QualityRampUpExperimentListener {
public:
VideoStreamEncoderResourceManager(
VideoStreamInputStateProvider* input_state_provider,
- ResourceAdaptationProcessorInterface* adaptation_processor,
VideoStreamEncoderObserver* encoder_stats_observer,
Clock* clock,
bool experiment_cpu_load_estimator,
- std::unique_ptr<OveruseFrameDetector> overuse_detector);
+ std::unique_ptr<OveruseFrameDetector> overuse_detector,
+ DegradationPreferenceProvider* degradation_preference_provider);
~VideoStreamEncoderResourceManager() override;
- void SetDegradationPreferences(
- DegradationPreference degradation_preference,
- DegradationPreference effective_degradation_preference);
+ void Initialize(rtc::TaskQueue* encoder_queue,
+ rtc::TaskQueue* resource_adaptation_queue);
+ void SetAdaptationProcessor(
+ ResourceAdaptationProcessorInterface* adaptation_processor,
+ VideoStreamAdapter* stream_adapter);
+
+ // TODO(https://crbug.com/webrtc/11563): The degradation preference is a
+ // setting of the Processor, it does not belong to the Manager - can we get
+ // rid of this?
+ void SetDegradationPreferences(DegradationPreference degradation_preference);
+ DegradationPreference degradation_preference() const;
// Starts the encode usage resource. The quality scaler resource is
// automatically started on being configured.
@@ -99,48 +116,42 @@ class VideoStreamEncoderResourceManager
void OnFrameDropped(EncodedImageCallback::DropReason reason);
// Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order
- // to be able to update |active_counts_|, which is used...
- // - Legacy getStats() purposes.
- // - Preventing adapting up in some circumstances (which may be questionable).
- // TODO(hbos): Can we get rid of this?
- void MapResourceToReason(Resource* resource, VideoAdaptationReason reason);
- std::vector<Resource*> MappedResources() const;
- QualityScalerResource* quality_scaler_resource_for_testing();
+ // to update legacy getStats().
+ void MapResourceToReason(rtc::scoped_refptr<Resource> resource,
+ VideoAdaptationReason reason);
+ std::vector<rtc::scoped_refptr<Resource>> MappedResources() const;
+ std::vector<AdaptationConstraint*> AdaptationConstraints() const;
+ std::vector<AdaptationListener*> AdaptationListeners() const;
+ rtc::scoped_refptr<QualityScalerResource>
+ quality_scaler_resource_for_testing();
// If true, the VideoStreamEncoder should eexecute its logic to maybe drop
// frames baseed on size and bitrate.
bool DropInitialFrames() const;
- // ResourceAdaptationProcessorListener implementation.
- // Updates |video_source_restrictions_| and |active_counts_|.
+ // VideoSourceRestrictionsListener implementation.
+ // Updates |video_source_restrictions_|.
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) override;
-
- // For reasons of adaptation and statistics, we not only count the total
- // number of adaptations, but we also count the number of adaptations per
- // reason.
- // This method takes the new total number of adaptations and allocates that to
- // the "active" count - number of adaptations for the current reason.
- // The "other" count is the number of adaptations for the other reason.
- // This must be called for each adaptation step made.
- static void OnAdaptationCountChanged(
- const VideoAdaptationCounters& adaptation_count,
- VideoAdaptationCounters* active_count,
- VideoAdaptationCounters* other_active);
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) override;
+ void OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) override;
+
+ // QualityRampUpExperimentListener implementation.
+ void OnQualityRampUp() override;
private:
class InitialFrameDropper;
- VideoAdaptationReason GetReasonFromResource(const Resource& resource) const;
+ VideoAdaptationReason GetReasonFromResource(
+ rtc::scoped_refptr<Resource> resource) const;
CpuOveruseOptions GetCpuOveruseOptions() const;
int LastInputFrameSizeOrDefault() const;
- // Makes |video_source_restrictions_| up-to-date and informs the
- // |adaptation_listener_| if restrictions are changed, allowing the listener
- // to reconfigure the source accordingly.
- void MaybeUpdateVideoSourceRestrictions(const Resource* reason_resource);
// Calculates an up-to-date value of the target frame rate and informs the
// |encode_usage_resource_| of the new value.
void MaybeUpdateTargetFrameRate();
@@ -149,129 +160,125 @@ class VideoStreamEncoderResourceManager
void UpdateQualityScalerSettings(
absl::optional<VideoEncoder::QpThresholds> qp_thresholds);
- void UpdateAdaptationStats(const VideoAdaptationCounters& total_counts,
- VideoAdaptationReason reason);
void UpdateStatsAdaptationSettings() const;
- // Checks to see if we should execute the quality rampup experiment. The
- // experiment resets all video restrictions at the start of the call in the
- // case the bandwidth estimate is high enough.
- // TODO(https://crbug.com/webrtc/11222) Move experiment details into an inner
- // class.
- void MaybePerformQualityRampupExperiment();
-
- void ResetActiveCounts();
- std::string ActiveCountsToString() const;
-
- // TODO(hbos): Consider moving all of the manager's resources into separate
- // files for testability.
-
- // Does not trigger adaptations, only prevents adapting up based on
- // |active_counts_|.
- class PreventAdaptUpDueToActiveCounts final : public Resource {
- public:
- explicit PreventAdaptUpDueToActiveCounts(
- VideoStreamEncoderResourceManager* manager);
- ~PreventAdaptUpDueToActiveCounts() override = default;
-
- std::string name() const override {
- return "PreventAdaptUpDueToActiveCounts";
- }
-
- bool IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const override;
-
- private:
- VideoStreamEncoderResourceManager* manager_;
- } prevent_adapt_up_due_to_active_counts_;
+ static std::string ActiveCountsToString(
+ const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
+ active_counts);
+ // TODO(hbos): Add tests for manager's constraints.
// Does not trigger adaptations, only prevents adapting up resolution.
- class PreventIncreaseResolutionDueToBitrateResource final : public Resource {
+ class BitrateConstraint : public rtc::RefCountInterface,
+ public AdaptationConstraint {
public:
- explicit PreventIncreaseResolutionDueToBitrateResource(
- VideoStreamEncoderResourceManager* manager);
- ~PreventIncreaseResolutionDueToBitrateResource() override = default;
+ explicit BitrateConstraint(VideoStreamEncoderResourceManager* manager);
+ ~BitrateConstraint() override = default;
- std::string name() const override {
- return "PreventIncreaseResolutionDueToBitrateResource";
- }
+ void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
+ void OnEncoderSettingsUpdated(
+ absl::optional<EncoderSettings> encoder_settings);
+ void OnEncoderTargetBitrateUpdated(
+ absl::optional<uint32_t> encoder_target_bitrate_bps);
+ // AdaptationConstraint implementation.
+ std::string Name() const override { return "BitrateConstraint"; }
bool IsAdaptationUpAllowed(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const override;
+ rtc::scoped_refptr<Resource> reason_resource) const override;
private:
- VideoStreamEncoderResourceManager* manager_;
- } prevent_increase_resolution_due_to_bitrate_resource_;
+ // The |manager_| must be alive as long as this resource is added to the
+ // ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called.
+ VideoStreamEncoderResourceManager* const manager_;
+ TaskQueueBase* resource_adaptation_queue_;
+ absl::optional<EncoderSettings> encoder_settings_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ absl::optional<uint32_t> encoder_target_bitrate_bps_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ };
// Does not trigger adaptations, only prevents adapting up in BALANCED.
- class PreventAdaptUpInBalancedResource final : public Resource {
+ class BalancedConstraint : public rtc::RefCountInterface,
+ public AdaptationConstraint {
public:
- explicit PreventAdaptUpInBalancedResource(
- VideoStreamEncoderResourceManager* manager);
- ~PreventAdaptUpInBalancedResource() override = default;
+ BalancedConstraint(
+ VideoStreamEncoderResourceManager* manager,
+ DegradationPreferenceProvider* degradation_preference_provider);
+ ~BalancedConstraint() override = default;
- std::string name() const override {
- return "PreventAdaptUpInBalancedResource";
- }
+ void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
+ void OnEncoderTargetBitrateUpdated(
+ absl::optional<uint32_t> encoder_target_bitrate_bps);
+ // AdaptationConstraint implementation.
+ std::string Name() const override { return "BalancedConstraint"; }
bool IsAdaptationUpAllowed(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
- const Resource& reason_resource) const override;
+ rtc::scoped_refptr<Resource> reason_resource) const override;
private:
- VideoStreamEncoderResourceManager* manager_;
- } prevent_adapt_up_in_balanced_resource_;
-
- EncodeUsageResource encode_usage_resource_;
- QualityScalerResource quality_scaler_resource_;
+ // The |manager_| must be alive as long as this resource is added to the
+ // ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called.
+ VideoStreamEncoderResourceManager* const manager_;
+ TaskQueueBase* resource_adaptation_queue_;
+ absl::optional<uint32_t> encoder_target_bitrate_bps_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ DegradationPreferenceProvider* degradation_preference_provider_;
+ };
- VideoStreamInputStateProvider* const input_state_provider_;
- ResourceAdaptationProcessorInterface* const adaptation_processor_;
+ DegradationPreferenceProvider* const degradation_preference_provider_;
+ const rtc::scoped_refptr<BitrateConstraint> bitrate_constraint_;
+ const rtc::scoped_refptr<BalancedConstraint> balanced_constraint_;
+ const rtc::scoped_refptr<EncodeUsageResource> encode_usage_resource_;
+ const rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
+
+ rtc::TaskQueue* encoder_queue_;
+ rtc::TaskQueue* resource_adaptation_queue_;
+ VideoStreamInputStateProvider* const input_state_provider_
+ RTC_GUARDED_BY(encoder_queue_);
+ ResourceAdaptationProcessorInterface* adaptation_processor_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ VideoStreamAdapter* stream_adapter_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ // Thread-safe.
VideoStreamEncoderObserver* const encoder_stats_observer_;
- DegradationPreference degradation_preference_;
- DegradationPreference effective_degradation_preference_;
- VideoSourceRestrictions video_source_restrictions_;
+ DegradationPreference degradation_preference_ RTC_GUARDED_BY(encoder_queue_);
+ VideoSourceRestrictions video_source_restrictions_
+ RTC_GUARDED_BY(encoder_queue_);
const BalancedDegradationSettings balanced_settings_;
- Clock* clock_;
- const bool experiment_cpu_load_estimator_;
- const std::unique_ptr<InitialFrameDropper> initial_frame_dropper_;
- const bool quality_scaling_experiment_enabled_;
- absl::optional<uint32_t> encoder_target_bitrate_bps_;
- absl::optional<VideoEncoder::RateControlParameters> encoder_rates_;
- bool quality_rampup_done_;
- QualityRampupExperiment quality_rampup_experiment_;
- absl::optional<EncoderSettings> encoder_settings_;
+ Clock* clock_ RTC_GUARDED_BY(encoder_queue_);
+ const bool experiment_cpu_load_estimator_ RTC_GUARDED_BY(encoder_queue_);
+ const std::unique_ptr<InitialFrameDropper> initial_frame_dropper_
+ RTC_GUARDED_BY(encoder_queue_);
+ const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_);
+ absl::optional<uint32_t> encoder_target_bitrate_bps_
+ RTC_GUARDED_BY(encoder_queue_);
+ absl::optional<VideoEncoder::RateControlParameters> encoder_rates_
+ RTC_GUARDED_BY(encoder_queue_);
+ std::unique_ptr<QualityRampUpExperimentHelper> quality_rampup_experiment_
+ RTC_GUARDED_BY(encoder_queue_);
+ absl::optional<EncoderSettings> encoder_settings_
+ RTC_GUARDED_BY(encoder_queue_);
// Ties a resource to a reason for statistical reporting. This AdaptReason is
// also used by this module to make decisions about how to adapt up/down.
struct ResourceAndReason {
- ResourceAndReason(Resource* resource, VideoAdaptationReason reason)
+ ResourceAndReason(rtc::scoped_refptr<Resource> resource,
+ VideoAdaptationReason reason)
: resource(resource), reason(reason) {}
virtual ~ResourceAndReason() = default;
- Resource* const resource;
+ const rtc::scoped_refptr<Resource> resource;
const VideoAdaptationReason reason;
};
- std::vector<ResourceAndReason> resources_;
- // One AdaptationCounter for each reason, tracking the number of times we have
- // adapted for each reason. The sum of active_counts_ MUST always equal the
- // total adaptation provided by the VideoSourceRestrictions.
- // TODO(https://crbug.com/webrtc/11392): Move all active count logic to
- // encoder_stats_observer_; Counters used for deciding if the video resolution
- // or framerate is currently restricted, and if so, why, on a per degradation
- // preference basis.
- std::unordered_map<VideoAdaptationReason, VideoAdaptationCounters>
- active_counts_;
+ mutable Mutex resource_lock_;
+ std::vector<ResourceAndReason> resources_ RTC_GUARDED_BY(&resource_lock_);
};
} // namespace webrtc
diff --git a/video/adaptation/video_stream_encoder_resource_manager_unittest.cc b/video/adaptation/video_stream_encoder_resource_manager_unittest.cc
deleted file mode 100644
index 38ebba6334..0000000000
--- a/video/adaptation/video_stream_encoder_resource_manager_unittest.cc
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "video/adaptation/video_stream_encoder_resource_manager.h"
-
-#include "api/video/video_adaptation_counters.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-TEST(VideoStreamEncoderResourceManagerTest, FirstAdaptationDown_Fps) {
- VideoAdaptationCounters cpu;
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total(0, 1);
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu(0, 1);
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, FirstAdaptationDown_Resolution) {
- VideoAdaptationCounters cpu;
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total(1, 0);
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu(1, 0);
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, LastAdaptUp_Fps) {
- VideoAdaptationCounters cpu(0, 1);
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total;
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu;
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, LastAdaptUp_Resolution) {
- VideoAdaptationCounters cpu(1, 0);
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total;
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu;
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, AdaptUpWithBorrow_Resolution) {
- VideoAdaptationCounters cpu(0, 1);
- VideoAdaptationCounters qp(1, 0);
- VideoAdaptationCounters total(0, 1);
-
- // CPU adaptation for resolution, but no resolution adaptation left from CPU.
- // We then borrow the resolution adaptation from qp, and give qp the fps
- // adaptation from CPU.
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
-
- VideoAdaptationCounters expected_cpu(0, 0);
- VideoAdaptationCounters expected_qp(0, 1);
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, AdaptUpWithBorrow_Fps) {
- VideoAdaptationCounters cpu(1, 0);
- VideoAdaptationCounters qp(0, 1);
- VideoAdaptationCounters total(1, 0);
-
- // CPU adaptation for fps, but no fps adaptation left from CPU. We then borrow
- // the fps adaptation from qp, and give qp the resolution adaptation from CPU.
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
-
- VideoAdaptationCounters expected_cpu(0, 0);
- VideoAdaptationCounters expected_qp(1, 0);
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-} // namespace webrtc
diff --git a/video/call_stats.cc b/video/call_stats.cc
index 27e00ee7ca..d575e114d8 100644
--- a/video/call_stats.cc
+++ b/video/call_stats.cc
@@ -129,7 +129,7 @@ void CallStats::Process() {
max_rtt_ms_ = GetMaxRttMs(reports_);
avg_rtt_ms = GetNewAvgRttMs(reports_, avg_rtt_ms);
{
- rtc::CritScope lock(&avg_rtt_ms_lock_);
+ MutexLock lock(&avg_rtt_ms_lock_);
avg_rtt_ms_ = avg_rtt_ms;
}
@@ -178,7 +178,7 @@ int64_t CallStats::LastProcessedRtt() const {
// allow only reading this from the process thread (or TQ once we get there)
// so that the lock isn't necessary.
- rtc::CritScope cs(&avg_rtt_ms_lock_);
+ MutexLock lock(&avg_rtt_ms_lock_);
return avg_rtt_ms_;
}
diff --git a/video/call_stats.h b/video/call_stats.h
index 80030012b6..3bfb632446 100644
--- a/video/call_stats.h
+++ b/video/call_stats.h
@@ -18,7 +18,7 @@
#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "system_wrappers/include/clock.h"
@@ -90,7 +90,7 @@ class CallStats : public Module, public RtcpRttStats {
int64_t avg_rtt_ms_;
// Protects |avg_rtt_ms_|.
- rtc::CriticalSection avg_rtt_ms_lock_;
+ mutable Mutex avg_rtt_ms_lock_;
// |sum_avg_rtt_ms_|, |num_avg_rtt_| and |time_of_first_rtt_ms_| are only used
// on the ProcessThread when running. When the Process Thread is not running,
diff --git a/video/call_stats2.cc b/video/call_stats2.cc
index af0da0f702..faf08d69bc 100644
--- a/video/call_stats2.cc
+++ b/video/call_stats2.cc
@@ -12,6 +12,7 @@
#include <algorithm>
#include <memory>
+#include <utility>
#include "absl/algorithm/container.h"
#include "modules/utility/include/process_thread.h"
@@ -64,9 +65,10 @@ int64_t GetNewAvgRttMs(const std::list<CallStats::RttTime>& reports,
} // namespace
+constexpr TimeDelta CallStats::kUpdateInterval;
+
CallStats::CallStats(Clock* clock, TaskQueueBase* task_queue)
: clock_(clock),
- last_process_time_(clock_->TimeInMilliseconds()),
max_rtt_ms_(-1),
avg_rtt_ms_(-1),
sum_avg_rtt_ms_(0),
@@ -75,56 +77,36 @@ CallStats::CallStats(Clock* clock, TaskQueueBase* task_queue)
task_queue_(task_queue) {
RTC_DCHECK(task_queue_);
process_thread_checker_.Detach();
- task_queue_->PostDelayedTask(
- ToQueuedTask(task_safety_flag_, [this]() { RunTimer(); }),
- kUpdateIntervalMs);
+ repeating_task_ =
+ RepeatingTaskHandle::DelayedStart(task_queue_, kUpdateInterval, [this]() {
+ UpdateAndReport();
+ return kUpdateInterval;
+ });
}
CallStats::~CallStats() {
RTC_DCHECK_RUN_ON(&construction_thread_checker_);
RTC_DCHECK(observers_.empty());
- task_safety_flag_->SetNotAlive();
+ repeating_task_.Stop();
UpdateHistograms();
}
-void CallStats::RunTimer() {
- RTC_DCHECK_RUN_ON(&construction_thread_checker_);
-
- UpdateAndReport();
-
- uint32_t interval =
- last_process_time_ + kUpdateIntervalMs - clock_->TimeInMilliseconds();
-
- task_queue_->PostDelayedTask(
- ToQueuedTask(task_safety_flag_, [this]() { RunTimer(); }), interval);
-}
-
void CallStats::UpdateAndReport() {
RTC_DCHECK_RUN_ON(&construction_thread_checker_);
- int64_t now = clock_->TimeInMilliseconds();
- last_process_time_ = now;
-
- // |avg_rtt_ms_| is allowed to be read on the construction thread since that's
- // the only thread that modifies the value.
- int64_t avg_rtt_ms = avg_rtt_ms_;
- RemoveOldReports(now, &reports_);
+ RemoveOldReports(clock_->CurrentTime().ms(), &reports_);
max_rtt_ms_ = GetMaxRttMs(reports_);
- avg_rtt_ms = GetNewAvgRttMs(reports_, avg_rtt_ms);
- {
- rtc::CritScope lock(&avg_rtt_ms_lock_);
- avg_rtt_ms_ = avg_rtt_ms;
- }
+ avg_rtt_ms_ = GetNewAvgRttMs(reports_, avg_rtt_ms_);
// If there is a valid rtt, update all observers with the max rtt.
if (max_rtt_ms_ >= 0) {
- RTC_DCHECK_GE(avg_rtt_ms, 0);
+ RTC_DCHECK_GE(avg_rtt_ms_, 0);
for (CallStatsObserver* observer : observers_)
- observer->OnRttUpdate(avg_rtt_ms, max_rtt_ms_);
+ observer->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_);
// Sum for Histogram of average RTT reported over the entire call.
- sum_avg_rtt_ms_ += avg_rtt_ms;
+ sum_avg_rtt_ms_ += avg_rtt_ms_;
++num_avg_rtt_;
}
}
@@ -146,23 +128,24 @@ int64_t CallStats::LastProcessedRtt() const {
return avg_rtt_ms_;
}
-int64_t CallStats::LastProcessedRttFromProcessThread() const {
- RTC_DCHECK_RUN_ON(&process_thread_checker_);
- rtc::CritScope lock(&avg_rtt_ms_lock_);
- return avg_rtt_ms_;
-}
-
void CallStats::OnRttUpdate(int64_t rtt) {
- RTC_DCHECK_RUN_ON(&process_thread_checker_);
-
+ // This callback may for some RtpRtcp module instances (video send stream) be
+ // invoked from a separate task queue, in other cases, we should already be
+ // on the correct TQ.
int64_t now_ms = clock_->TimeInMilliseconds();
- task_queue_->PostTask(ToQueuedTask(task_safety_flag_, [this, rtt, now_ms]() {
+ auto update = [this, rtt, now_ms]() {
RTC_DCHECK_RUN_ON(&construction_thread_checker_);
reports_.push_back(RttTime(rtt, now_ms));
if (time_of_first_rtt_ms_ == -1)
time_of_first_rtt_ms_ = now_ms;
UpdateAndReport();
- }));
+ };
+
+ if (task_queue_->IsCurrent()) {
+ update();
+ } else {
+ task_queue_->PostTask(ToQueuedTask(task_safety_, std::move(update)));
+ }
}
void CallStats::UpdateHistograms() {
diff --git a/video/call_stats2.h b/video/call_stats2.h
index f06d33daf7..822685320f 100644
--- a/video/call_stats2.h
+++ b/video/call_stats2.h
@@ -14,13 +14,14 @@
#include <list>
#include <memory>
+#include "api/units/timestamp.h"
#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -29,7 +30,7 @@ namespace internal {
class CallStats {
public:
// Time interval for updating the observers.
- static constexpr int64_t kUpdateIntervalMs = 1000;
+ static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(1000);
CallStats(Clock* clock, TaskQueueBase* task_queue);
~CallStats();
@@ -68,9 +69,6 @@ class CallStats {
private:
// Part of the RtcpRttStats implementation. Called by RtcpRttStatsImpl.
void OnRttUpdate(int64_t rtt);
- int64_t LastProcessedRttFromProcessThread() const;
-
- void RunTimer();
void UpdateAndReport();
@@ -80,41 +78,41 @@ class CallStats {
class RtcpRttStatsImpl : public RtcpRttStats {
public:
- explicit RtcpRttStatsImpl(CallStats* owner) : owner_(owner) {
- process_thread_checker_.Detach();
- }
+ explicit RtcpRttStatsImpl(CallStats* owner) : owner_(owner) {}
~RtcpRttStatsImpl() override = default;
private:
void OnRttUpdate(int64_t rtt) override {
- RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ // For video send streams (video/video_send_stream.cc), the RtpRtcp module
+ // is currently created on a transport worker TaskQueue and not the worker
+ // thread - which is what happens in other cases. We should probably fix
+ // that so that the call consistently comes in on the right thread.
owner_->OnRttUpdate(rtt);
}
int64_t LastProcessedRtt() const override {
- RTC_DCHECK_RUN_ON(&process_thread_checker_);
- return owner_->LastProcessedRttFromProcessThread();
+ // This call path shouldn't be used anymore. This impl is only for
+ // propagating the rtt from the RtpRtcp module, which does not call
+ // LastProcessedRtt(). Down the line we should consider removing
+ // LastProcessedRtt() and use the interface for event notifications only.
+ RTC_NOTREACHED() << "Legacy call path";
+ return 0;
}
CallStats* const owner_;
- SequenceChecker process_thread_checker_;
} rtcp_rtt_stats_impl_{this};
Clock* const clock_;
- // The last time 'Process' resulted in statistic update.
- int64_t last_process_time_ RTC_GUARDED_BY(construction_thread_checker_);
+ // Used to regularly call UpdateAndReport().
+ RepeatingTaskHandle repeating_task_
+ RTC_GUARDED_BY(construction_thread_checker_);
+
// The last RTT in the statistics update (zero if there is no valid estimate).
int64_t max_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_);
- // Accessed from two separate threads.
- // |avg_rtt_ms_| may be read on the construction thread without a lock.
- // |avg_rtt_ms_lock_| must be held elsewhere for reading.
- // |avg_rtt_ms_lock_| must be held on the construction thread for writing.
- int64_t avg_rtt_ms_;
-
- // Protects |avg_rtt_ms_|.
- rtc::CriticalSection avg_rtt_ms_lock_;
+ // Last reported average RTT value.
+ int64_t avg_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_);
// |sum_avg_rtt_ms_|, |num_avg_rtt_| and |time_of_first_rtt_ms_| are only used
// on the ProcessThread when running. When the Process Thread is not running,
@@ -139,8 +137,7 @@ class CallStats {
TaskQueueBase* const task_queue_;
// Used to signal destruction to potentially pending tasks.
- PendingTaskSafetyFlag::Pointer task_safety_flag_ =
- PendingTaskSafetyFlag::Create();
+ ScopedTaskSafety task_safety_;
RTC_DISALLOW_COPY_AND_ASSIGN(CallStats);
};
diff --git a/video/call_stats2_unittest.cc b/video/call_stats2_unittest.cc
index 58af6fd386..b3d43cb92a 100644
--- a/video/call_stats2_unittest.cc
+++ b/video/call_stats2_unittest.cc
@@ -33,7 +33,7 @@ class MockStatsObserver : public CallStatsObserver {
MockStatsObserver() {}
virtual ~MockStatsObserver() {}
- MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override));
};
class CallStats2Test : public ::testing::Test {
@@ -96,12 +96,13 @@ TEST_F(CallStats2Test, ProcessTime) {
.Times(2)
.WillOnce(InvokeWithoutArgs([this] {
// Advance clock and verify we get an update.
- fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateIntervalMs);
+ fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms());
}))
.WillRepeatedly(InvokeWithoutArgs([this] {
AsyncSimulateRttUpdate(kRtt2);
// Advance clock just too little to get an update.
- fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateIntervalMs - 1);
+ fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms() -
+ 1);
}));
// In case you're reading this and wondering how this number is arrived at,
@@ -256,7 +257,7 @@ TEST_F(CallStats2Test, LastProcessedRtt) {
.Times(AnyNumber())
.WillOnce(InvokeWithoutArgs([this] {
EXPECT_EQ(kAvgRtt1, call_stats_.LastProcessedRtt());
- fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateIntervalMs);
+ fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms());
AsyncSimulateRttUpdate(kRttLow);
AsyncSimulateRttUpdate(kRttHigh);
}))
@@ -272,7 +273,7 @@ TEST_F(CallStats2Test, LastProcessedRtt) {
// Set a first values and verify that LastProcessedRtt initially returns the
// average rtt.
- fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateIntervalMs);
+ fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms());
AsyncSimulateRttUpdate(kRttLow);
loop_.Run();
EXPECT_EQ(kAvgRtt2, call_stats_.LastProcessedRtt());
@@ -292,7 +293,7 @@ TEST_F(CallStats2Test, ProducesHistogramMetrics) {
AsyncSimulateRttUpdate(kRtt);
loop_.Run();
fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds *
- CallStats::kUpdateIntervalMs);
+ CallStats::kUpdateInterval.ms());
AsyncSimulateRttUpdate(kRtt);
loop_.Run();
diff --git a/video/call_stats_unittest.cc b/video/call_stats_unittest.cc
index c560ccbee6..e85c4f8c54 100644
--- a/video/call_stats_unittest.cc
+++ b/video/call_stats_unittest.cc
@@ -32,7 +32,7 @@ class MockStatsObserver : public CallStatsObserver {
MockStatsObserver() {}
virtual ~MockStatsObserver() {}
- MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override));
};
class CallStatsTest : public ::testing::Test {
diff --git a/video/encoder_rtcp_feedback.cc b/video/encoder_rtcp_feedback.cc
index a736d83b82..b81ff6120f 100644
--- a/video/encoder_rtcp_feedback.cc
+++ b/video/encoder_rtcp_feedback.cc
@@ -56,7 +56,7 @@ void EncoderRtcpFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
RTC_DCHECK(HasSsrc(ssrc));
{
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (time_last_intra_request_ms_ + min_keyframe_send_interval_ms_ > now_ms) {
return;
}
diff --git a/video/encoder_rtcp_feedback.h b/video/encoder_rtcp_feedback.h
index b5dd0288f3..3bd1cb91f0 100644
--- a/video/encoder_rtcp_feedback.h
+++ b/video/encoder_rtcp_feedback.h
@@ -15,7 +15,7 @@
#include "api/video/video_stream_encoder_interface.h"
#include "call/rtp_video_sender_interface.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -50,8 +50,8 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver,
const RtpVideoSenderInterface* rtp_video_sender_;
VideoStreamEncoderInterface* const video_stream_encoder_;
- rtc::CriticalSection crit_;
- int64_t time_last_intra_request_ms_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ int64_t time_last_intra_request_ms_ RTC_GUARDED_BY(mutex_);
const int min_keyframe_send_interval_ms_;
};
diff --git a/video/end_to_end_tests/bandwidth_tests.cc b/video/end_to_end_tests/bandwidth_tests.cc
index 16b35d68f8..6428207e62 100644
--- a/video/end_to_end_tests/bandwidth_tests.cc
+++ b/video/end_to_end_tests/bandwidth_tests.cc
@@ -16,8 +16,9 @@
#include "api/video/video_bitrate_allocation.h"
#include "call/fake_network_pipe.h"
#include "call/simulated_network.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/rate_limiter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "system_wrappers/include/sleep.h"
@@ -205,8 +206,9 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
~BweObserver() override {
// Block until all already posted tasks run to avoid races when such task
- // accesses |this|.
- SendTask(RTC_FROM_HERE, task_queue_, [] {});
+ // accesses |this|. Also make sure we free |rtp_rtcp_| on the correct
+ // thread/task queue.
+ SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; });
}
std::unique_ptr<test::PacketTransport> CreateReceiveTransport(
@@ -237,13 +239,13 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
encoder_config->max_bitrate_bps = 2000000;
ASSERT_EQ(1u, receive_configs->size());
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.receiver_only = true;
config.clock = clock_;
config.outgoing_transport = receive_transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
config.local_media_ssrc = (*receive_configs)[0].rtp.local_ssrc;
- rtp_rtcp_ = RtpRtcp::Create(config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
rtp_rtcp_->SetRemoteSSRC((*receive_configs)[0].rtp.remote_ssrc);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
@@ -302,7 +304,7 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
Clock* const clock_;
uint32_t sender_ssrc_;
int remb_bitrate_bps_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
test::PacketTransport* receive_transport_;
TestState state_;
RateLimiter retransmission_rate_limiter_;
@@ -352,7 +354,7 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) {
// Make sure not to trigger on any default zero bitrates.
if (parameters.bitrate.get_sum_bps() == 0)
return;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
observation_complete_.Set();
}
@@ -374,7 +376,7 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) {
for (int i = 0; i < kDefaultTimeoutMs; ++i) {
VideoSendStream::Stats stats = send_stream_->GetStats();
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if ((stats.target_media_bitrate_bps + 500) / 1000 ==
static_cast<int>(bitrate_kbps_)) {
return;
@@ -398,11 +400,11 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) {
private:
TaskQueueBase* const task_queue_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
VideoSendStream* send_stream_;
test::VideoEncoderProxyFactory encoder_factory_;
std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
- uint32_t bitrate_kbps_ RTC_GUARDED_BY(crit_);
+ uint32_t bitrate_kbps_ RTC_GUARDED_BY(mutex_);
} test(task_queue());
RunBaseTest(&test);
diff --git a/video/end_to_end_tests/codec_tests.cc b/video/end_to_end_tests/codec_tests.cc
index b73b289ec8..d10e08daf1 100644
--- a/video/end_to_end_tests/codec_tests.cc
+++ b/video/end_to_end_tests/codec_tests.cc
@@ -34,18 +34,14 @@ enum : int { // The first valid value is 1.
};
} // namespace
-class CodecEndToEndTest : public test::CallTest,
- public ::testing::WithParamInterface<std::string> {
+class CodecEndToEndTest : public test::CallTest {
public:
- CodecEndToEndTest() : field_trial_(GetParam()) {
+ CodecEndToEndTest() {
RegisterRtpExtension(
RtpExtension(RtpExtension::kColorSpaceUri, kColorSpaceExtensionId));
RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
kVideoRotationExtensionId));
}
-
- private:
- test::ScopedFieldTrials field_trial_;
};
class CodecObserver : public test::EndToEndTest,
@@ -121,13 +117,7 @@ class CodecObserver : public test::EndToEndTest,
int frame_counter_;
};
-INSTANTIATE_TEST_SUITE_P(
- GenericDescriptor,
- CodecEndToEndTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP8Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -137,7 +127,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP8Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -148,7 +138,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
}
#if defined(RTC_ENABLE_VP9)
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -158,7 +148,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -168,7 +158,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -179,7 +169,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest,
+TEST_F(CodecEndToEndTest,
SendsAndReceivesVP9ExplicitColorSpaceWithHdrMetadata) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
@@ -192,7 +182,7 @@ TEST_P(CodecEndToEndTest,
}
// Mutiplex tests are using VP9 as the underlying implementation.
-TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplex) {
InternalEncoderFactory internal_encoder_factory;
InternalDecoderFactory internal_decoder_factory;
test::FunctionVideoEncoderFactory encoder_factory(
@@ -211,7 +201,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) {
InternalEncoderFactory internal_encoder_factory;
InternalDecoderFactory internal_decoder_factory;
test::FunctionVideoEncoderFactory encoder_factory(
diff --git a/video/end_to_end_tests/extended_reports_tests.cc b/video/end_to_end_tests/extended_reports_tests.cc
index f4938c943b..ed73800fd7 100644
--- a/video/end_to_end_tests/extended_reports_tests.cc
+++ b/video/end_to_end_tests/extended_reports_tests.cc
@@ -31,8 +31,8 @@
#include "call/video_send_stream.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
#include "test/call_test.h"
@@ -83,7 +83,7 @@ class RtcpXrObserver : public test::EndToEndTest {
private:
// Receive stream should send RR packets (and RRTR packets if enabled).
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
@@ -100,7 +100,7 @@ class RtcpXrObserver : public test::EndToEndTest {
}
// Send stream should send SR packets (and DLRR packets if enabled).
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
@@ -198,16 +198,16 @@ class RtcpXrObserver : public test::EndToEndTest {
static const int kNumRtcpReportPacketsToObserve = 5;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const bool enable_rrtr_;
const bool expect_target_bitrate_;
const bool enable_zero_target_bitrate_;
const VideoEncoderConfig::ContentType content_type_;
int sent_rtcp_sr_;
- int sent_rtcp_rr_ RTC_GUARDED_BY(&crit_);
- int sent_rtcp_rrtr_ RTC_GUARDED_BY(&crit_);
- bool sent_rtcp_target_bitrate_ RTC_GUARDED_BY(&crit_);
- bool sent_zero_rtcp_target_bitrate_ RTC_GUARDED_BY(&crit_);
+ int sent_rtcp_rr_ RTC_GUARDED_BY(&mutex_);
+ int sent_rtcp_rrtr_ RTC_GUARDED_BY(&mutex_);
+ bool sent_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_);
+ bool sent_zero_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_);
int sent_rtcp_dlrr_;
BuiltInNetworkBehaviorConfig forward_transport_config_;
SimulatedNetwork* send_simulated_network_;
diff --git a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc
index c55c4dbee1..c9e022642f 100644
--- a/video/end_to_end_tests/fec_tests.cc
+++ b/video/end_to_end_tests/fec_tests.cc
@@ -20,6 +20,7 @@
#include "modules/rtp_rtcp/source/byte_io.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/call_test.h"
#include "test/field_trial.h"
#include "test/gmock.h"
@@ -59,7 +60,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -98,7 +99,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) {
}
void OnFrame(const VideoFrame& video_frame) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Rendering frame with timestamp of packet that was dropped -> FEC
// protection worked.
auto it = dropped_timestamps_.find(video_frame.timestamp());
@@ -137,15 +138,15 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) {
<< "Timed out waiting for dropped frames to be rendered.";
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
std::unique_ptr<VideoEncoder> encoder_;
test::FunctionVideoEncoderFactory encoder_factory_;
InternalDecoderFactory decoder_factory_;
- std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(crit_);
+ std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_);
// Several packets can have the same timestamp.
- std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(crit_);
+ std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(mutex_);
Random random_;
- int num_packets_sent_ RTC_GUARDED_BY(crit_);
+ int num_packets_sent_ RTC_GUARDED_BY(mutex_);
} test;
RunBaseTest(&test);
@@ -169,7 +170,7 @@ class FlexfecRenderObserver : public test::EndToEndTest,
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -247,7 +248,7 @@ class FlexfecRenderObserver : public test::EndToEndTest,
EXPECT_EQ(1U, report_blocks.size());
EXPECT_EQ(test::CallTest::kFlexfecSendSsrc,
report_blocks[0].source_ssrc());
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
received_flexfec_rtcp_ = true;
}
}
@@ -273,7 +274,7 @@ class FlexfecRenderObserver : public test::EndToEndTest,
void OnFrame(const VideoFrame& video_frame) override {
EXPECT_EQ(kVideoRotation_90, video_frame.rotation());
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Rendering frame with timestamp of packet that was dropped -> FEC
// protection worked.
auto it = dropped_timestamps_.find(video_frame.timestamp());
@@ -321,13 +322,13 @@ class FlexfecRenderObserver : public test::EndToEndTest,
<< "Timed out waiting for dropped frames to be rendered.";
}
- rtc::CriticalSection crit_;
- std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_);
// Several packets can have the same timestamp.
- std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(crit_);
+ std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(mutex_);
const bool enable_nack_;
const bool expect_flexfec_rtcp_;
- bool received_flexfec_rtcp_ RTC_GUARDED_BY(crit_);
+ bool received_flexfec_rtcp_ RTC_GUARDED_BY(mutex_);
Random random_;
int num_packets_sent_;
};
@@ -360,7 +361,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock_(&crit_);
+ MutexLock lock_(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -424,7 +425,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock_(&crit_);
+ MutexLock lock_(&mutex_);
if (state_ == kVerifyUlpfecPacketNotInNackList) {
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
@@ -503,8 +504,8 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) {
kVerifyUlpfecPacketNotInNackList,
} state_;
- rtc::CriticalSection crit_;
- uint16_t ulpfec_sequence_number_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ uint16_t ulpfec_sequence_number_ RTC_GUARDED_BY(&mutex_);
bool has_last_sequence_number_;
uint16_t last_sequence_number_;
test::FunctionVideoEncoderFactory encoder_factory_;
diff --git a/video/end_to_end_tests/histogram_tests.cc b/video/end_to_end_tests/histogram_tests.cc
index dd6de2543d..fa71c15e98 100644
--- a/video/end_to_end_tests/histogram_tests.cc
+++ b/video/end_to_end_tests/histogram_tests.cc
@@ -11,6 +11,7 @@
#include "absl/types/optional.h"
#include "api/test/video/function_video_encoder_factory.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/metrics.h"
#include "test/call_test.h"
#include "test/gtest.h"
@@ -59,7 +60,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx,
if (video_frame.ntp_time_ms() > 0 &&
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
video_frame.ntp_time_ms()) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++num_frames_received_;
}
}
@@ -82,7 +83,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx,
bool MinNumberOfFramesReceived() const {
const int kMinRequiredHistogramSamples = 200;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return num_frames_received_ > kMinRequiredHistogramSamples;
}
@@ -131,13 +132,13 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx,
EXPECT_TRUE(Wait()) << "Timed out waiting for min frames to be received.";
}
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
const bool use_rtx_;
const bool use_fec_;
const bool screenshare_;
test::FunctionVideoEncoderFactory encoder_factory_;
absl::optional<int64_t> start_runtime_ms_;
- int num_frames_received_ RTC_GUARDED_BY(&crit_);
+ int num_frames_received_ RTC_GUARDED_BY(&mutex_);
} test(use_rtx, use_fec, screenshare);
metrics::Reset();
diff --git a/video/end_to_end_tests/multi_codec_receive_tests.cc b/video/end_to_end_tests/multi_codec_receive_tests.cc
index 354ee44072..d8ff5dd1cc 100644
--- a/video/end_to_end_tests/multi_codec_receive_tests.cc
+++ b/video/end_to_end_tests/multi_codec_receive_tests.cc
@@ -19,6 +19,7 @@
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/call_test.h"
#include "test/gmock.h"
@@ -65,7 +66,7 @@ class FrameObserver : public test::RtpRtcpObserver,
FrameObserver() : test::RtpRtcpObserver(test::CallTest::kDefaultTimeoutMs) {}
void Reset(uint8_t expected_payload_type) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_sent_frames_ = 0;
num_rendered_frames_ = 0;
expected_payload_type_ = expected_payload_type;
@@ -74,7 +75,7 @@ class FrameObserver : public test::RtpRtcpObserver,
private:
// Sends kFramesToObserve.
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -103,7 +104,7 @@ class FrameObserver : public test::RtpRtcpObserver,
// Verifies that all sent frames are decoded and rendered.
void OnFrame(const VideoFrame& rendered_frame) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.timestamp()));
// Remove old timestamps too, only the newest decoded frame is rendered.
@@ -116,12 +117,12 @@ class FrameObserver : public test::RtpRtcpObserver,
}
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
absl::optional<uint32_t> last_timestamp_; // Only accessed from pacer thread.
- absl::optional<uint8_t> expected_payload_type_ RTC_GUARDED_BY(crit_);
- int num_sent_frames_ RTC_GUARDED_BY(crit_) = 0;
- int num_rendered_frames_ RTC_GUARDED_BY(crit_) = 0;
- std::vector<uint32_t> sent_timestamps_ RTC_GUARDED_BY(crit_);
+ absl::optional<uint8_t> expected_payload_type_ RTC_GUARDED_BY(mutex_);
+ int num_sent_frames_ RTC_GUARDED_BY(mutex_) = 0;
+ int num_rendered_frames_ RTC_GUARDED_BY(mutex_) = 0;
+ std::vector<uint32_t> sent_timestamps_ RTC_GUARDED_BY(mutex_);
};
} // namespace
diff --git a/video/end_to_end_tests/network_state_tests.cc b/video/end_to_end_tests/network_state_tests.cc
index a0977ac773..9abde3bb32 100644
--- a/video/end_to_end_tests/network_state_tests.cc
+++ b/video/end_to_end_tests/network_state_tests.cc
@@ -15,6 +15,7 @@
#include "call/fake_network_pipe.h"
#include "call/simulated_network.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "system_wrappers/include/sleep.h"
#include "test/call_test.h"
@@ -60,19 +61,19 @@ class NetworkStateEndToEndTest : public test::CallTest {
bool SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
need_rtp_ = false;
return true;
}
bool SendRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
need_rtcp_ = false;
return true;
}
bool need_rtp_;
bool need_rtcp_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
};
void VerifyNewVideoSendStreamsRespectNetworkState(
MediaType network_to_bring_up,
@@ -177,7 +178,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
down_frames_(0) {}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
if (rtp_packet.payload_size() == 0)
@@ -188,7 +189,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
++sender_rtcp_;
packet_event_.Set();
return SEND_PACKET;
@@ -200,7 +201,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
++receiver_rtcp_;
packet_event_.Set();
return SEND_PACKET;
@@ -239,7 +240,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
// Sender-side network down.
sender_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkDown);
{
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
// After network goes down we shouldn't be encoding more frames.
sender_state_ = kNetworkDown;
}
@@ -259,7 +260,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
// Network back up again for both.
{
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
// It's OK to encode frames again, as we're about to bring up the
// network.
sender_state_ = kNetworkUp;
@@ -277,7 +278,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
int32_t Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) override {
{
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
if (sender_state_ == kNetworkDown) {
++down_frames_;
EXPECT_LE(down_frames_, 1)
@@ -298,7 +299,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
int initial_sender_rtcp;
int initial_receiver_rtcp;
{
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
initial_sender_rtp = sender_rtp_;
initial_sender_rtcp = sender_rtcp_;
initial_receiver_rtcp = receiver_rtcp_;
@@ -308,7 +309,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
while (!sender_done || !receiver_done) {
packet_event_.Wait(kSilenceTimeoutMs);
int64_t time_now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock(&test_crit_);
+ MutexLock lock(&test_mutex_);
if (sender_down) {
ASSERT_LE(sender_rtp_ - initial_sender_rtp - sender_padding_,
kNumAcceptedDowntimeRtp)
@@ -340,18 +341,18 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
}
TaskQueueBase* const task_queue_;
- rtc::CriticalSection test_crit_;
+ Mutex test_mutex_;
rtc::Event encoded_frames_;
rtc::Event packet_event_;
Call* sender_call_;
Call* receiver_call_;
test::VideoEncoderProxyFactory encoder_factory_;
- NetworkState sender_state_ RTC_GUARDED_BY(test_crit_);
- int sender_rtp_ RTC_GUARDED_BY(test_crit_);
- int sender_padding_ RTC_GUARDED_BY(test_crit_);
- int sender_rtcp_ RTC_GUARDED_BY(test_crit_);
- int receiver_rtcp_ RTC_GUARDED_BY(test_crit_);
- int down_frames_ RTC_GUARDED_BY(test_crit_);
+ NetworkState sender_state_ RTC_GUARDED_BY(test_mutex_);
+ int sender_rtp_ RTC_GUARDED_BY(test_mutex_);
+ int sender_padding_ RTC_GUARDED_BY(test_mutex_);
+ int sender_rtcp_ RTC_GUARDED_BY(test_mutex_);
+ int receiver_rtcp_ RTC_GUARDED_BY(test_mutex_);
+ int down_frames_ RTC_GUARDED_BY(test_mutex_);
} test(task_queue());
RunBaseTest(&test);
diff --git a/video/end_to_end_tests/retransmission_tests.cc b/video/end_to_end_tests/retransmission_tests.cc
index c28b12960f..19eb38d388 100644
--- a/video/end_to_end_tests/retransmission_tests.cc
+++ b/video/end_to_end_tests/retransmission_tests.cc
@@ -19,6 +19,7 @@
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/call_test.h"
#include "test/field_trial.h"
@@ -58,7 +59,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -95,7 +96,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
nacks_left_ -= parser.nack()->num_packets();
@@ -116,12 +117,12 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) {
"rendered.";
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
std::set<uint16_t> dropped_packets_;
std::set<uint16_t> retransmitted_packets_;
uint64_t sent_rtp_packets_;
int packets_left_to_drop_;
- int nacks_left_ RTC_GUARDED_BY(&crit_);
+ int nacks_left_ RTC_GUARDED_BY(&mutex_);
} test;
RunBaseTest(&test);
@@ -290,7 +291,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -308,7 +309,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
if (!nack_enabled_)
@@ -319,7 +320,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
}
void OnFrame(const VideoFrame& video_frame) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (received_pli_ &&
video_frame.timestamp() > highest_dropped_timestamp_) {
observation_complete_.Set();
@@ -343,12 +344,12 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
"rendered afterwards.";
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
int rtp_history_ms_;
bool nack_enabled_;
- uint32_t highest_dropped_timestamp_ RTC_GUARDED_BY(&crit_);
- int frames_to_drop_ RTC_GUARDED_BY(&crit_);
- bool received_pli_ RTC_GUARDED_BY(&crit_);
+ uint32_t highest_dropped_timestamp_ RTC_GUARDED_BY(&mutex_);
+ int frames_to_drop_ RTC_GUARDED_BY(&mutex_);
+ bool received_pli_ RTC_GUARDED_BY(&mutex_);
} test(rtp_history_ms);
RunBaseTest(&test);
@@ -382,7 +383,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx,
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -427,7 +428,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx,
void OnFrame(const VideoFrame& frame) override {
EXPECT_EQ(kVideoRotation_90, frame.rotation());
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (frame.timestamp() == retransmitted_timestamp_)
observation_complete_.Set();
rendered_timestamps_.push_back(frame.timestamp());
@@ -502,7 +503,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx,
return kFakeVideoSendPayloadType;
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
rtc::VideoSinkInterface<VideoFrame>* orig_renderer_ = nullptr;
const int payload_type_;
const uint32_t retransmission_ssrc_;
@@ -510,8 +511,8 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx,
test::FunctionVideoEncoderFactory encoder_factory_;
const std::string payload_name_;
int marker_bits_observed_;
- uint32_t retransmitted_timestamp_ RTC_GUARDED_BY(&crit_);
- std::vector<uint32_t> rendered_timestamps_ RTC_GUARDED_BY(&crit_);
+ uint32_t retransmitted_timestamp_ RTC_GUARDED_BY(&mutex_);
+ std::vector<uint32_t> rendered_timestamps_ RTC_GUARDED_BY(&mutex_);
} test(enable_rtx, enable_red);
RunBaseTest(&test);
diff --git a/video/end_to_end_tests/rtp_rtcp_tests.cc b/video/end_to_end_tests/rtp_rtcp_tests.cc
index 71783febfe..76018027d6 100644
--- a/video/end_to_end_tests/rtp_rtcp_tests.cc
+++ b/video/end_to_end_tests/rtp_rtcp_tests.cc
@@ -16,6 +16,7 @@
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/call_test.h"
#include "test/gtest.h"
@@ -46,7 +47,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (++sent_rtp_ % 3 == 0)
return DROP_PACKET;
@@ -54,7 +55,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++sent_rtcp_;
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
@@ -105,11 +106,11 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
}
RtcpMode rtcp_mode_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
// Must be protected since RTCP can be sent by both the process thread
// and the pacer thread.
- int sent_rtp_ RTC_GUARDED_BY(&crit_);
- int sent_rtcp_ RTC_GUARDED_BY(&crit_);
+ int sent_rtp_ RTC_GUARDED_BY(&mutex_);
+ int sent_rtcp_ RTC_GUARDED_BY(&mutex_);
} test(rtcp_mode);
RunBaseTest(&test);
@@ -176,7 +177,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
}
void ResetExpectedSsrcs(size_t num_expected_ssrcs) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ssrc_observed_.clear();
ssrcs_to_observe_ = num_expected_ssrcs;
}
@@ -185,7 +186,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
void ValidateTimestampGap(uint32_t ssrc,
uint32_t timestamp,
bool only_padding)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
static const int32_t kMaxTimestampGap = kDefaultTimeoutMs * 90;
auto timestamp_it = last_observed_timestamp_.find(ssrc);
if (timestamp_it == last_observed_timestamp_.end()) {
@@ -240,7 +241,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
}
if (!ssrc_is_rtx_[ssrc]) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ValidateTimestampGap(ssrc, timestamp, only_padding);
// Wait for media packets on all ssrcs.
@@ -261,7 +262,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
uint32_t ssrc = rtcp_parser.sender_report()->sender_ssrc();
uint32_t rtcp_timestamp = rtcp_parser.sender_report()->rtp_timestamp();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ValidateTimestampGap(ssrc, rtcp_timestamp, false);
}
return SEND_PACKET;
@@ -272,9 +273,9 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
std::map<uint32_t, uint32_t> last_observed_timestamp_;
std::map<uint32_t, bool> ssrc_is_rtx_;
- rtc::CriticalSection crit_;
- size_t ssrcs_to_observe_ RTC_GUARDED_BY(crit_);
- std::map<uint32_t, bool> ssrc_observed_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ size_t ssrcs_to_observe_ RTC_GUARDED_BY(mutex_);
+ std::map<uint32_t, bool> ssrc_observed_ RTC_GUARDED_BY(mutex_);
} observer(use_rtx);
std::unique_ptr<test::PacketTransport> send_transport;
@@ -414,13 +415,13 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) {
num_flexfec_packets_sent_(0) {}
void ResetPacketCount() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_flexfec_packets_sent_ = 0;
}
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -468,10 +469,10 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) {
}
absl::optional<uint16_t> last_observed_sequence_number_
- RTC_GUARDED_BY(crit_);
- absl::optional<uint32_t> last_observed_timestamp_ RTC_GUARDED_BY(crit_);
- size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(crit_);
- rtc::CriticalSection crit_;
+ RTC_GUARDED_BY(mutex_);
+ absl::optional<uint32_t> last_observed_timestamp_ RTC_GUARDED_BY(mutex_);
+ size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(mutex_);
+ Mutex mutex_;
} observer;
static constexpr int kFrameMaxWidth = 320;
diff --git a/video/end_to_end_tests/stats_tests.cc b/video/end_to_end_tests/stats_tests.cc
index 32bcedb9c8..ae0532b9a3 100644
--- a/video/end_to_end_tests/stats_tests.cc
+++ b/video/end_to_end_tests/stats_tests.cc
@@ -20,6 +20,7 @@
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "system_wrappers/include/metrics.h"
#include "system_wrappers/include/sleep.h"
@@ -479,7 +480,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) {
if (video_frame.ntp_time_ms() > 0 &&
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
video_frame.ntp_time_ms()) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++num_frames_received_;
}
}
@@ -493,7 +494,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) {
bool MinNumberOfFramesReceived() const {
// Have some room for frames with wrong content type during switch.
const int kMinRequiredHistogramSamples = 200 + 50;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return num_frames_received_ > kMinRequiredHistogramSamples;
}
@@ -502,13 +503,13 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) {
EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
// Reset frame counter so next PerformTest() call will do something.
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_frames_received_ = 0;
}
}
- rtc::CriticalSection crit_;
- int num_frames_received_ RTC_GUARDED_BY(&crit_);
+ mutable Mutex mutex_;
+ int num_frames_received_ RTC_GUARDED_BY(&mutex_);
} test;
metrics::Reset();
@@ -609,7 +610,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (++sent_rtp_packets_ == kPacketNumberToDrop) {
std::unique_ptr<RtpHeaderParser> parser(
RtpHeaderParser::CreateForTest());
@@ -623,7 +624,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
const std::vector<uint16_t>& nacks = rtcp_parser.nack()->packet_ids();
@@ -633,7 +634,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
return SEND_PACKET;
}
- void VerifyStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+ void VerifyStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) {
if (!dropped_rtp_packet_requested_)
return;
int send_stream_nack_packets = 0;
@@ -684,7 +685,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
}
bool Run() override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VerifyStats();
return false;
}
@@ -694,10 +695,10 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
}
test::FakeVideoRenderer fake_renderer_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
uint64_t sent_rtp_packets_;
- uint16_t dropped_rtp_packet_ RTC_GUARDED_BY(&crit_);
- bool dropped_rtp_packet_requested_ RTC_GUARDED_BY(&crit_);
+ uint16_t dropped_rtp_packet_ RTC_GUARDED_BY(&mutex_);
+ bool dropped_rtp_packet_requested_ RTC_GUARDED_BY(&mutex_);
std::vector<VideoReceiveStream*> receive_streams_;
VideoSendStream* send_stream_;
absl::optional<int64_t> start_runtime_ms_;
diff --git a/video/end_to_end_tests/transport_feedback_tests.cc b/video/end_to_end_tests/transport_feedback_tests.cc
index 4291bc4812..9cfa7d14f4 100644
--- a/video/end_to_end_tests/transport_feedback_tests.cc
+++ b/video/end_to_end_tests/transport_feedback_tests.cc
@@ -18,6 +18,7 @@
#include "modules/rtp_rtcp/source/byte_io.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "rtc_base/synchronization/mutex.h"
#include "test/call_test.h"
#include "test/field_trial.h"
#include "test/gtest.h"
@@ -65,7 +66,7 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) {
size_t length,
const PacketOptions& options) override {
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (IsDone())
return false;
@@ -141,14 +142,14 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) {
{
// Can't be sure until this point that rtx_to_media_ssrcs_ etc have
// been initialized and are OK to read.
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
started_ = true;
}
return done_.Wait(kDefaultTimeoutMs);
}
private:
- rtc::CriticalSection lock_;
+ Mutex lock_;
rtc::Event done_;
RtpHeaderExtensionMap extensions_;
SequenceNumberUnwrapper unwrapper_;
@@ -366,7 +367,7 @@ TEST_F(TransportFeedbackEndToEndTest,
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
const bool only_padding = rtp_packet.payload_size() == 0;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Padding is expected in congested state to probe for connectivity when
// packets has been dropped.
if (only_padding) {
@@ -386,7 +387,7 @@ TEST_F(TransportFeedbackEndToEndTest,
}
Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// To fill up the congestion window we drop feedback on packets after 20
// packets have been sent. This means that any packets that has not yet
// received feedback after that will be considered as oustanding data and
@@ -425,10 +426,10 @@ TEST_F(TransportFeedbackEndToEndTest,
private:
const size_t num_video_streams_;
const size_t num_audio_streams_;
- rtc::CriticalSection crit_;
- int media_sent_ RTC_GUARDED_BY(crit_);
- int media_sent_before_ RTC_GUARDED_BY(crit_);
- int padding_sent_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ int media_sent_ RTC_GUARDED_BY(mutex_);
+ int media_sent_before_ RTC_GUARDED_BY(mutex_);
+ int padding_sent_ RTC_GUARDED_BY(mutex_);
} test(1, 0);
RunBaseTest(&test);
}
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index e5f55575ec..8ffb3ae5ea 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -60,7 +60,7 @@ FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {}
void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec,
bool internal_source) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
codec_settings_ = codec;
internal_source_ = internal_source;
}
@@ -68,7 +68,7 @@ void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec,
void FrameEncodeMetadataWriter::OnSetRates(
const VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate_fps) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
framerate_fps_ = framerate_fps;
const size_t num_spatial_layers = NumSpatialLayers();
if (timing_frames_info_.size() < num_spatial_layers) {
@@ -81,7 +81,7 @@ void FrameEncodeMetadataWriter::OnSetRates(
}
void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (internal_source_) {
return;
}
@@ -128,7 +128,7 @@ void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx,
EncodedImage* encoded_image) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
absl::optional<size_t> outlier_frame_size;
absl::optional<int64_t> encode_start_ms;
uint8_t timing_flags = VideoSendTiming::kNotTriggered;
@@ -235,7 +235,7 @@ FrameEncodeMetadataWriter::UpdateBitstream(
}
void FrameEncodeMetadataWriter::Reset() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
for (auto& info : timing_frames_info_) {
info.frames.clear();
}
diff --git a/video/frame_encode_metadata_writer.h b/video/frame_encode_metadata_writer.h
index 4ee2d7eec7..32b5872b27 100644
--- a/video/frame_encode_metadata_writer.h
+++ b/video/frame_encode_metadata_writer.h
@@ -20,7 +20,7 @@
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/include/video_codec_interface.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -69,7 +69,7 @@ class FrameEncodeMetadataWriter {
std::list<FrameMetadata> frames;
};
- rtc::CriticalSection lock_;
+ Mutex lock_;
EncodedImageCallback* const frame_drop_callback_;
VideoCodec codec_settings_ RTC_GUARDED_BY(&lock_);
bool internal_source_ RTC_GUARDED_BY(&lock_);
diff --git a/video/full_stack_tests.cc b/video/full_stack_tests.cc
index 7307b462b7..a12b334230 100644
--- a/video/full_stack_tests.cc
+++ b/video/full_stack_tests.cc
@@ -90,26 +90,6 @@ std::string ClipNameToClipPath(const char* clip_name) {
// logs // bool
// };
-class GenericDescriptorTest : public ::testing::TestWithParam<std::string> {
- public:
- GenericDescriptorTest()
- : field_trial_(AppendFieldTrials(GetParam())),
- generic_descriptor_enabled_(
- field_trial::IsEnabled("WebRTC-GenericDescriptor")) {}
-
- std::string GetTestName(std::string base) {
- if (generic_descriptor_enabled_)
- base += "_generic_descriptor";
- return base;
- }
-
- bool GenericDescriptorEnabled() const { return generic_descriptor_enabled_; }
-
- private:
- test::ScopedFieldTrials field_trial_;
- bool generic_descriptor_enabled_;
-};
-
#if defined(RTC_ENABLE_VP9)
TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) {
auto fixture = CreateVideoQualityTestFixture();
@@ -125,7 +105,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) {
+TEST(GenericDescriptorTest, ForemanCifPlr5Vp9) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -134,11 +114,11 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) {
30000, 500000, 2000000, false,
"VP9", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_VP9"), 0.0,
- 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -217,7 +197,7 @@ TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) {
fixture->RunWithAnalyzer(paris_qcif);
}
-TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
+TEST(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
auto fixture = CreateVideoQualityTestFixture();
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
ParamsWithLogging foreman_cif;
@@ -227,13 +207,13 @@ TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
700000, 700000, 700000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_net_delay_0_0_plr_0"), 0.0,
- 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
+TEST(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -242,15 +222,16 @@ TEST_P(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
30000, 30000, 30000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0"),
- 0.0, 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.analyzer = {
+ "foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
// TODO(webrtc:9722): Remove when experiment is cleaned up.
-TEST_P(GenericDescriptorTest,
- ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
+TEST(GenericDescriptorTest,
+ ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
test::ScopedFieldTrials override_field_trials(
AppendFieldTrials(kVp8TrustedRateControllerFieldTrial));
auto fixture = CreateVideoQualityTestFixture();
@@ -263,9 +244,10 @@ TEST_P(GenericDescriptorTest,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
foreman_cif.analyzer = {
- GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"),
+ "foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_"
+ "descriptor",
0.0, 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -328,7 +310,7 @@ TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5) {
+TEST(GenericDescriptorTest, ForemanCifPlr5) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -337,15 +319,15 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5"), 0.0, 0.0,
- kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
+TEST(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -354,11 +336,12 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
true, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_ulpfec"),
- 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -442,7 +425,7 @@ TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) {
+TEST(GenericDescriptorTest, ForemanCifPlr5H264) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -451,11 +434,12 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) {
30000, 500000, 2000000, false,
"H264", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_H264"), 0.0,
- 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_delay_50_0_plr_5_H264_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -565,7 +549,7 @@ TEST(FullStackTest, ForemanCif500kbps100ms) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
+TEST(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -574,12 +558,13 @@ TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_500kbps_100ms_32pkts_queue"),
- 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->queue_length_packets = 32;
foreman_cif.config->queue_delay_ms = 100;
foreman_cif.config->link_capacity_kbps = 500;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -666,7 +651,7 @@ TEST(FullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) {
fixture->RunWithAnalyzer(conf_motion_hd);
}
-TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
+TEST(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
@@ -680,13 +665,13 @@ TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
false, false,
false, ClipNameToClipPath("ConferenceMotion_1280_720_50")};
conf_motion_hd.analyzer = {
- GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0,
+ "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0,
kFullStackTestDurationSecs};
conf_motion_hd.config->queue_length_packets = 50;
conf_motion_hd.config->loss_percent = 3;
conf_motion_hd.config->queue_delay_ms = 100;
conf_motion_hd.config->link_capacity_kbps = 2000;
- conf_motion_hd.call.generic_descriptor = GenericDescriptorEnabled();
+ conf_motion_hd.call.generic_descriptor = true;
fixture->RunWithAnalyzer(conf_motion_hd);
}
@@ -867,7 +852,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
fixture->RunWithAnalyzer(config);
}
-TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
+TEST(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging screenshare;
screenshare.call.send_side_bwe = true;
@@ -875,12 +860,12 @@ TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
1000000, false, "VP8", 2, 1, 400000,
false, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
- screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0,
- kFullStackTestDurationSecs};
+ screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
screenshare.config->loss_percent = 5;
screenshare.config->queue_delay_ms = 200;
screenshare.config->link_capacity_kbps = 500;
- screenshare.call.generic_descriptor = GenericDescriptorEnabled();
+ screenshare.call.generic_descriptor = true;
fixture->RunWithAnalyzer(screenshare);
}
@@ -1246,10 +1231,4 @@ TEST(FullStackTest, MAYBE_LargeRoomVP8_50thumb) {
fixture->RunWithAnalyzer(large_room);
}
-INSTANTIATE_TEST_SUITE_P(
- FullStackTest,
- GenericDescriptorTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
} // namespace webrtc
diff --git a/video/pc_full_stack_tests.cc b/video/pc_full_stack_tests.cc
index dc61680694..7a4b449093 100644
--- a/video/pc_full_stack_tests.cc
+++ b/video/pc_full_stack_tests.cc
@@ -20,6 +20,7 @@
#include "api/test/network_emulation_manager.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/test/simulated_network.h"
+#include "api/test/time_controller.h"
#include "call/simulated_network.h"
#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
@@ -38,8 +39,6 @@ using VideoConfig =
webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig;
using AudioConfig =
webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig;
-using VideoGeneratorType =
- webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoGeneratorType;
using ScreenShareConfig =
webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::ScreenShareConfig;
using VideoSimulcastConfig =
@@ -80,12 +79,13 @@ CreateTwoNetworkLinks(NetworkEmulationManager* emulation,
std::unique_ptr<webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture>
CreateTestFixture(const std::string& test_case_name,
+ TimeController& time_controller,
std::pair<EmulatedNetworkManagerInterface*,
EmulatedNetworkManagerInterface*> network_links,
rtc::FunctionView<void(PeerConfigurer*)> alice_configurer,
rtc::FunctionView<void(PeerConfigurer*)> bob_configurer) {
auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture(
- test_case_name, /*audio_quality_analyzer=*/nullptr,
+ test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr,
/*video_quality_analyzer=*/nullptr);
fixture->AddPeer(network_links.first->network_thread(),
network_links.first->network_manager(), alice_configurer);
@@ -108,30 +108,13 @@ std::string ClipNameToClipPath(const char* clip_name) {
} // namespace
-class PCGenericDescriptorTest : public ::testing::TestWithParam<std::string> {
- public:
- PCGenericDescriptorTest()
- : field_trial_(AppendFieldTrials(GetParam())),
- generic_descriptor_enabled_(
- field_trial::IsEnabled("WebRTC-GenericDescriptor")) {}
-
- std::string GetTestName(std::string base) {
- if (generic_descriptor_enabled_)
- base += "_generic_descriptor";
- return base;
- }
-
- private:
- test::ScopedFieldTrials field_trial_;
- bool generic_descriptor_enabled_;
-};
-
#if defined(RTC_ENABLE_VP9)
TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_foreman_cif_net_delay_0_0_plr_0_VP9",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -151,14 +134,15 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Vp9) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5Vp9) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_VP9"),
+ "pc_foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -192,6 +176,7 @@ TEST(PCFullStackTest, MAYBE_GeneratorWithoutPacketLossVp9Profile2) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_generator_net_delay_0_0_plr_0_VP9Profile2",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -247,7 +232,7 @@ TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- "pc_net_delay_0_0_plr_0",
+ "pc_net_delay_0_0_plr_0", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -265,11 +250,12 @@ TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
+TEST(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_net_delay_0_0_plr_0"),
+ "pc_foreman_cif_net_delay_0_0_plr_0_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -287,12 +273,13 @@ TEST_P(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
+TEST(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_30kbps_net_delay_0_0_plr_0"),
+ "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 10);
@@ -301,11 +288,11 @@ TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -316,16 +303,17 @@ TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
}
// TODO(webrtc:9722): Remove when experiment is cleaned up.
-TEST_P(PCGenericDescriptorTest,
- ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
+TEST(PCGenericDescriptorTest,
+ ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
test::ScopedFieldTrials override_field_trials(
AppendFieldTrials(kVp8TrustedRateControllerFieldTrial));
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
auto fixture = CreateTestFixture(
- GetTestName(
- "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"),
+ "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_"
+ "descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 10);
@@ -334,11 +322,11 @@ TEST_P(PCGenericDescriptorTest,
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -356,6 +344,7 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) {
config.link_capacity_kbps = 150;
auto fixture = CreateTestFixture(
"pc_foreman_cif_link_150kbps_net_delay_0_0_plr_0",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -381,6 +370,7 @@ TEST(PCFullStackTest, ForemanCifLink130kbps100msDelay1PercentPacketLossUlpfec) {
config.loss_percent = 1;
auto fixture = CreateTestFixture(
"pc_foreman_cif_link_130kbps_delay100ms_loss1_ulpfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -406,6 +396,7 @@ TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) {
config.loss_percent = 1;
auto fixture = CreateTestFixture(
"pc_foreman_cif_link_50kbps_delay100ms_loss1_ulpfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -432,6 +423,7 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) {
config.queue_delay_ms = 100;
auto fixture = CreateTestFixture(
"pc_foreman_cif_link_150kbps_delay100ms_30pkts_queue_overshoot30",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -463,6 +455,7 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) {
config.loss_percent = 1;
auto fixture = CreateTestFixture(
"pc_foreman_cif_link_250kbps_delay100ms_10pkts_loss1",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -480,14 +473,15 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5"),
+ "pc_foreman_cif_delay_50_0_plr_5_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -504,14 +498,15 @@ TEST_P(PCGenericDescriptorTest, ForemanCifPlr5) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_ulpfec"),
+ "pc_foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -536,6 +531,7 @@ TEST(PCFullStackTest, ForemanCifPlr5Flexfec) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_delay_50_0_plr_5_flexfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -561,6 +557,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Flexfec) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_500kbps_delay_50_0_plr_3_flexfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -586,6 +583,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_500kbps_delay_50_0_plr_3_ulpfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -608,6 +606,7 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketlossH264) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_foreman_cif_net_delay_0_0_plr_0_H264",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -631,6 +630,7 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
BuiltInNetworkBehaviorConfig config;
auto fixture = CreateTestFixture(
"pc_foreman_cif_30kbps_net_delay_0_0_plr_0_H264",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 10);
@@ -639,11 +639,11 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -653,14 +653,15 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5H264) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5H264) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_H264"),
+ "pc_foreman_cif_delay_50_0_plr_5_H264_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -688,6 +689,7 @@ TEST(PCFullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_delay_50_0_plr_5_H264_sps_pps_idr",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -712,6 +714,7 @@ TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_delay_50_0_plr_5_H264_flexfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -738,6 +741,7 @@ TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) {
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
"pc_foreman_cif_delay_50_0_plr_5_H264_ulpfec",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -763,7 +767,7 @@ TEST(PCFullStackTest, ForemanCif500kbps) {
config.queue_delay_ms = 0;
config.link_capacity_kbps = 500;
auto fixture = CreateTestFixture(
- "pc_foreman_cif_500kbps",
+ "pc_foreman_cif_500kbps", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -789,6 +793,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsLimitedQueue) {
config.link_capacity_kbps = 500;
auto fixture = CreateTestFixture(
"pc_foreman_cif_500kbps_32pkts_queue",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -814,6 +819,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100ms) {
config.link_capacity_kbps = 500;
auto fixture = CreateTestFixture(
"pc_foreman_cif_500kbps_100ms",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -830,7 +836,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100ms) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
+TEST(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
@@ -838,7 +844,8 @@ TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
config.queue_delay_ms = 100;
config.link_capacity_kbps = 500;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_500kbps_100ms_32pkts_queue"),
+ "pc_foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -885,6 +892,7 @@ TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) {
config.link_capacity_kbps = 1000;
auto fixture = CreateTestFixture(
"pc_foreman_cif_1000kbps_100ms_32pkts_queue",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -911,6 +919,7 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) {
config.link_capacity_kbps = 2000;
auto fixture = CreateTestFixture(
"pc_conference_motion_hd_2000kbps_100ms_32pkts_queue",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1280, 720, 50);
@@ -940,6 +949,7 @@ TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) {
config.link_capacity_kbps = 2000;
auto fixture = CreateTestFixture(
"pc_conference_motion_hd_1tl_moderate_limits_trusted_rate_ctrl",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1280, 720, 50);
@@ -958,7 +968,7 @@ TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) {
/*
// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework
-TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
+TEST(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
@@ -972,7 +982,7 @@ TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
false, false,
false, ClipNameToClipPath("ConferenceMotion_1280_720_50")};
conf_motion_hd.analyzer = {
- GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0,
+ "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0,
kTestDurationSec};
conf_motion_hd.config->queue_length_packets = 50;
conf_motion_hd.config->loss_percent = 3;
@@ -1092,6 +1102,7 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) {
config.link_capacity_kbps = 2000;
auto fixture = CreateTestFixture(
"pc_conference_motion_hd_2000kbps_100ms_32pkts_queue_vp9",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1280, 720, 50);
@@ -1116,6 +1127,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_NoConferenceMode) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_screenshare_slides_no_conference_mode",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1138,7 +1150,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- "pc_screenshare_slides",
+ "pc_screenshare_slides", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1165,6 +1177,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_NoConferenceMode) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_screenshare_slides_simulcast_no_conference_mode",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1190,6 +1203,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_screenshare_slides_simulcast",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1297,7 +1311,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
}
// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework
-TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
+TEST(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging screenshare;
screenshare.call.send_side_bwe = true;
@@ -1305,12 +1319,12 @@ TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
1000000, false, "VP8", 2, 1, 400000,
false, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
- screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0,
- kTestDurationSec};
+ screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor",
+ 0.0, 0.0, kTestDurationSec};
screenshare.config->loss_percent = 5;
screenshare.config->queue_delay_ms = 200;
screenshare.config->link_capacity_kbps = 500;
- screenshare.call.generic_descriptor = GenericDescriptorEnabled();
+ screenshare.call.generic_descriptor = true;
fixture->RunWithAnalyzer(screenshare);
}
@@ -1431,6 +1445,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) {
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
"pc_screenshare_slides_vp9_3sl_high_fps",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1459,7 +1474,7 @@ TEST(PCFullStackTest, VP9SVC_3SL_High) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- "pc_vp9svc_3sl_high",
+ "pc_vp9svc_3sl_high", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1488,7 +1503,7 @@ TEST(PCFullStackTest, VP9SVC_3SL_Low) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- "pc_vp9svc_3sl_low",
+ "pc_vp9svc_3sl_low", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -1619,7 +1634,7 @@ TEST(PCFullStackTest, MAYBE_SimulcastFullHdOveruse) {
config.loss_percent = 0;
config.queue_delay_ms = 100;
auto fixture = CreateTestFixture(
- "pc_simulcast_HD_high",
+ "pc_simulcast_HD_high", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1920, 1080, 30);
@@ -1644,6 +1659,7 @@ TEST(PCFullStackTest, SimulcastVP8_3SL_High) {
config.queue_delay_ms = 100;
auto fixture = CreateTestFixture(
"pc_simulcast_vp8_3sl_high",
+ *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1280, 720, 30);
@@ -1668,7 +1684,7 @@ TEST(PCFullStackTest, SimulcastVP8_3SL_Low) {
config.loss_percent = 0;
config.queue_delay_ms = 100;
auto fixture = CreateTestFixture(
- "pc_simulcast_vp8_3sl_low",
+ "pc_simulcast_vp8_3sl_low", *network_emulation_manager->time_controller(),
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(1280, 720, 30);
@@ -1815,12 +1831,6 @@ TEST(PCFullStackTest, MAYBE_LargeRoomVP8_50thumb) {
}
*/
-INSTANTIATE_TEST_SUITE_P(
- PCFullStackTest,
- PCGenericDescriptorTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
class PCDualStreamsTest : public ::testing::TestWithParam<int> {};
/*
diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc
index 19c1141b0a..298919c096 100644
--- a/video/picture_id_tests.cc
+++ b/video/picture_id_tests.cc
@@ -22,6 +22,7 @@
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/call_test.h"
@@ -49,12 +50,12 @@ class PictureIdObserver : public test::RtpRtcpObserver {
num_ssrcs_to_observe_(1) {}
void SetExpectedSsrcs(size_t num_expected_ssrcs) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_ssrcs_to_observe_ = num_expected_ssrcs;
}
void ResetObservedSsrcs() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Do not clear the timestamp and picture_id, to ensure that we check
// consistency between reinits and recreations.
num_packets_sent_.clear();
@@ -62,7 +63,7 @@ class PictureIdObserver : public test::RtpRtcpObserver {
}
void SetMaxExpectedPictureIdGap(int max_expected_picture_id_gap) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
max_expected_picture_id_gap_ = max_expected_picture_id_gap;
// Expect smaller gap for |tl0_pic_idx| (running index for temporal_idx 0).
max_expected_tl0_idx_gap_ = max_expected_picture_id_gap_ / 2;
@@ -120,7 +121,7 @@ class PictureIdObserver : public test::RtpRtcpObserver {
// Verify continuity and monotonicity of picture_id sequence.
void VerifyPictureId(const ParsedPacket& current,
const ParsedPacket& last) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) {
if (current.timestamp == last.timestamp) {
EXPECT_EQ(last.picture_id, current.picture_id);
return; // Same frame.
@@ -143,7 +144,7 @@ class PictureIdObserver : public test::RtpRtcpObserver {
}
void VerifyTl0Idx(const ParsedPacket& current, const ParsedPacket& last) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) {
if (current.tl0_pic_idx == kNoTl0PicIdx ||
current.temporal_idx == kNoTemporalIdx) {
return; // No temporal layers.
@@ -169,7 +170,7 @@ class PictureIdObserver : public test::RtpRtcpObserver {
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
ParsedPacket parsed;
if (!ParsePayload(packet, length, &parsed))
@@ -196,14 +197,14 @@ class PictureIdObserver : public test::RtpRtcpObserver {
return SEND_PACKET;
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
const std::unique_ptr<VideoRtpDepacketizer> depacketizer_;
- std::map<uint32_t, ParsedPacket> last_observed_packet_ RTC_GUARDED_BY(crit_);
- std::map<uint32_t, size_t> num_packets_sent_ RTC_GUARDED_BY(crit_);
- int max_expected_picture_id_gap_ RTC_GUARDED_BY(crit_);
- int max_expected_tl0_idx_gap_ RTC_GUARDED_BY(crit_);
- size_t num_ssrcs_to_observe_ RTC_GUARDED_BY(crit_);
- std::set<uint32_t> observed_ssrcs_ RTC_GUARDED_BY(crit_);
+ std::map<uint32_t, ParsedPacket> last_observed_packet_ RTC_GUARDED_BY(mutex_);
+ std::map<uint32_t, size_t> num_packets_sent_ RTC_GUARDED_BY(mutex_);
+ int max_expected_picture_id_gap_ RTC_GUARDED_BY(mutex_);
+ int max_expected_tl0_idx_gap_ RTC_GUARDED_BY(mutex_);
+ size_t num_ssrcs_to_observe_ RTC_GUARDED_BY(mutex_);
+ std::set<uint32_t> observed_ssrcs_ RTC_GUARDED_BY(mutex_);
};
class PictureIdTest : public test::CallTest,
diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc
index 19b9e8c36c..65a23dbbcc 100644
--- a/video/quality_scaling_tests.cc
+++ b/video/quality_scaling_tests.cc
@@ -233,7 +233,8 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateWithScalingOff) {
TEST_F(QualityScalingTest, NoAdaptDownForHighQp_Vp9) {
// VP9 QP thresholds, low:1, high:1 -> high QP.
- test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd);
+ test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
+ "WebRTC-VP9QualityScaler/Disabled/");
// QualityScaler always disabled.
const bool kAutomaticResize = true;
diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc
index 82951c8a50..7aec685a1c 100644
--- a/video/receive_statistics_proxy.cc
+++ b/video/receive_statistics_proxy.cc
@@ -133,7 +133,7 @@ void ReceiveStatisticsProxy::UpdateHistograms(
// earlier.
RTC_DCHECK_RUN_ON(&decode_thread_);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
char log_stream_buf[8 * 1024];
rtc::SimpleStringBuilder log_stream(log_stream_buf);
@@ -623,7 +623,7 @@ ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
}
VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Get current frame rates here, as only updating them on new frames prevents
// us from ever correctly displaying frame rate of 0.
int64_t now_ms = clock_->TimeInMilliseconds();
@@ -654,13 +654,13 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
}
void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stats_.current_payload_type = payload_type;
}
void ReceiveStatisticsProxy::OnDecoderImplementationName(
const char* implementation_name) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stats_.decoder_implementation_name = implementation_name;
}
@@ -671,7 +671,7 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stats_.max_decode_ms = max_decode_ms;
stats_.current_delay_ms = current_delay_ms;
stats_.target_delay_ms = target_delay_ms;
@@ -687,13 +687,13 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
}
void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_unique_frames_.emplace(num_unique_frames);
}
void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
const TimingFrameInfo& info) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (info.flags != VideoSendTiming::kInvalid) {
int64_t now_ms = clock_->TimeInMilliseconds();
timing_frame_info_counter_.Add(info, now_ms);
@@ -714,14 +714,14 @@ void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
uint32_t ssrc,
const RtcpPacketTypeCounter& packet_counter) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (stats_.ssrc != ssrc)
return;
stats_.rtcp_packet_type_counts = packet_counter;
}
void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we
// receive stats from one of them.
if (stats_.ssrc != ssrc)
@@ -733,7 +733,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame,
absl::optional<uint8_t> qp,
int32_t decode_time_ms,
VideoContentType content_type) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uint64_t now_ms = clock_->TimeInMilliseconds();
@@ -799,7 +799,7 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0);
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
video_quality_observer_->OnRenderedFrame(frame, now_ms);
@@ -833,7 +833,7 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
int64_t sync_offset_ms,
double estimated_freq_khz) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
sync_offset_counter_.Add(std::abs(sync_offset_ms));
stats_.sync_offset_ms = sync_offset_ms;
last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
@@ -851,7 +851,7 @@ void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (is_keyframe) {
++stats_.frame_counts.key_frames;
} else {
@@ -881,13 +881,13 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
}
void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stats_.frames_dropped += frames_dropped;
}
void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
RTC_DCHECK_RUN_ON(&decode_thread_);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
last_codec_type_ = codec_type;
if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
qp_counters_.vp8.Add(qp);
@@ -898,7 +898,7 @@ void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
void ReceiveStatisticsProxy::OnStreamInactive() {
// TODO(sprang): Figure out any other state that should be reset.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Don't report inter-frame delay if stream was paused.
last_decoded_frame_time_ms_.reset();
video_quality_observer_->OnStreamInactive();
@@ -906,7 +906,7 @@ void ReceiveStatisticsProxy::OnStreamInactive() {
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
avg_rtt_ms_ = avg_rtt_ms;
}
diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h
index 02043d6944..8b94c32b69 100644
--- a/video/receive_statistics_proxy.h
+++ b/video/receive_statistics_proxy.h
@@ -20,12 +20,12 @@
#include "call/video_receive_stream.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_coding_defines.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/histogram_percentile_counter.h"
#include "rtc_base/numerics/moving_max_counter.h"
#include "rtc_base/numerics/sample_counter.h"
#include "rtc_base/rate_statistics.h"
#include "rtc_base/rate_tracker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
#include "video/quality_threshold.h"
@@ -124,19 +124,19 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
rtc::HistogramPercentileCounter interframe_delay_percentiles;
};
- void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Removes info about old frames and then updates the framerate.
void UpdateFramerate(int64_t now_ms) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void UpdateDecodeTimeHistograms(int width,
int height,
int decode_time_ms) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
- int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
// Ownership of this object lies with the owner of the ReceiveStatisticsProxy
@@ -150,52 +150,52 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
const int64_t start_ms_;
const bool enable_decode_time_histograms_;
- rtc::CriticalSection crit_;
- int64_t last_sample_time_ RTC_GUARDED_BY(crit_);
- QualityThreshold fps_threshold_ RTC_GUARDED_BY(crit_);
- QualityThreshold qp_threshold_ RTC_GUARDED_BY(crit_);
- QualityThreshold variance_threshold_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(crit_);
- int num_bad_states_ RTC_GUARDED_BY(crit_);
- int num_certain_states_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ int64_t last_sample_time_ RTC_GUARDED_BY(mutex_);
+ QualityThreshold fps_threshold_ RTC_GUARDED_BY(mutex_);
+ QualityThreshold qp_threshold_ RTC_GUARDED_BY(mutex_);
+ QualityThreshold variance_threshold_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(mutex_);
+ int num_bad_states_ RTC_GUARDED_BY(mutex_);
+ int num_certain_states_ RTC_GUARDED_BY(mutex_);
// Note: The |stats_.rtp_stats| member is not used or populated by this class.
- mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(crit_);
- RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(crit_);
- RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(crit_);
- rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(crit_);
- rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(crit_);
- rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(crit_);
+ mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(mutex_);
+ RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(mutex_);
+ RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(mutex_);
+ rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(mutex_);
+ rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(mutex_);
+ rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(mutex_);
std::unique_ptr<VideoQualityObserver> video_quality_observer_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
mutable rtc::MovingMaxCounter<int> interframe_delay_max_moving_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
std::map<VideoContentType, ContentSpecificStats> content_specific_stats_
- RTC_GUARDED_BY(crit_);
- MaxCounter freq_offset_counter_ RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
+ MaxCounter freq_offset_counter_ RTC_GUARDED_BY(mutex_);
QpCounters qp_counters_ RTC_GUARDED_BY(decode_thread_);
- int64_t avg_rtt_ms_ RTC_GUARDED_BY(crit_);
- mutable std::map<int64_t, size_t> frame_window_ RTC_GUARDED_BY(&crit_);
- VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_);
- VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_);
- absl::optional<int64_t> first_frame_received_time_ms_ RTC_GUARDED_BY(&crit_);
- absl::optional<int64_t> first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
- absl::optional<int64_t> last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
- size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&crit_);
- int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&crit_);
+ int64_t avg_rtt_ms_ RTC_GUARDED_BY(mutex_);
+ mutable std::map<int64_t, size_t> frame_window_ RTC_GUARDED_BY(&mutex_);
+ VideoContentType last_content_type_ RTC_GUARDED_BY(&mutex_);
+ VideoCodecType last_codec_type_ RTC_GUARDED_BY(&mutex_);
+ absl::optional<int64_t> first_frame_received_time_ms_ RTC_GUARDED_BY(&mutex_);
+ absl::optional<int64_t> first_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_);
+ absl::optional<int64_t> last_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_);
+ size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&mutex_);
+ int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&mutex_);
// Mutable because calling Max() on MovingMaxCounter is not const. Yet it is
// called from const GetStats().
mutable rtc::MovingMaxCounter<TimingFrameInfo> timing_frame_info_counter_
- RTC_GUARDED_BY(&crit_);
- absl::optional<int> num_unique_frames_ RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(&mutex_);
+ absl::optional<int> num_unique_frames_ RTC_GUARDED_BY(mutex_);
absl::optional<int64_t> last_estimated_playout_ntp_timestamp_ms_
- RTC_GUARDED_BY(&crit_);
+ RTC_GUARDED_BY(&mutex_);
absl::optional<int64_t> last_estimated_playout_time_ms_
- RTC_GUARDED_BY(&crit_);
+ RTC_GUARDED_BY(&mutex_);
rtc::ThreadChecker decode_thread_;
rtc::ThreadChecker network_thread_;
rtc::ThreadChecker main_thread_;
diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc
index b818eae018..3cce3c8ea4 100644
--- a/video/receive_statistics_proxy2.cc
+++ b/video/receive_statistics_proxy2.cc
@@ -19,6 +19,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/thread.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
@@ -81,6 +82,20 @@ std::string UmaSuffixForContentType(VideoContentType content_type) {
return ss.str();
}
+// TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some
+// rtc::Thread instances and/or implementations that don't register as the
+// current task queue.
+bool IsCurrentTaskQueueOrThread(TaskQueueBase* task_queue) {
+ if (task_queue->IsCurrent())
+ return true;
+
+ rtc::Thread* current_thread = rtc::ThreadManager::Instance()->CurrentThread();
+ if (!current_thread)
+ return false;
+
+ return static_cast<TaskQueueBase*>(current_thread) == task_queue;
+}
+
} // namespace
ReceiveStatisticsProxy::ReceiveStatisticsProxy(
@@ -129,7 +144,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
RTC_DCHECK_RUN_ON(&main_thread_);
- task_safety_flag_->SetNotAlive();
}
void ReceiveStatisticsProxy::UpdateHistograms(
@@ -689,18 +703,17 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
RTC_DCHECK_RUN_ON(&decode_queue_);
- worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [payload_type, this]() {
- RTC_DCHECK_RUN_ON(&main_thread_);
- stats_.current_payload_type = payload_type;
- }));
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [payload_type, this]() {
+ RTC_DCHECK_RUN_ON(&main_thread_);
+ stats_.current_payload_type = payload_type;
+ }));
}
void ReceiveStatisticsProxy::OnDecoderImplementationName(
const char* implementation_name) {
RTC_DCHECK_RUN_ON(&decode_queue_);
worker_thread_->PostTask(ToQueuedTask(
- task_safety_flag_, [name = std::string(implementation_name), this]() {
+ task_safety_, [name = std::string(implementation_name), this]() {
RTC_DCHECK_RUN_ON(&main_thread_);
stats_.decoder_implementation_name = name;
}));
@@ -715,7 +728,7 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
int render_delay_ms) {
RTC_DCHECK_RUN_ON(&decode_queue_);
worker_thread_->PostTask(ToQueuedTask(
- task_safety_flag_,
+ task_safety_,
[max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
min_playout_delay_ms, render_delay_ms, this]() {
RTC_DCHECK_RUN_ON(&main_thread_);
@@ -742,7 +755,7 @@ void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) {
void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
const TimingFrameInfo& info) {
RTC_DCHECK_RUN_ON(&decode_queue_);
- worker_thread_->PostTask(ToQueuedTask(task_safety_flag_, [info, this]() {
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [info, this]() {
RTC_DCHECK_RUN_ON(&main_thread_);
if (info.flags != VideoSendTiming::kInvalid) {
int64_t now_ms = clock_->TimeInMilliseconds();
@@ -768,20 +781,20 @@ void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
if (ssrc != remote_ssrc_)
return;
- if (!worker_thread_->IsCurrent()) {
- // RtpRtcp::Configuration has a single RtcpPacketTypeCounterObserver and
- // that same configuration may be used for both receiver and sender
- // (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl).
- // The RTCPSender implementation currently makes calls to this function on a
+ if (!IsCurrentTaskQueueOrThread(worker_thread_)) {
+ // RtpRtcpInterface::Configuration has a single
+ // RtcpPacketTypeCounterObserver and that same configuration may be used for
+ // both receiver and sender (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl). The
+ // RTCPSender implementation currently makes calls to this function on a
// process thread whereas the RTCPReceiver implementation calls back on the
// [main] worker thread.
// So until the sender implementation has been updated, we work around this
// here by posting the update to the expected thread. We make a by value
- // copy of the |task_safety_flag_| to handle the case if the queued task
+ // copy of the |task_safety_| to handle the case if the queued task
// runs after the |ReceiveStatisticsProxy| has been deleted. In such a
// case the packet_counter update won't be recorded.
worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [ssrc, packet_counter, this]() {
+ ToQueuedTask(task_safety_, [ssrc, packet_counter, this]() {
RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
}));
return;
@@ -810,7 +823,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame,
// "com.apple.coremedia.decompressionsession.clientcallback"
VideoFrameMetaData meta(frame, clock_->CurrentTime());
worker_thread_->PostTask(ToQueuedTask(
- task_safety_flag_, [meta, qp, decode_time_ms, content_type, this]() {
+ task_safety_, [meta, qp, decode_time_ms, content_type, this]() {
OnDecodedFrame(meta, qp, decode_time_ms, content_type);
}));
}
@@ -936,8 +949,8 @@ void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
RTC_DCHECK_RUN_ON(&incoming_render_queue_);
int64_t now_ms = clock_->TimeInMilliseconds();
worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [video_playout_ntp_ms, sync_offset_ms,
- estimated_freq_khz, now_ms, this]() {
+ ToQueuedTask(task_safety_, [video_playout_ntp_ms, sync_offset_ms,
+ estimated_freq_khz, now_ms, this]() {
RTC_DCHECK_RUN_ON(&main_thread_);
sync_offset_counter_.Add(std::abs(sync_offset_ms));
stats_.sync_offset_ms = sync_offset_ms;
@@ -989,25 +1002,24 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
}
void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
- RTC_DCHECK_RUN_ON(&decode_queue_);
- worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [frames_dropped, this]() {
- RTC_DCHECK_RUN_ON(&main_thread_);
- stats_.frames_dropped += frames_dropped;
- }));
+ // Can be called on either the decode queue or the worker thread
+ // See FrameBuffer2 for more details.
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [frames_dropped, this]() {
+ RTC_DCHECK_RUN_ON(&main_thread_);
+ stats_.frames_dropped += frames_dropped;
+ }));
}
void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
RTC_DCHECK_RUN_ON(&decode_queue_);
- worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [codec_type, qp, this]() {
- RTC_DCHECK_RUN_ON(&main_thread_);
- last_codec_type_ = codec_type;
- if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
- qp_counters_.vp8.Add(qp);
- qp_sample_.Add(qp);
- }
- }));
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [codec_type, qp, this]() {
+ RTC_DCHECK_RUN_ON(&main_thread_);
+ last_codec_type_ = codec_type;
+ if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
+ qp_counters_.vp8.Add(qp);
+ qp_sample_.Add(qp);
+ }
+ }));
}
void ReceiveStatisticsProxy::OnStreamInactive() {
diff --git a/video/receive_statistics_proxy2.h b/video/receive_statistics_proxy2.h
index d6f6f1cc21..1357c407ad 100644
--- a/video/receive_statistics_proxy2.h
+++ b/video/receive_statistics_proxy2.h
@@ -211,8 +211,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
// methods are invoked on such as GetStats().
TaskQueueBase* const worker_thread_;
- PendingTaskSafetyFlag::Pointer task_safety_flag_ =
- PendingTaskSafetyFlag::Create();
+ ScopedTaskSafety task_safety_;
SequenceChecker decode_queue_;
rtc::ThreadChecker main_thread_;
diff --git a/video/rtp_streams_synchronizer.cc b/video/rtp_streams_synchronizer.cc
index 3dedc43eaa..28e9a0ba9d 100644
--- a/video/rtp_streams_synchronizer.cc
+++ b/video/rtp_streams_synchronizer.cc
@@ -51,7 +51,7 @@ RtpStreamsSynchronizer::RtpStreamsSynchronizer(Syncable* syncable_video)
RtpStreamsSynchronizer::~RtpStreamsSynchronizer() = default;
void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (syncable_audio == syncable_audio_) {
// This prevents expensive no-ops.
return;
@@ -76,7 +76,7 @@ void RtpStreamsSynchronizer::Process() {
RTC_DCHECK_RUN_ON(&process_thread_checker_);
last_sync_time_ = rtc::TimeNanos();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!syncable_audio_) {
return;
}
@@ -157,7 +157,7 @@ bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs(
int64_t* video_playout_ntp_ms,
int64_t* stream_offset_ms,
double* estimated_freq_khz) const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!syncable_audio_) {
return false;
}
diff --git a/video/rtp_streams_synchronizer.h b/video/rtp_streams_synchronizer.h
index 6abf5bbe0e..732c9a7d77 100644
--- a/video/rtp_streams_synchronizer.h
+++ b/video/rtp_streams_synchronizer.h
@@ -17,7 +17,7 @@
#include <memory>
#include "modules/include/module.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "video/stream_synchronization.h"
@@ -51,11 +51,11 @@ class RtpStreamsSynchronizer : public Module {
private:
Syncable* syncable_video_;
- rtc::CriticalSection crit_;
- Syncable* syncable_audio_ RTC_GUARDED_BY(crit_);
- std::unique_ptr<StreamSynchronization> sync_ RTC_GUARDED_BY(crit_);
- StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(crit_);
- StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ Syncable* syncable_audio_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<StreamSynchronization> sync_ RTC_GUARDED_BY(mutex_);
+ StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(mutex_);
+ StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(mutex_);
rtc::ThreadChecker process_thread_checker_;
int64_t last_sync_time_ RTC_GUARDED_BY(&process_thread_checker_);
diff --git a/video/rtp_streams_synchronizer2.cc b/video/rtp_streams_synchronizer2.cc
index 116cf2879b..49be355a38 100644
--- a/video/rtp_streams_synchronizer2.cc
+++ b/video/rtp_streams_synchronizer2.cc
@@ -23,7 +23,7 @@ namespace internal {
namespace {
// Time interval for logging stats.
constexpr int64_t kStatsLogIntervalMs = 10000;
-constexpr uint32_t kSyncIntervalMs = 1000;
+constexpr TimeDelta kSyncInterval = TimeDelta::Millis(1000);
bool UpdateMeasurements(StreamSynchronization::Measurements* stream,
const Syncable::Info& info) {
@@ -34,20 +34,20 @@ bool UpdateMeasurements(StreamSynchronization::Measurements* stream,
info.capture_time_ntp_secs, info.capture_time_ntp_frac,
info.capture_time_source_clock, &new_rtcp_sr);
}
+
} // namespace
RtpStreamsSynchronizer::RtpStreamsSynchronizer(TaskQueueBase* main_queue,
Syncable* syncable_video)
: task_queue_(main_queue),
syncable_video_(syncable_video),
- last_sync_time_(rtc::TimeNanos()),
last_stats_log_ms_(rtc::TimeMillis()) {
RTC_DCHECK(syncable_video);
}
RtpStreamsSynchronizer::~RtpStreamsSynchronizer() {
RTC_DCHECK_RUN_ON(&main_checker_);
- task_safety_flag_->SetNotAlive();
+ repeating_task_.Stop();
}
void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) {
@@ -59,53 +59,32 @@ void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) {
syncable_audio_ = syncable_audio;
sync_.reset(nullptr);
- if (!syncable_audio_)
+ if (!syncable_audio_) {
+ repeating_task_.Stop();
return;
+ }
sync_.reset(
new StreamSynchronization(syncable_video_->id(), syncable_audio_->id()));
- QueueTimer();
-}
-void RtpStreamsSynchronizer::QueueTimer() {
- RTC_DCHECK_RUN_ON(&main_checker_);
- if (timer_running_)
+ if (repeating_task_.Running())
return;
- timer_running_ = true;
- uint32_t delay = kSyncIntervalMs - (rtc::TimeNanos() - last_sync_time_) /
- rtc::kNumNanosecsPerMillisec;
- if (delay > kSyncIntervalMs) {
- // TODO(tommi): |linux_chromium_tsan_rel_ng| bot has shown a failure when
- // running WebRtcBrowserTest.CallAndModifyStream, indicating that the
- // underlying clock is not reliable. Possibly there's a fake clock being
- // used as the tests are flaky. Look into and fix.
- RTC_LOG(LS_ERROR) << "Unexpected timer value: " << delay;
- delay = kSyncIntervalMs;
- }
-
- RTC_DCHECK_LE(delay, kSyncIntervalMs);
- task_queue_->PostDelayedTask(ToQueuedTask([this, safety = task_safety_flag_] {
- if (!safety->alive())
- return;
- RTC_DCHECK_RUN_ON(&main_checker_);
- timer_running_ = false;
- UpdateDelay();
- }),
- delay);
+ repeating_task_ =
+ RepeatingTaskHandle::DelayedStart(task_queue_, kSyncInterval, [this]() {
+ UpdateDelay();
+ return kSyncInterval;
+ });
}
void RtpStreamsSynchronizer::UpdateDelay() {
RTC_DCHECK_RUN_ON(&main_checker_);
- last_sync_time_ = rtc::TimeNanos();
if (!syncable_audio_)
return;
RTC_DCHECK(sync_.get());
- QueueTimer();
-
bool log_stats = false;
const int64_t now_ms = rtc::TimeMillis();
if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) {
diff --git a/video/rtp_streams_synchronizer2.h b/video/rtp_streams_synchronizer2.h
index 353434e6a9..6a522e801d 100644
--- a/video/rtp_streams_synchronizer2.h
+++ b/video/rtp_streams_synchronizer2.h
@@ -15,7 +15,7 @@
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
-#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
#include "video/stream_synchronization.h"
namespace webrtc {
@@ -45,7 +45,6 @@ class RtpStreamsSynchronizer {
double* estimated_freq_khz) const;
private:
- void QueueTimer();
void UpdateDelay();
TaskQueueBase* const task_queue_;
@@ -65,13 +64,8 @@ class RtpStreamsSynchronizer {
RTC_GUARDED_BY(main_checker_);
StreamSynchronization::Measurements video_measurement_
RTC_GUARDED_BY(main_checker_);
- int64_t last_sync_time_ RTC_GUARDED_BY(&main_checker_);
+ RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(main_checker_);
int64_t last_stats_log_ms_ RTC_GUARDED_BY(&main_checker_);
- bool timer_running_ RTC_GUARDED_BY(main_checker_) = false;
-
- // Used to signal destruction to potentially pending tasks.
- PendingTaskSafetyFlag::Pointer task_safety_flag_ =
- PendingTaskSafetyFlag::Create();
};
} // namespace internal
diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc
index e1dd736be6..05b419b8c9 100644
--- a/video/rtp_video_stream_receiver.cc
+++ b/video/rtp_video_stream_receiver.cc
@@ -25,7 +25,6 @@
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
@@ -35,13 +34,14 @@
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/h264_sprop_parameter_sets.h"
#include "modules/video_coding/h264_sps_pps_tracker.h"
-#include "modules/video_coding/nack_module.h"
#include "modules/video_coding/packet_buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
@@ -77,8 +77,6 @@ int PacketBufferMaxSize() {
return packet_buffer_max_size;
}
-} // namespace
-
std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
Clock* clock,
ReceiveStatistics* receive_statistics,
@@ -87,7 +85,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
RtcpCnameCallback* rtcp_cname_callback,
uint32_t local_ssrc) {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = false;
configuration.receiver_only = true;
@@ -99,7 +97,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
configuration.rtcp_cname_callback = rtcp_cname_callback;
configuration.local_media_ssrc = local_ssrc;
- std::unique_ptr<RtpRtcp> rtp_rtcp = RtpRtcp::Create(configuration);
+ std::unique_ptr<RtpRtcp> rtp_rtcp = RtpRtcp::DEPRECATED_Create(configuration);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
return rtp_rtcp;
@@ -107,6 +105,8 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
static const int kPacketLogIntervalMs = 10000;
+} // namespace
+
RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
KeyFrameRequestSender* key_frame_request_sender,
NackSender* nack_sender,
@@ -121,7 +121,7 @@ RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
}
void RtpVideoStreamReceiver::RtcpFeedbackBuffer::RequestKeyFrame() {
- rtc::CritScope lock(&cs_);
+ MutexLock lock(&mutex_);
request_key_frame_ = true;
}
@@ -129,14 +129,14 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendNack(
const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) {
RTC_DCHECK(!sequence_numbers.empty());
- rtc::CritScope lock(&cs_);
+ MutexLock lock(&mutex_);
nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
sequence_numbers.cbegin(),
sequence_numbers.cend());
if (!buffering_allowed) {
// Note that while *buffering* is not allowed, *batching* is, meaning that
// previously buffered messages may be sent along with the current message.
- SendBufferedRtcpFeedback();
+ SendRtcpFeedback(ConsumeRtcpFeedbackLocked());
}
}
@@ -146,7 +146,7 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification(
bool decodability_flag,
bool buffering_allowed) {
RTC_DCHECK(buffering_allowed);
- rtc::CritScope lock(&cs_);
+ MutexLock lock(&mutex_);
RTC_DCHECK(!lntf_state_)
<< "SendLossNotification() called twice in a row with no call to "
"SendBufferedRtcpFeedback() in between.";
@@ -155,34 +155,44 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification(
}
void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
- bool request_key_frame = false;
- std::vector<uint16_t> nack_sequence_numbers;
- absl::optional<LossNotificationState> lntf_state;
+ SendRtcpFeedback(ConsumeRtcpFeedback());
+}
- {
- rtc::CritScope lock(&cs_);
- std::swap(request_key_frame, request_key_frame_);
- std::swap(nack_sequence_numbers, nack_sequence_numbers_);
- std::swap(lntf_state, lntf_state_);
- }
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedback() {
+ MutexLock lock(&mutex_);
+ return ConsumeRtcpFeedbackLocked();
+}
+
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedbackLocked() {
+ ConsumedRtcpFeedback feedback;
+ std::swap(feedback.request_key_frame, request_key_frame_);
+ std::swap(feedback.nack_sequence_numbers, nack_sequence_numbers_);
+ std::swap(feedback.lntf_state, lntf_state_);
+ return feedback;
+}
- if (lntf_state) {
+void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendRtcpFeedback(
+ ConsumedRtcpFeedback feedback) {
+ if (feedback.lntf_state) {
// If either a NACK or a key frame request is sent, we should buffer
// the LNTF and wait for them (NACK or key frame request) to trigger
// the compound feedback message.
// Otherwise, the LNTF should be sent out immediately.
const bool buffering_allowed =
- request_key_frame || !nack_sequence_numbers.empty();
+ feedback.request_key_frame || !feedback.nack_sequence_numbers.empty();
loss_notification_sender_->SendLossNotification(
- lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num,
- lntf_state->decodability_flag, buffering_allowed);
+ feedback.lntf_state->last_decoded_seq_num,
+ feedback.lntf_state->last_received_seq_num,
+ feedback.lntf_state->decodability_flag, buffering_allowed);
}
- if (request_key_frame) {
+ if (feedback.request_key_frame) {
key_frame_request_sender_->RequestKeyFrame();
- } else if (!nack_sequence_numbers.empty()) {
- nack_sender_->SendNack(nack_sequence_numbers, true);
+ } else if (!feedback.nack_sequence_numbers.empty()) {
+ nack_sender_->SendNack(feedback.nack_sequence_numbers, true);
}
}
@@ -305,8 +315,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
}
if (config_.rtp.nack.rtp_history_ms != 0) {
- nack_module_ = std::make_unique<NackModule>(clock_, &rtcp_feedback_buffer_,
- &rtcp_feedback_buffer_);
+ nack_module_ = std::make_unique<DEPRECATED_NackModule>(
+ clock_, &rtcp_feedback_buffer_, &rtcp_feedback_buffer_);
process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE);
}
@@ -366,7 +376,7 @@ absl::optional<Syncable::Info> RtpVideoStreamReceiver::GetSyncInfo() const {
return absl::nullopt;
}
{
- rtc::CritScope lock(&sync_info_lock_);
+ MutexLock lock(&sync_info_lock_);
if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
return absl::nullopt;
}
@@ -507,7 +517,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
video_header.content_type = VideoContentType::UNSPECIFIED;
video_header.video_timing.flags = VideoSendTiming::kInvalid;
video_header.is_last_packet_in_frame |= rtp_packet.Marker();
- video_header.frame_marking.temporal_id = kNoTemporalIdx;
if (const auto* vp9_header =
absl::get_if<RTPVideoHeaderVP9>(&video_header.video_type_header)) {
@@ -525,7 +534,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
} else {
rtp_packet.GetExtension<PlayoutDelayLimits>(&video_header.playout_delay);
}
- rtp_packet.GetExtension<FrameMarkingExtension>(&video_header.frame_marking);
ParseGenericDependenciesResult generic_descriptor_state =
ParseGenericDependenciesExtension(rtp_packet, &video_header);
@@ -659,7 +667,7 @@ void RtpVideoStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet) {
// TODO(nisse): Exclude out-of-order packets?
int64_t now_ms = clock_->TimeInMilliseconds();
{
- rtc::CritScope cs(&sync_info_lock_);
+ MutexLock lock(&sync_info_lock_);
last_received_rtp_timestamp_ = packet.Timestamp();
last_received_rtp_system_time_ms_ = now_ms;
}
@@ -836,7 +844,7 @@ void RtpVideoStreamReceiver::OnAssembledFrame(
has_received_frame_ = true;
}
- rtc::CritScope lock(&reference_finder_lock_);
+ MutexLock lock(&reference_finder_lock_);
// Reset |reference_finder_| if |frame| is new and the codec have changed.
if (current_codec_) {
bool frame_is_newer =
@@ -879,7 +887,7 @@ void RtpVideoStreamReceiver::OnAssembledFrame(
void RtpVideoStreamReceiver::OnCompleteFrame(
std::unique_ptr<video_coding::EncodedFrame> frame) {
{
- rtc::CritScope lock(&last_seq_num_cs_);
+ MutexLock lock(&last_seq_num_mutex_);
video_coding::RtpFrameObject* rtp_frame =
static_cast<video_coding::RtpFrameObject*>(frame.get());
last_seq_num_for_pic_id_[rtp_frame->id.picture_id] =
@@ -892,7 +900,7 @@ void RtpVideoStreamReceiver::OnCompleteFrame(
void RtpVideoStreamReceiver::OnDecryptedFrame(
std::unique_ptr<video_coding::RtpFrameObject> frame) {
- rtc::CritScope lock(&reference_finder_lock_);
+ MutexLock lock(&reference_finder_lock_);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -959,7 +967,7 @@ void RtpVideoStreamReceiver::RemoveSecondarySink(
void RtpVideoStreamReceiver::ManageFrame(
std::unique_ptr<video_coding::RtpFrameObject> frame) {
- rtc::CritScope lock(&reference_finder_lock_);
+ MutexLock lock(&reference_finder_lock_);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -1014,7 +1022,7 @@ void RtpVideoStreamReceiver::ParseAndHandleEncapsulatingHeader(
// correctly calculate frame references.
void RtpVideoStreamReceiver::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
{
- rtc::CritScope lock(&reference_finder_lock_);
+ MutexLock lock(&reference_finder_lock_);
reference_finder_->PaddingReceived(seq_num);
}
OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
@@ -1078,7 +1086,7 @@ void RtpVideoStreamReceiver::FrameContinuous(int64_t picture_id) {
int seq_num = -1;
{
- rtc::CritScope lock(&last_seq_num_cs_);
+ MutexLock lock(&last_seq_num_mutex_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end())
seq_num = seq_num_it->second;
@@ -1090,7 +1098,7 @@ void RtpVideoStreamReceiver::FrameContinuous(int64_t picture_id) {
void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) {
int seq_num = -1;
{
- rtc::CritScope lock(&last_seq_num_cs_);
+ MutexLock lock(&last_seq_num_mutex_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end()) {
seq_num = seq_num_it->second;
@@ -1100,7 +1108,7 @@ void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) {
}
if (seq_num != -1) {
packet_buffer_.ClearTo(seq_num);
- rtc::CritScope lock(&reference_finder_lock_);
+ MutexLock lock(&reference_finder_lock_);
reference_finder_->ClearTo(seq_num);
}
}
diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h
index 0289f23a07..fb5dd85e3d 100644
--- a/video/rtp_video_stream_receiver.h
+++ b/video/rtp_video_stream_receiver.h
@@ -42,9 +42,9 @@
#include "modules/video_coding/rtp_frame_reference_finder.h"
#include "modules/video_coding/unique_timestamp_counter.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
@@ -53,7 +53,7 @@
namespace webrtc {
-class NackModule;
+class DEPRECATED_NackModule;
class PacketRouter;
class ProcessThread;
class ReceiveStatistics;
@@ -69,7 +69,8 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
public KeyFrameRequestSender,
public video_coding::OnCompleteFrameCallback,
public OnDecryptedFrameCallback,
- public OnDecryptionStatusChangeCallback {
+ public OnDecryptionStatusChangeCallback,
+ public RtpVideoFrameReceiver {
public:
// DEPRECATED due to dependency on ReceiveStatisticsProxy.
RtpVideoStreamReceiver(
@@ -205,9 +206,11 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
void AddSecondarySink(RtpPacketSinkInterface* sink);
void RemoveSecondarySink(const RtpPacketSinkInterface* sink);
- virtual void ManageFrame(std::unique_ptr<video_coding::RtpFrameObject> frame);
-
private:
+ // Implements RtpVideoFrameReceiver.
+ void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
// Used for buffering RTCP feedback messages and sending them all together.
// Note:
// 1. Key frame requests and NACKs are mutually exclusive, with the
@@ -225,35 +228,23 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
~RtcpFeedbackBuffer() override = default;
// KeyFrameRequestSender implementation.
- void RequestKeyFrame() override;
+ void RequestKeyFrame() RTC_LOCKS_EXCLUDED(mutex_) override;
// NackSender implementation.
void SendNack(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed) override;
+ bool buffering_allowed) RTC_LOCKS_EXCLUDED(mutex_) override;
// LossNotificationSender implementation.
void SendLossNotification(uint16_t last_decoded_seq_num,
uint16_t last_received_seq_num,
bool decodability_flag,
- bool buffering_allowed) override;
+ bool buffering_allowed)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
// Send all RTCP feedback messages buffered thus far.
- void SendBufferedRtcpFeedback();
+ void SendBufferedRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_);
private:
- KeyFrameRequestSender* const key_frame_request_sender_;
- NackSender* const nack_sender_;
- LossNotificationSender* const loss_notification_sender_;
-
- // NACKs are accessible from two threads due to nack_module_ being a module.
- rtc::CriticalSection cs_;
-
- // Key-frame-request-related state.
- bool request_key_frame_ RTC_GUARDED_BY(cs_);
-
- // NACK-related state.
- std::vector<uint16_t> nack_sequence_numbers_ RTC_GUARDED_BY(cs_);
-
// LNTF-related state.
struct LossNotificationState {
LossNotificationState(uint16_t last_decoded_seq_num,
@@ -267,7 +258,32 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
uint16_t last_received_seq_num;
bool decodability_flag;
};
- absl::optional<LossNotificationState> lntf_state_ RTC_GUARDED_BY(cs_);
+ struct ConsumedRtcpFeedback {
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_sequence_numbers;
+ absl::optional<LossNotificationState> lntf_state;
+ };
+
+ ConsumedRtcpFeedback ConsumeRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_);
+ ConsumedRtcpFeedback ConsumeRtcpFeedbackLocked()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // This method is called both with and without mutex_ held.
+ void SendRtcpFeedback(ConsumedRtcpFeedback feedback);
+
+ KeyFrameRequestSender* const key_frame_request_sender_;
+ NackSender* const nack_sender_;
+ LossNotificationSender* const loss_notification_sender_;
+
+ // NACKs are accessible from two threads due to nack_module_ being a module.
+ Mutex mutex_;
+
+ // Key-frame-request-related state.
+ bool request_key_frame_ RTC_GUARDED_BY(mutex_);
+
+ // NACK-related state.
+ std::vector<uint16_t> nack_sequence_numbers_ RTC_GUARDED_BY(mutex_);
+
+ absl::optional<LossNotificationState> lntf_state_ RTC_GUARDED_BY(mutex_);
};
enum ParseGenericDependenciesResult {
kDropPacket,
@@ -317,7 +333,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
KeyFrameRequestSender* const keyframe_request_sender_;
RtcpFeedbackBuffer rtcp_feedback_buffer_;
- std::unique_ptr<NackModule> nack_module_;
+ std::unique_ptr<DEPRECATED_NackModule> nack_module_;
std::unique_ptr<LossNotificationController> loss_notification_controller_;
video_coding::PacketBuffer packet_buffer_;
@@ -335,15 +351,15 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
absl::optional<int64_t> video_structure_frame_id_
RTC_GUARDED_BY(worker_task_checker_);
- rtc::CriticalSection reference_finder_lock_;
+ Mutex reference_finder_lock_;
std::unique_ptr<video_coding::RtpFrameReferenceFinder> reference_finder_
RTC_GUARDED_BY(reference_finder_lock_);
absl::optional<VideoCodecType> current_codec_;
uint32_t last_assembled_frame_rtp_timestamp_;
- rtc::CriticalSection last_seq_num_cs_;
+ Mutex last_seq_num_mutex_;
std::map<int64_t, uint16_t> last_seq_num_for_pic_id_
- RTC_GUARDED_BY(last_seq_num_cs_);
+ RTC_GUARDED_BY(last_seq_num_mutex_);
video_coding::H264SpsPpsTracker tracker_;
// Maps payload id to the depacketizer.
@@ -362,7 +378,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
// Info for GetSyncInfo is updated on network or worker thread, and queried on
// the worker thread.
- rtc::CriticalSection sync_info_lock_;
+ mutable Mutex sync_info_lock_;
absl::optional<uint32_t> last_received_rtp_timestamp_
RTC_GUARDED_BY(sync_info_lock_);
absl::optional<int64_t> last_received_rtp_system_time_ms_
diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc
new file mode 100644
index 0000000000..3f11bb77c4
--- /dev/null
+++ b/video/rtp_video_stream_receiver2.cc
@@ -0,0 +1,1154 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/rtp_video_stream_receiver2.h"
+
+#include <algorithm>
+#include <limits>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "media/base/media_constants.h"
+#include "modules/pacing/packet_router.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/rtp_cvo.h"
+#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
+#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/h264_sprop_parameter_sets.h"
+#include "modules/video_coding/h264_sps_pps_tracker.h"
+#include "modules/video_coding/nack_module2.h"
+#include "modules/video_coding/packet_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/location.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+#include "system_wrappers/include/ntp_time.h"
+#include "video/receive_statistics_proxy2.h"
+
+namespace webrtc {
+
+namespace {
+// TODO(philipel): Change kPacketBufferStartSize back to 32 in M63 see:
+// crbug.com/752886
+constexpr int kPacketBufferStartSize = 512;
+constexpr int kPacketBufferMaxSize = 2048;
+
+int PacketBufferMaxSize() {
+ // The group here must be a positive power of 2, in which case that is used as
+ // size. All other values shall result in the default value being used.
+ const std::string group_name =
+ webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize");
+ int packet_buffer_max_size = kPacketBufferMaxSize;
+ if (!group_name.empty() &&
+ (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 ||
+ packet_buffer_max_size <= 0 ||
+ // Verify that the number is a positive power of 2.
+ (packet_buffer_max_size & (packet_buffer_max_size - 1)) != 0)) {
+ RTC_LOG(LS_WARNING) << "Invalid packet buffer max size: " << group_name;
+ packet_buffer_max_size = kPacketBufferMaxSize;
+ }
+ return packet_buffer_max_size;
+}
+
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
+ Clock* clock,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ uint32_t local_ssrc) {
+ RtpRtcpInterface::Configuration configuration;
+ configuration.clock = clock;
+ configuration.audio = false;
+ configuration.receiver_only = true;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.rtcp_cname_callback = rtcp_cname_callback;
+ configuration.local_media_ssrc = local_ssrc;
+
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp =
+ ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+
+ return rtp_rtcp;
+}
+
+std::unique_ptr<NackModule2> MaybeConstructNackModule(
+ TaskQueueBase* current_queue,
+ const VideoReceiveStream::Config& config,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender) {
+ if (config.rtp.nack.rtp_history_ms == 0)
+ return nullptr;
+
+ return std::make_unique<NackModule2>(current_queue, clock, nack_sender,
+ keyframe_request_sender);
+}
+
+static const int kPacketLogIntervalMs = 10000;
+
+} // namespace
+
+RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
+ KeyFrameRequestSender* key_frame_request_sender,
+ NackSender* nack_sender,
+ LossNotificationSender* loss_notification_sender)
+ : key_frame_request_sender_(key_frame_request_sender),
+ nack_sender_(nack_sender),
+ loss_notification_sender_(loss_notification_sender),
+ request_key_frame_(false) {
+ RTC_DCHECK(key_frame_request_sender_);
+ RTC_DCHECK(nack_sender_);
+ RTC_DCHECK(loss_notification_sender_);
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RequestKeyFrame() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ request_key_frame_ = true;
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendNack(
+ const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(!sequence_numbers.empty());
+ nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
+ sequence_numbers.cbegin(),
+ sequence_numbers.cend());
+ if (!buffering_allowed) {
+ // Note that while *buffering* is not allowed, *batching* is, meaning that
+ // previously buffered messages may be sent along with the current message.
+ SendBufferedRtcpFeedback();
+ }
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification(
+ uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(buffering_allowed);
+ RTC_DCHECK(!lntf_state_)
+ << "SendLossNotification() called twice in a row with no call to "
+ "SendBufferedRtcpFeedback() in between.";
+ lntf_state_ = absl::make_optional<LossNotificationState>(
+ last_decoded_seq_num, last_received_seq_num, decodability_flag);
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_sequence_numbers;
+ absl::optional<LossNotificationState> lntf_state;
+
+ std::swap(request_key_frame, request_key_frame_);
+ std::swap(nack_sequence_numbers, nack_sequence_numbers_);
+ std::swap(lntf_state, lntf_state_);
+
+ if (lntf_state) {
+ // If either a NACK or a key frame request is sent, we should buffer
+ // the LNTF and wait for them (NACK or key frame request) to trigger
+ // the compound feedback message.
+ // Otherwise, the LNTF should be sent out immediately.
+ const bool buffering_allowed =
+ request_key_frame || !nack_sequence_numbers.empty();
+
+ loss_notification_sender_->SendLossNotification(
+ lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num,
+ lntf_state->decodability_flag, buffering_allowed);
+ }
+
+ if (request_key_frame) {
+ key_frame_request_sender_->RequestKeyFrame();
+ } else if (!nack_sequence_numbers.empty()) {
+ nack_sender_->SendNack(nack_sequence_numbers, true);
+ }
+}
+
+RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
+ TaskQueueBase* current_queue,
+ Clock* clock,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ PacketRouter* packet_router,
+ const VideoReceiveStream::Config* config,
+ ReceiveStatistics* rtp_receive_statistics,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ ProcessThread* process_thread,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ video_coding::OnCompleteFrameCallback* complete_frame_callback,
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
+ : clock_(clock),
+ config_(*config),
+ packet_router_(packet_router),
+ process_thread_(process_thread),
+ ntp_estimator_(clock),
+ rtp_header_extensions_(config_.rtp.extensions),
+ forced_playout_delay_max_ms_("max_ms", absl::nullopt),
+ forced_playout_delay_min_ms_("min_ms", absl::nullopt),
+ rtp_receive_statistics_(rtp_receive_statistics),
+ ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc,
+ this,
+ config->rtp.extensions)),
+ receiving_(false),
+ last_packet_log_ms_(-1),
+ rtp_rtcp_(CreateRtpRtcpModule(clock,
+ rtp_receive_statistics_,
+ transport,
+ rtt_stats,
+ rtcp_packet_type_counter_observer,
+ rtcp_cname_callback,
+ config_.rtp.local_ssrc)),
+ complete_frame_callback_(complete_frame_callback),
+ keyframe_request_sender_(keyframe_request_sender),
+ // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate
+ // directly with |rtp_rtcp_|.
+ rtcp_feedback_buffer_(this, nack_sender, this),
+ nack_module_(MaybeConstructNackModule(current_queue,
+ config_,
+ clock_,
+ &rtcp_feedback_buffer_,
+ &rtcp_feedback_buffer_)),
+ packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()),
+ has_received_frame_(false),
+ frames_decryptable_(false),
+ absolute_capture_time_receiver_(clock) {
+ constexpr bool remb_candidate = true;
+ if (packet_router_)
+ packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
+
+ RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff)
+ << "A stream should not be configured with RTCP disabled. This value is "
+ "reserved for internal usage.";
+ // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
+ RTC_DCHECK(config_.rtp.local_ssrc != 0);
+ RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
+
+ rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode);
+ rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc);
+
+ static const int kMaxPacketAgeToNack = 450;
+ const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0)
+ ? kMaxPacketAgeToNack
+ : kDefaultMaxReorderingThreshold;
+ rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc,
+ max_reordering_threshold);
+ // TODO(nisse): For historic reasons, we applied the above
+ // max_reordering_threshold also for RTX stats, which makes little sense since
+ // we don't NACK rtx packets. Consider deleting the below block, and rely on
+ // the default threshold.
+ if (config_.rtp.rtx_ssrc) {
+ rtp_receive_statistics_->SetMaxReorderingThreshold(
+ config_.rtp.rtx_ssrc, max_reordering_threshold);
+ }
+ if (config_.rtp.rtcp_xr.receiver_reference_time_report)
+ rtp_rtcp_->SetRtcpXrRrtrStatus(true);
+
+ ParseFieldTrial(
+ {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_},
+ field_trial::FindFullName("WebRTC-ForcePlayoutDelay"));
+
+ process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
+
+ if (config_.rtp.lntf.enabled) {
+ loss_notification_controller_ =
+ std::make_unique<LossNotificationController>(&rtcp_feedback_buffer_,
+ &rtcp_feedback_buffer_);
+ }
+
+ reference_finder_ =
+ std::make_unique<video_coding::RtpFrameReferenceFinder>(this);
+
+ // Only construct the encrypted receiver if frame encryption is enabled.
+ if (config_.crypto_options.sframe.require_frame_encryption) {
+ buffered_frame_decryptor_ =
+ std::make_unique<BufferedFrameDecryptor>(this, this);
+ if (frame_decryptor != nullptr) {
+ buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
+ }
+ }
+
+ if (frame_transformer) {
+ frame_transformer_delegate_ = new rtc::RefCountedObject<
+ RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
+ frame_transformer_delegate_->Init();
+ }
+}
+
+RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() {
+ RTC_DCHECK(secondary_sinks_.empty());
+
+ process_thread_->DeRegisterModule(rtp_rtcp_.get());
+
+ if (packet_router_)
+ packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get());
+ UpdateHistograms();
+ if (frame_transformer_delegate_)
+ frame_transformer_delegate_->Reset();
+}
+
+void RtpVideoStreamReceiver2::AddReceiveCodec(
+ const VideoCodec& video_codec,
+ const std::map<std::string, std::string>& codec_params,
+ bool raw_payload) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ payload_type_map_.emplace(
+ video_codec.plType,
+ raw_payload ? std::make_unique<VideoRtpDepacketizerRaw>()
+ : CreateVideoRtpDepacketizer(video_codec.codecType));
+ pt_codec_params_.emplace(video_codec.plType, codec_params);
+}
+
+absl::optional<Syncable::Info> RtpVideoStreamReceiver2::GetSyncInfo() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ Syncable::Info info;
+ if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
+ &info.capture_time_ntp_frac, nullptr, nullptr,
+ &info.capture_time_source_clock) != 0) {
+ return absl::nullopt;
+ }
+
+ if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
+ return absl::nullopt;
+ }
+ info.latest_received_capture_timestamp = *last_received_rtp_timestamp_;
+ info.latest_receive_time_ms = *last_received_rtp_system_time_ms_;
+
+ // Leaves info.current_delay_ms uninitialized.
+ return info;
+}
+
+RtpVideoStreamReceiver2::ParseGenericDependenciesResult
+RtpVideoStreamReceiver2::ParseGenericDependenciesExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader* video_header) {
+ if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) {
+ webrtc::DependencyDescriptor dependency_descriptor;
+ if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>(
+ video_structure_.get(), &dependency_descriptor)) {
+ // Descriptor is there, but failed to parse. Either it is invalid,
+ // or too old packet (after relevant video_structure_ changed),
+ // or too new packet (before relevant video_structure_ arrived).
+ // Drop such packet to be on the safe side.
+ // TODO(bugs.webrtc.org/10342): Stash too new packet.
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << " Failed to parse dependency descriptor.";
+ return kDropPacket;
+ }
+ if (dependency_descriptor.attached_structure != nullptr &&
+ !dependency_descriptor.first_packet_in_frame) {
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << "Invalid dependency descriptor: structure "
+ "attached to non first packet of a frame.";
+ return kDropPacket;
+ }
+ video_header->is_first_packet_in_frame =
+ dependency_descriptor.first_packet_in_frame;
+ video_header->is_last_packet_in_frame =
+ dependency_descriptor.last_packet_in_frame;
+
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number);
+ auto& generic_descriptor_info = video_header->generic.emplace();
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ dependency_descriptor.frame_dependencies.spatial_id;
+ generic_descriptor_info.temporal_index =
+ dependency_descriptor.frame_dependencies.temporal_id;
+ for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ generic_descriptor_info.decode_target_indications =
+ dependency_descriptor.frame_dependencies.decode_target_indications;
+ if (dependency_descriptor.resolution) {
+ video_header->width = dependency_descriptor.resolution->Width();
+ video_header->height = dependency_descriptor.resolution->Height();
+ }
+
+ // FrameDependencyStructure is sent in dependency descriptor of the first
+ // packet of a key frame and required for parsed dependency descriptor in
+ // all the following packets until next key frame.
+ // Save it if there is a (potentially) new structure.
+ if (dependency_descriptor.attached_structure) {
+ RTC_DCHECK(dependency_descriptor.first_packet_in_frame);
+ if (video_structure_frame_id_ > frame_id) {
+ RTC_LOG(LS_WARNING)
+ << "Arrived key frame with id " << frame_id << " and structure id "
+ << dependency_descriptor.attached_structure->structure_id
+ << " is older than the latest received key frame with id "
+ << *video_structure_frame_id_ << " and structure id "
+ << video_structure_->structure_id;
+ return kDropPacket;
+ }
+ video_structure_ = std::move(dependency_descriptor.attached_structure);
+ video_structure_frame_id_ = frame_id;
+ video_header->frame_type = VideoFrameType::kVideoFrameKey;
+ } else {
+ video_header->frame_type = VideoFrameType::kVideoFrameDelta;
+ }
+ return kHasGenericDescriptor;
+ }
+
+ RtpGenericFrameDescriptor generic_frame_descriptor;
+ if (!rtp_packet.GetExtension<RtpGenericFrameDescriptorExtension00>(
+ &generic_frame_descriptor)) {
+ return kNoGenericDescriptor;
+ }
+
+ video_header->is_first_packet_in_frame =
+ generic_frame_descriptor.FirstPacketInSubFrame();
+ video_header->is_last_packet_in_frame =
+ generic_frame_descriptor.LastPacketInSubFrame();
+
+ if (generic_frame_descriptor.FirstPacketInSubFrame()) {
+ video_header->frame_type =
+ generic_frame_descriptor.FrameDependenciesDiffs().empty()
+ ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+
+ auto& generic_descriptor_info = video_header->generic.emplace();
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId());
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ generic_frame_descriptor.SpatialLayer();
+ generic_descriptor_info.temporal_index =
+ generic_frame_descriptor.TemporalLayer();
+ for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ }
+ video_header->width = generic_frame_descriptor.Width();
+ video_header->height = generic_frame_descriptor.Height();
+ return kHasGenericDescriptor;
+}
+
+void RtpVideoStreamReceiver2::OnReceivedPayloadData(
+ rtc::CopyOnWriteBuffer codec_payload,
+ const RtpPacketReceived& rtp_packet,
+ const RTPVideoHeader& video) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
+ rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()),
+ clock_->TimeInMilliseconds());
+
+ // Try to extrapolate absolute capture time if it is missing.
+ packet->packet_info.set_absolute_capture_time(
+ absolute_capture_time_receiver_.OnReceivePacket(
+ AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(),
+ packet->packet_info.csrcs()),
+ packet->packet_info.rtp_timestamp(),
+ // Assume frequency is the same one for all video frames.
+ kVideoPayloadTypeFrequency,
+ packet->packet_info.absolute_capture_time()));
+
+ RTPVideoHeader& video_header = packet->video_header;
+ video_header.rotation = kVideoRotation_0;
+ video_header.content_type = VideoContentType::UNSPECIFIED;
+ video_header.video_timing.flags = VideoSendTiming::kInvalid;
+ video_header.is_last_packet_in_frame |= rtp_packet.Marker();
+
+ if (const auto* vp9_header =
+ absl::get_if<RTPVideoHeaderVP9>(&video_header.video_type_header)) {
+ video_header.is_last_packet_in_frame |= vp9_header->end_of_frame;
+ video_header.is_first_packet_in_frame |= vp9_header->beginning_of_frame;
+ }
+
+ rtp_packet.GetExtension<VideoOrientation>(&video_header.rotation);
+ rtp_packet.GetExtension<VideoContentTypeExtension>(
+ &video_header.content_type);
+ rtp_packet.GetExtension<VideoTimingExtension>(&video_header.video_timing);
+ if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) {
+ video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_;
+ video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_;
+ } else {
+ rtp_packet.GetExtension<PlayoutDelayLimits>(&video_header.playout_delay);
+ }
+
+ ParseGenericDependenciesResult generic_descriptor_state =
+ ParseGenericDependenciesExtension(rtp_packet, &video_header);
+ if (generic_descriptor_state == kDropPacket)
+ return;
+
+ // Color space should only be transmitted in the last packet of a frame,
+ // therefore, neglect it otherwise so that last_color_space_ is not reset by
+ // mistake.
+ if (video_header.is_last_packet_in_frame) {
+ video_header.color_space = rtp_packet.GetExtension<ColorSpaceExtension>();
+ if (video_header.color_space ||
+ video_header.frame_type == VideoFrameType::kVideoFrameKey) {
+ // Store color space since it's only transmitted when changed or for key
+ // frames. Color space will be cleared if a key frame is transmitted
+ // without color space information.
+ last_color_space_ = video_header.color_space;
+ } else if (last_color_space_) {
+ video_header.color_space = last_color_space_;
+ }
+ }
+
+ if (loss_notification_controller_) {
+ if (rtp_packet.recovered()) {
+ // TODO(bugs.webrtc.org/10336): Implement support for reordering.
+ RTC_LOG(LS_INFO)
+ << "LossNotificationController does not support reordering.";
+ } else if (generic_descriptor_state == kNoGenericDescriptor) {
+ RTC_LOG(LS_WARNING) << "LossNotificationController requires generic "
+ "frame descriptor, but it is missing.";
+ } else {
+ if (video_header.is_first_packet_in_frame) {
+ RTC_DCHECK(video_header.generic);
+ LossNotificationController::FrameDetails frame;
+ frame.is_keyframe =
+ video_header.frame_type == VideoFrameType::kVideoFrameKey;
+ frame.frame_id = video_header.generic->frame_id;
+ frame.frame_dependencies = video_header.generic->dependencies;
+ loss_notification_controller_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), &frame);
+ } else {
+ loss_notification_controller_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), nullptr);
+ }
+ }
+ }
+
+ if (nack_module_) {
+ const bool is_keyframe =
+ video_header.is_first_packet_in_frame &&
+ video_header.frame_type == VideoFrameType::kVideoFrameKey;
+
+ packet->times_nacked = nack_module_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered());
+ } else {
+ packet->times_nacked = -1;
+ }
+
+ if (codec_payload.size() == 0) {
+ NotifyReceiverOfEmptyPacket(packet->seq_num);
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ return;
+ }
+
+ if (packet->codec() == kVideoCodecH264) {
+ // Only when we start to receive packets will we know what payload type
+ // that will be used. When we know the payload type insert the correct
+ // sps/pps into the tracker.
+ if (packet->payload_type != last_payload_type_) {
+ last_payload_type_ = packet->payload_type;
+ InsertSpsPpsIntoTracker(packet->payload_type);
+ }
+
+ video_coding::H264SpsPpsTracker::FixedBitstream fixed =
+ tracker_.CopyAndFixBitstream(
+ rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()),
+ &packet->video_header);
+
+ switch (fixed.action) {
+ case video_coding::H264SpsPpsTracker::kRequestKeyframe:
+ rtcp_feedback_buffer_.RequestKeyFrame();
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ ABSL_FALLTHROUGH_INTENDED;
+ case video_coding::H264SpsPpsTracker::kDrop:
+ return;
+ case video_coding::H264SpsPpsTracker::kInsert:
+ packet->video_payload = std::move(fixed.bitstream);
+ break;
+ }
+
+ } else {
+ packet->video_payload = std::move(codec_payload);
+ }
+
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ frame_counter_.Add(packet->timestamp);
+ OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet)));
+}
+
+void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length) {
+ RtpPacketReceived packet;
+ if (!packet.Parse(rtp_packet, rtp_packet_length))
+ return;
+ if (packet.PayloadType() == config_.rtp.red_payload_type) {
+ RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation";
+ return;
+ }
+
+ packet.IdentifyExtensions(rtp_header_extensions_);
+ packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
+ // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both
+ // original (decapsulated) media packets and recovered packets to
+ // this callback. We need a way to distinguish, for setting
+ // packet.recovered() correctly. Ideally, move RED decapsulation out
+ // of the Ulpfec implementation.
+
+ ReceivePacket(packet);
+}
+
+// This method handles both regular RTP packets and packets recovered
+// via FlexFEC.
+void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ if (!receiving_) {
+ return;
+ }
+
+ if (!packet.recovered()) {
+ // TODO(nisse): Exclude out-of-order packets?
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ last_received_rtp_timestamp_ = packet.Timestamp();
+ last_received_rtp_system_time_ms_ = now_ms;
+
+ // Periodically log the RTP header of incoming packets.
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+ rtc::StringBuilder ss;
+ ss << "Packet received on SSRC: " << packet.Ssrc()
+ << " with payload type: " << static_cast<int>(packet.PayloadType())
+ << ", timestamp: " << packet.Timestamp()
+ << ", sequence number: " << packet.SequenceNumber()
+ << ", arrival time: " << packet.arrival_time_ms();
+ int32_t time_offset;
+ if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
+ ss << ", toffset: " << time_offset;
+ }
+ uint32_t send_time;
+ if (packet.GetExtension<AbsoluteSendTime>(&send_time)) {
+ ss << ", abs send time: " << send_time;
+ }
+ RTC_LOG(LS_INFO) << ss.str();
+ last_packet_log_ms_ = now_ms;
+ }
+ }
+
+ ReceivePacket(packet);
+
+ // Update receive statistics after ReceivePacket.
+ // Receive statistics will be reset if the payload type changes (make sure
+ // that the first packet is included in the stats).
+ if (!packet.recovered()) {
+ rtp_receive_statistics_->OnRtpPacket(packet);
+ }
+
+ for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) {
+ secondary_sink->OnRtpPacket(packet);
+ }
+}
+
+void RtpVideoStreamReceiver2::RequestKeyFrame() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests
+ // issued by anything other than the LossNotificationController if it (the
+ // sender) is relying on LNTF alone.
+ if (keyframe_request_sender_) {
+ keyframe_request_sender_->RequestKeyFrame();
+ } else {
+ rtp_rtcp_->SendPictureLossIndication();
+ }
+}
+
+void RtpVideoStreamReceiver2::SendLossNotification(
+ uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ RTC_DCHECK(config_.rtp.lntf.enabled);
+ rtp_rtcp_->SendLossNotification(last_decoded_seq_num, last_received_seq_num,
+ decodability_flag, buffering_allowed);
+}
+
+bool RtpVideoStreamReceiver2::IsUlpfecEnabled() const {
+ return config_.rtp.ulpfec_payload_type != -1;
+}
+
+bool RtpVideoStreamReceiver2::IsRetransmissionsEnabled() const {
+ return config_.rtp.nack.rtp_history_ms > 0;
+}
+
+void RtpVideoStreamReceiver2::RequestPacketRetransmit(
+ const std::vector<uint16_t>& sequence_numbers) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ rtp_rtcp_->SendNack(sequence_numbers);
+}
+
+bool RtpVideoStreamReceiver2::IsDecryptable() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ return frames_decryptable_;
+}
+
+void RtpVideoStreamReceiver2::OnInsertedPacket(
+ video_coding::PacketBuffer::InsertResult result) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ video_coding::PacketBuffer::Packet* first_packet = nullptr;
+ int max_nack_count;
+ int64_t min_recv_time;
+ int64_t max_recv_time;
+ std::vector<rtc::ArrayView<const uint8_t>> payloads;
+ RtpPacketInfos::vector_type packet_infos;
+
+ bool frame_boundary = true;
+ for (auto& packet : result.packets) {
+ // PacketBuffer promisses frame boundaries are correctly set on each
+ // packet. Document that assumption with the DCHECKs.
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
+ if (packet->is_first_packet_in_frame()) {
+ first_packet = packet.get();
+ max_nack_count = packet->times_nacked;
+ min_recv_time = packet->packet_info.receive_time_ms();
+ max_recv_time = packet->packet_info.receive_time_ms();
+ payloads.clear();
+ packet_infos.clear();
+ } else {
+ max_nack_count = std::max(max_nack_count, packet->times_nacked);
+ min_recv_time =
+ std::min(min_recv_time, packet->packet_info.receive_time_ms());
+ max_recv_time =
+ std::max(max_recv_time, packet->packet_info.receive_time_ms());
+ }
+ payloads.emplace_back(packet->video_payload);
+ packet_infos.push_back(packet->packet_info);
+
+ frame_boundary = packet->is_last_packet_in_frame();
+ if (packet->is_last_packet_in_frame()) {
+ auto depacketizer_it = payload_type_map_.find(first_packet->payload_type);
+ RTC_CHECK(depacketizer_it != payload_type_map_.end());
+
+ rtc::scoped_refptr<EncodedImageBuffer> bitstream =
+ depacketizer_it->second->AssembleFrame(payloads);
+ if (!bitstream) {
+ // Failed to assemble a frame. Discard and continue.
+ continue;
+ }
+
+ const video_coding::PacketBuffer::Packet& last_packet = *packet;
+ OnAssembledFrame(std::make_unique<video_coding::RtpFrameObject>(
+ first_packet->seq_num, //
+ last_packet.seq_num, //
+ last_packet.marker_bit, //
+ max_nack_count, //
+ min_recv_time, //
+ max_recv_time, //
+ first_packet->timestamp, //
+ first_packet->ntp_time_ms, //
+ last_packet.video_header.video_timing, //
+ first_packet->payload_type, //
+ first_packet->codec(), //
+ last_packet.video_header.rotation, //
+ last_packet.video_header.content_type, //
+ first_packet->video_header, //
+ last_packet.video_header.color_space, //
+ RtpPacketInfos(std::move(packet_infos)), //
+ std::move(bitstream)));
+ }
+ }
+ RTC_DCHECK(frame_boundary);
+ if (result.buffer_cleared) {
+ RequestKeyFrame();
+ }
+}
+
+void RtpVideoStreamReceiver2::OnAssembledFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(frame);
+
+ const absl::optional<RTPVideoHeader::GenericDescriptorInfo>& descriptor =
+ frame->GetRtpVideoHeader().generic;
+
+ if (loss_notification_controller_ && descriptor) {
+ loss_notification_controller_->OnAssembledFrame(
+ frame->first_seq_num(), descriptor->frame_id,
+ absl::c_linear_search(descriptor->decode_target_indications,
+ DecodeTargetIndication::kDiscardable),
+ descriptor->dependencies);
+ }
+
+ // If frames arrive before a key frame, they would not be decodable.
+ // In that case, request a key frame ASAP.
+ if (!has_received_frame_) {
+ if (frame->FrameType() != VideoFrameType::kVideoFrameKey) {
+ // |loss_notification_controller_|, if present, would have already
+ // requested a key frame when the first packet for the non-key frame
+ // had arrived, so no need to replicate the request.
+ if (!loss_notification_controller_) {
+ RequestKeyFrame();
+ }
+ }
+ has_received_frame_ = true;
+ }
+
+ // Reset |reference_finder_| if |frame| is new and the codec have changed.
+ if (current_codec_) {
+ bool frame_is_newer =
+ AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_);
+
+ if (frame->codec_type() != current_codec_) {
+ if (frame_is_newer) {
+ // When we reset the |reference_finder_| we don't want new picture ids
+ // to overlap with old picture ids. To ensure that doesn't happen we
+ // start from the |last_completed_picture_id_| and add an offset in case
+ // of reordering.
+ reference_finder_ =
+ std::make_unique<video_coding::RtpFrameReferenceFinder>(
+ this, last_completed_picture_id_ +
+ std::numeric_limits<uint16_t>::max());
+ current_codec_ = frame->codec_type();
+ } else {
+ // Old frame from before the codec switch, discard it.
+ return;
+ }
+ }
+
+ if (frame_is_newer) {
+ last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+ }
+ } else {
+ current_codec_ = frame->codec_type();
+ last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+ }
+
+ if (buffered_frame_decryptor_ != nullptr) {
+ buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame));
+ } else if (frame_transformer_delegate_) {
+ frame_transformer_delegate_->TransformFrame(std::move(frame));
+ } else {
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+}
+
+void RtpVideoStreamReceiver2::OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ video_coding::RtpFrameObject* rtp_frame =
+ static_cast<video_coding::RtpFrameObject*>(frame.get());
+ last_seq_num_for_pic_id_[rtp_frame->id.picture_id] =
+ rtp_frame->last_seq_num();
+
+ last_completed_picture_id_ =
+ std::max(last_completed_picture_id_, frame->id.picture_id);
+ complete_frame_callback_->OnCompleteFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::OnDecryptedFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ reference_finder_->ManageFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::OnDecryptionStatusChange(
+ FrameDecryptorInterface::Status status) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // Called from BufferedFrameDecryptor::DecryptFrame.
+ frames_decryptable_ =
+ (status == FrameDecryptorInterface::Status::kOk) ||
+ (status == FrameDecryptorInterface::Status::kRecoverable);
+}
+
+void RtpVideoStreamReceiver2::SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (buffered_frame_decryptor_ == nullptr) {
+ buffered_frame_decryptor_ =
+ std::make_unique<BufferedFrameDecryptor>(this, this);
+ }
+ buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
+}
+
+void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ frame_transformer_delegate_ =
+ new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
+ frame_transformer_delegate_->Init();
+}
+
+void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (nack_module_)
+ nack_module_->UpdateRtt(max_rtt_ms);
+}
+
+absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedPacketMs() const {
+ return packet_buffer_.LastReceivedPacketMs();
+}
+
+absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs()
+ const {
+ return packet_buffer_.LastReceivedKeyframePacketMs();
+}
+
+void RtpVideoStreamReceiver2::AddSecondarySink(RtpPacketSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink));
+ secondary_sinks_.push_back(sink);
+}
+
+void RtpVideoStreamReceiver2::RemoveSecondarySink(
+ const RtpPacketSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ auto it = absl::c_find(secondary_sinks_, sink);
+ if (it == secondary_sinks_.end()) {
+ // We might be rolling-back a call whose setup failed mid-way. In such a
+ // case, it's simpler to remove "everything" rather than remember what
+ // has already been added.
+ RTC_LOG(LS_WARNING) << "Removal of unknown sink.";
+ return;
+ }
+ secondary_sinks_.erase(it);
+}
+
+void RtpVideoStreamReceiver2::ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ reference_finder_->ManageFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (packet.payload_size() == 0) {
+ // Padding or keep-alive packet.
+ // TODO(nisse): Could drop empty packets earlier, but need to figure out how
+ // they should be counted in stats.
+ NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
+ return;
+ }
+ if (packet.PayloadType() == config_.rtp.red_payload_type) {
+ ParseAndHandleEncapsulatingHeader(packet);
+ return;
+ }
+
+ const auto type_it = payload_type_map_.find(packet.PayloadType());
+ if (type_it == payload_type_map_.end()) {
+ return;
+ }
+ absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload =
+ type_it->second->Parse(packet.PayloadBuffer());
+ if (parsed_payload == absl::nullopt) {
+ RTC_LOG(LS_WARNING) << "Failed parsing payload.";
+ return;
+ }
+
+ OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet,
+ parsed_payload->video_header);
+}
+
+void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader(
+ const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (packet.PayloadType() == config_.rtp.red_payload_type &&
+ packet.payload_size() > 0) {
+ if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) {
+ // Notify video_receiver about received FEC packets to avoid NACKing these
+ // packets.
+ NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
+ }
+ if (!ulpfec_receiver_->AddReceivedRedPacket(
+ packet, config_.rtp.ulpfec_payload_type)) {
+ return;
+ }
+ ulpfec_receiver_->ProcessReceivedFec();
+ }
+}
+
+// In the case of a video stream without picture ids and no rtx the
+// RtpFrameReferenceFinder will need to know about padding to
+// correctly calculate frame references.
+void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ reference_finder_->PaddingReceived(seq_num);
+
+ OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
+ if (nack_module_) {
+ nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false,
+ /* is _recovered = */ false);
+ }
+ if (loss_notification_controller_) {
+ // TODO(bugs.webrtc.org/10336): Handle empty packets.
+ RTC_LOG(LS_WARNING)
+ << "LossNotificationController does not expect empty packets.";
+ }
+}
+
+bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
+ size_t rtcp_packet_length) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ if (!receiving_) {
+ return false;
+ }
+
+ rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+
+ int64_t rtt = 0;
+ rtp_rtcp_->RTT(config_.rtp.remote_ssrc, &rtt, nullptr, nullptr, nullptr);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return true;
+ }
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ uint32_t recieved_ntp_secs = 0;
+ uint32_t recieved_ntp_frac = 0;
+ if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &recieved_ntp_secs,
+ &recieved_ntp_frac, &rtp_timestamp) != 0) {
+ // Waiting for RTCP.
+ return true;
+ }
+ NtpTime recieved_ntp(recieved_ntp_secs, recieved_ntp_frac);
+ int64_t time_since_recieved =
+ clock_->CurrentNtpInMilliseconds() - recieved_ntp.ToMs();
+ // Don't use old SRs to estimate time.
+ if (time_since_recieved <= 1) {
+ ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+ absl::optional<int64_t> remote_to_local_clock_offset_ms =
+ ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
+ if (remote_to_local_clock_offset_ms.has_value()) {
+ absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
+ Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
+ }
+ }
+
+ return true;
+}
+
+void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (!nack_module_)
+ return;
+
+ int seq_num = -1;
+ auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
+ if (seq_num_it != last_seq_num_for_pic_id_.end())
+ seq_num = seq_num_it->second;
+ if (seq_num != -1)
+ nack_module_->ClearUpTo(seq_num);
+}
+
+void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // Running on the decoder thread.
+ int seq_num = -1;
+ auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
+ if (seq_num_it != last_seq_num_for_pic_id_.end()) {
+ seq_num = seq_num_it->second;
+ last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
+ ++seq_num_it);
+ }
+
+ if (seq_num != -1) {
+ packet_buffer_.ClearTo(seq_num);
+ reference_finder_->ClearTo(seq_num);
+ }
+}
+
+void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
+ : RtcpMode::kOff);
+}
+
+void RtpVideoStreamReceiver2::StartReceive() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ receiving_ = true;
+}
+
+void RtpVideoStreamReceiver2::StopReceive() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ receiving_ = false;
+}
+
+void RtpVideoStreamReceiver2::UpdateHistograms() {
+ FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter();
+ if (counter.first_packet_time_ms == -1)
+ return;
+
+ int64_t elapsed_sec =
+ (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds)
+ return;
+
+ if (counter.num_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE(
+ "WebRTC.Video.ReceivedFecPacketsInPercent",
+ static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+ }
+ if (counter.num_fec_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+ static_cast<int>(counter.num_recovered_packets *
+ 100 / counter.num_fec_packets));
+ }
+ if (config_.rtp.ulpfec_payload_type != -1) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.FecBitrateReceivedInKbps",
+ static_cast<int>(counter.num_bytes * 8 / elapsed_sec / 1000));
+ }
+}
+
+void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ auto codec_params_it = pt_codec_params_.find(payload_type);
+ if (codec_params_it == pt_codec_params_.end())
+ return;
+
+ RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for"
+ " payload type: "
+ << static_cast<int>(payload_type);
+
+ H264SpropParameterSets sprop_decoder;
+ auto sprop_base64_it =
+ codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets);
+
+ if (sprop_base64_it == codec_params_it->second.end())
+ return;
+
+ if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str()))
+ return;
+
+ tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(),
+ sprop_decoder.pps_nalu());
+}
+
+} // namespace webrtc
diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h
new file mode 100644
index 0000000000..d82a7abbfe
--- /dev/null
+++ b/video/rtp_video_stream_receiver2.h
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
+#define VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/video/color_space.h"
+#include "api/video_codecs/video_codec.h"
+#include "call/rtp_packet_sink_interface.h"
+#include "call/syncable.h"
+#include "call/video_receive_stream.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "modules/video_coding/h264_sps_pps_tracker.h"
+#include "modules/video_coding/loss_notification_controller.h"
+#include "modules/video_coding/packet_buffer.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+#include "modules/video_coding/unique_timestamp_counter.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/thread_annotations.h"
+#include "video/buffered_frame_decryptor.h"
+#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h"
+
+namespace webrtc {
+
+class NackModule2;
+class PacketRouter;
+class ProcessThread;
+class ReceiveStatistics;
+class RtcpRttStats;
+class RtpPacketReceived;
+class Transport;
+class UlpfecReceiver;
+
+class RtpVideoStreamReceiver2 : public LossNotificationSender,
+ public RecoveredPacketReceiver,
+ public RtpPacketSinkInterface,
+ public KeyFrameRequestSender,
+ public video_coding::OnCompleteFrameCallback,
+ public OnDecryptedFrameCallback,
+ public OnDecryptionStatusChangeCallback,
+ public RtpVideoFrameReceiver {
+ public:
+ RtpVideoStreamReceiver2(
+ TaskQueueBase* current_queue,
+ Clock* clock,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ // The packet router is optional; if provided, the RtpRtcp module for this
+ // stream is registered as a candidate for sending REMB and transport
+ // feedback.
+ PacketRouter* packet_router,
+ const VideoReceiveStream::Config* config,
+ ReceiveStatistics* rtp_receive_statistics,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ ProcessThread* process_thread,
+ NackSender* nack_sender,
+ // The KeyFrameRequestSender is optional; if not provided, key frame
+ // requests are sent via the internal RtpRtcp module.
+ KeyFrameRequestSender* keyframe_request_sender,
+ video_coding::OnCompleteFrameCallback* complete_frame_callback,
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+ ~RtpVideoStreamReceiver2() override;
+
+ void AddReceiveCodec(const VideoCodec& video_codec,
+ const std::map<std::string, std::string>& codec_params,
+ bool raw_payload);
+
+ void StartReceive();
+ void StopReceive();
+
+ // Produces the transport-related timestamps; current_delay_ms is left unset.
+ absl::optional<Syncable::Info> GetSyncInfo() const;
+
+ bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+
+ void FrameContinuous(int64_t seq_num);
+
+ void FrameDecoded(int64_t seq_num);
+
+ void SignalNetworkState(NetworkState state);
+
+ // Returns number of different frames seen.
+ int GetUniqueFramesSeen() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ return frame_counter_.GetUniqueSeen();
+ }
+
+ // Implements RtpPacketSinkInterface.
+ void OnRtpPacket(const RtpPacketReceived& packet) override;
+
+ // TODO(philipel): Stop using VCMPacket in the new jitter buffer and then
+ // remove this function. Public only for tests.
+ void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload,
+ const RtpPacketReceived& rtp_packet,
+ const RTPVideoHeader& video);
+
+ // Implements RecoveredPacketReceiver.
+ void OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
+
+ // Send an RTCP keyframe request.
+ void RequestKeyFrame() override;
+
+ // Implements LossNotificationSender.
+ void SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ bool IsUlpfecEnabled() const;
+ bool IsRetransmissionsEnabled() const;
+
+ // Returns true if a decryptor is attached and frames can be decrypted.
+ // Updated by OnDecryptionStatusChangeCallback. Note this refers to Frame
+ // Decryption not SRTP.
+ bool IsDecryptable() const;
+
+ // Don't use, still experimental.
+ void RequestPacketRetransmit(const std::vector<uint16_t>& sequence_numbers);
+
+ // Implements OnCompleteFrameCallback.
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override;
+
+ // Implements OnDecryptedFrameCallback.
+ void OnDecryptedFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
+ // Implements OnDecryptionStatusChangeCallback.
+ void OnDecryptionStatusChange(
+ FrameDecryptorInterface::Status status) override;
+
+ // Optionally set a frame decryptor after a stream has started. This will not
+ // reset the decoder state.
+ void SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor);
+
+ // Sets a frame transformer after a stream has started, if no transformer
+ // has previously been set. Does not reset the decoder state.
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+
+ // Called by VideoReceiveStream when stats are updated.
+ void UpdateRtt(int64_t max_rtt_ms);
+
+ absl::optional<int64_t> LastReceivedPacketMs() const;
+ absl::optional<int64_t> LastReceivedKeyframePacketMs() const;
+
+ // RtpDemuxer only forwards a given RTP packet to one sink. However, some
+ // sinks, such as FlexFEC, might wish to be informed of all of the packets
+ // a given sink receives (or any set of sinks). They may do so by registering
+ // themselves as secondary sinks.
+ void AddSecondarySink(RtpPacketSinkInterface* sink);
+ void RemoveSecondarySink(const RtpPacketSinkInterface* sink);
+
+ private:
+ // Implements RtpVideoFrameReceiver.
+ void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
+ // Used for buffering RTCP feedback messages and sending them all together.
+ // Note:
+ // 1. Key frame requests and NACKs are mutually exclusive, with the
+ // former taking precedence over the latter.
+ // 2. Loss notifications are orthogonal to either. (That is, may be sent
+ // alongside either.)
+ class RtcpFeedbackBuffer : public KeyFrameRequestSender,
+ public NackSender,
+ public LossNotificationSender {
+ public:
+ RtcpFeedbackBuffer(KeyFrameRequestSender* key_frame_request_sender,
+ NackSender* nack_sender,
+ LossNotificationSender* loss_notification_sender);
+
+ ~RtcpFeedbackBuffer() override = default;
+
+ // KeyFrameRequestSender implementation.
+ void RequestKeyFrame() override;
+
+ // NackSender implementation.
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override;
+
+ // LossNotificationSender implementation.
+ void SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ // Send all RTCP feedback messages buffered thus far.
+ void SendBufferedRtcpFeedback();
+
+ private:
+ // LNTF-related state.
+ struct LossNotificationState {
+ LossNotificationState(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag)
+ : last_decoded_seq_num(last_decoded_seq_num),
+ last_received_seq_num(last_received_seq_num),
+ decodability_flag(decodability_flag) {}
+
+ uint16_t last_decoded_seq_num;
+ uint16_t last_received_seq_num;
+ bool decodability_flag;
+ };
+
+ SequenceChecker worker_task_checker_;
+ KeyFrameRequestSender* const key_frame_request_sender_;
+ NackSender* const nack_sender_;
+ LossNotificationSender* const loss_notification_sender_;
+
+ // Key-frame-request-related state.
+ bool request_key_frame_ RTC_GUARDED_BY(worker_task_checker_);
+
+ // NACK-related state.
+ std::vector<uint16_t> nack_sequence_numbers_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ absl::optional<LossNotificationState> lntf_state_
+ RTC_GUARDED_BY(worker_task_checker_);
+ };
+ enum ParseGenericDependenciesResult {
+ kDropPacket,
+ kHasGenericDescriptor,
+ kNoGenericDescriptor
+ };
+
+ // Entry point doing non-stats work for a received packet. Called
+ // for the same packet both before and after RED decapsulation.
+ void ReceivePacket(const RtpPacketReceived& packet);
+ // Parses and handles RED headers.
+ // This function assumes that it's being called from only one thread.
+ void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet);
+ void NotifyReceiverOfEmptyPacket(uint16_t seq_num);
+ void UpdateHistograms();
+ bool IsRedEnabled() const;
+ void InsertSpsPpsIntoTracker(uint8_t payload_type);
+ void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result);
+ ParseGenericDependenciesResult ParseGenericDependenciesExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_);
+ void OnAssembledFrame(std::unique_ptr<video_coding::RtpFrameObject> frame);
+
+ Clock* const clock_;
+ // Ownership of this object lies with VideoReceiveStream, which owns |this|.
+ const VideoReceiveStream::Config& config_;
+ PacketRouter* const packet_router_;
+ ProcessThread* const process_thread_;
+
+ RemoteNtpTimeEstimator ntp_estimator_;
+
+ RtpHeaderExtensionMap rtp_header_extensions_;
+ // Set by the field trial WebRTC-ForcePlayoutDelay to override any playout
+ // delay that is specified in the received packets.
+ FieldTrialOptional<int> forced_playout_delay_max_ms_;
+ FieldTrialOptional<int> forced_playout_delay_min_ms_;
+ ReceiveStatistics* const rtp_receive_statistics_;
+ std::unique_ptr<UlpfecReceiver> ulpfec_receiver_;
+
+ SequenceChecker worker_task_checker_;
+ bool receiving_ RTC_GUARDED_BY(worker_task_checker_);
+ int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_);
+
+ const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
+
+ video_coding::OnCompleteFrameCallback* complete_frame_callback_;
+ KeyFrameRequestSender* const keyframe_request_sender_;
+
+ RtcpFeedbackBuffer rtcp_feedback_buffer_;
+ const std::unique_ptr<NackModule2> nack_module_;
+ std::unique_ptr<LossNotificationController> loss_notification_controller_;
+
+ video_coding::PacketBuffer packet_buffer_;
+ UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_);
+ SeqNumUnwrapper<uint16_t> frame_id_unwrapper_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Video structure provided in the dependency descriptor in a first packet
+ // of a key frame. It is required to parse dependency descriptor in the
+ // following delta packets.
+ std::unique_ptr<FrameDependencyStructure> video_structure_
+ RTC_GUARDED_BY(worker_task_checker_);
+ // Frame id of the last frame with the attached video structure.
+ // absl::nullopt when `video_structure_ == nullptr`;
+ absl::optional<int64_t> video_structure_frame_id_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::unique_ptr<video_coding::RtpFrameReferenceFinder> reference_finder_
+ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<VideoCodecType> current_codec_
+ RTC_GUARDED_BY(worker_task_checker_);
+ uint32_t last_assembled_frame_rtp_timestamp_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::map<int64_t, uint16_t> last_seq_num_for_pic_id_
+ RTC_GUARDED_BY(worker_task_checker_);
+ video_coding::H264SpsPpsTracker tracker_ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Maps payload id to the depacketizer.
+ std::map<uint8_t, std::unique_ptr<VideoRtpDepacketizer>> payload_type_map_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // TODO(johan): Remove pt_codec_params_ once
+ // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved.
+ // Maps a payload type to a map of out-of-band supplied codec parameters.
+ std::map<uint8_t, std::map<std::string, std::string>> pt_codec_params_
+ RTC_GUARDED_BY(worker_task_checker_);
+ int16_t last_payload_type_ RTC_GUARDED_BY(worker_task_checker_) = -1;
+
+ bool has_received_frame_ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::vector<RtpPacketSinkInterface*> secondary_sinks_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ absl::optional<uint32_t> last_received_rtp_timestamp_
+ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<int64_t> last_received_rtp_system_time_ms_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Handles incoming encrypted frames and forwards them to the
+ // rtp_reference_finder if they are decryptable.
+ std::unique_ptr<BufferedFrameDecryptor> buffered_frame_decryptor_
+ RTC_PT_GUARDED_BY(worker_task_checker_);
+ bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<ColorSpace> last_color_space_;
+
+ AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ int64_t last_completed_picture_id_ = 0;
+
+ rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate>
+ frame_transformer_delegate_;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc
new file mode 100644
index 0000000000..cda0fe5cfa
--- /dev/null
+++ b/video/rtp_video_stream_receiver2_unittest.cc
@@ -0,0 +1,1221 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/rtp_video_stream_receiver2.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_type.h"
+#include "common_video/h264/h264_common.h"
+#include "media/base/media_constants.h"
+#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp9.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/include/video_coding_defines.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+#include "rtc_base/byte_buffer.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/mock_frame_transformer.h"
+#include "test/time_controller/simulated_task_queue.h"
+
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::Invoke;
+using ::testing::SizeIs;
+using ::testing::Values;
+
+namespace webrtc {
+
+namespace {
+
+const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01};
+
+std::vector<uint64_t> GetAbsoluteCaptureTimestamps(
+ const video_coding::EncodedFrame* frame) {
+ std::vector<uint64_t> result;
+ for (const auto& packet_info : frame->PacketInfos()) {
+ if (packet_info.absolute_capture_time()) {
+ result.push_back(
+ packet_info.absolute_capture_time()->absolute_capture_timestamp);
+ }
+ }
+ return result;
+}
+
+RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
+ RTPVideoHeader video_header;
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecGeneric;
+ video_header.frame_type = frame_type;
+ return video_header;
+}
+
+class MockTransport : public Transport {
+ public:
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
+};
+
+class MockNackSender : public NackSender {
+ public:
+ MOCK_METHOD(void,
+ SendNack,
+ (const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed),
+ (override));
+};
+
+class MockKeyFrameRequestSender : public KeyFrameRequestSender {
+ public:
+ MOCK_METHOD(void, RequestKeyFrame, (), (override));
+};
+
+class MockOnCompleteFrameCallback
+ : public video_coding::OnCompleteFrameCallback {
+ public:
+ MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailNullptr,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailLength,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailBitstream,
+ (video_coding::EncodedFrame*),
+ ());
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override {
+ if (!frame) {
+ DoOnCompleteFrameFailNullptr(nullptr);
+ return;
+ }
+ EXPECT_EQ(buffer_.Length(), frame->size());
+ if (buffer_.Length() != frame->size()) {
+ DoOnCompleteFrameFailLength(frame.get());
+ return;
+ }
+ if (frame->size() != buffer_.Length() ||
+ memcmp(buffer_.Data(), frame->data(), buffer_.Length()) != 0) {
+ DoOnCompleteFrameFailBitstream(frame.get());
+ return;
+ }
+ DoOnCompleteFrame(frame.get());
+ }
+
+ void ClearExpectedBitstream() { buffer_.Clear(); }
+
+ void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) {
+ // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*.
+ buffer_.WriteBytes(reinterpret_cast<const char*>(data), size_in_bytes);
+ }
+ rtc::ByteBufferWriter buffer_;
+};
+
+class MockRtpPacketSink : public RtpPacketSinkInterface {
+ public:
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
+};
+
+constexpr uint32_t kSsrc = 111;
+constexpr uint16_t kSequenceNumber = 222;
+constexpr int kPayloadType = 100;
+constexpr int kRedPayloadType = 125;
+
+std::unique_ptr<RtpPacketReceived> CreateRtpPacketReceived() {
+ auto packet = std::make_unique<RtpPacketReceived>();
+ packet->SetSsrc(kSsrc);
+ packet->SetSequenceNumber(kSequenceNumber);
+ packet->SetPayloadType(kPayloadType);
+ return packet;
+}
+
+MATCHER_P(SamePacketAs, other, "") {
+ return arg.Ssrc() == other.Ssrc() &&
+ arg.SequenceNumber() == other.SequenceNumber();
+}
+
+} // namespace
+
+class RtpVideoStreamReceiver2Test : public ::testing::Test {
+ public:
+ RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {}
+ explicit RtpVideoStreamReceiver2Test(std::string field_trials)
+ : override_field_trials_(field_trials),
+ config_(CreateConfig()),
+ process_thread_(ProcessThread::Create("TestThread")) {
+ rtp_receive_statistics_ =
+ ReceiveStatistics::Create(Clock::GetRealTimeClock());
+ rtp_video_stream_receiver_ = std::make_unique<RtpVideoStreamReceiver2>(
+ TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
+ nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
+ nullptr, process_thread_.get(), &mock_nack_sender_,
+ &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_,
+ nullptr, nullptr);
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ codec.codecType = kVideoCodecGeneric;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {},
+ /*raw_payload=*/false);
+ }
+
+ RTPVideoHeader GetDefaultH264VideoHeader() {
+ RTPVideoHeader video_header;
+ video_header.codec = kVideoCodecH264;
+ video_header.video_type_header.emplace<RTPVideoHeaderH264>();
+ return video_header;
+ }
+
+ // TODO(Johan): refactor h264_sps_pps_tracker_unittests.cc to avoid duplicate
+ // code.
+ void AddSps(RTPVideoHeader* video_header,
+ uint8_t sps_id,
+ rtc::CopyOnWriteBuffer* data) {
+ NaluInfo info;
+ info.type = H264::NaluType::kSps;
+ info.sps_id = sps_id;
+ info.pps_id = -1;
+ data->AppendData({H264::NaluType::kSps, sps_id});
+ auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+ h264.nalus[h264.nalus_length++] = info;
+ }
+
+ void AddPps(RTPVideoHeader* video_header,
+ uint8_t sps_id,
+ uint8_t pps_id,
+ rtc::CopyOnWriteBuffer* data) {
+ NaluInfo info;
+ info.type = H264::NaluType::kPps;
+ info.sps_id = sps_id;
+ info.pps_id = pps_id;
+ data->AppendData({H264::NaluType::kPps, pps_id});
+ auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+ h264.nalus[h264.nalus_length++] = info;
+ }
+
+ void AddIdr(RTPVideoHeader* video_header, int pps_id) {
+ NaluInfo info;
+ info.type = H264::NaluType::kIdr;
+ info.sps_id = -1;
+ info.pps_id = pps_id;
+ auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+ h264.nalus[h264.nalus_length++] = info;
+ }
+
+ protected:
+ static VideoReceiveStream::Config CreateConfig() {
+ VideoReceiveStream::Config config(nullptr);
+ config.rtp.remote_ssrc = 1111;
+ config.rtp.local_ssrc = 2222;
+ config.rtp.red_payload_type = kRedPayloadType;
+ return config;
+ }
+
+ TokenTaskQueue task_queue_;
+ TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_{&task_queue_};
+
+ const webrtc::test::ScopedFieldTrials override_field_trials_;
+ VideoReceiveStream::Config config_;
+ MockNackSender mock_nack_sender_;
+ MockKeyFrameRequestSender mock_key_frame_request_sender_;
+ MockTransport mock_transport_;
+ MockOnCompleteFrameCallback mock_on_complete_frame_callback_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
+ std::unique_ptr<RtpVideoStreamReceiver2> rtp_video_stream_receiver_;
+};
+
+TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) {
+ // Test that color space is cached from the last packet of a key frame and
+ // that it's not reset by padding packets without color space.
+ constexpr int kVp9PayloadType = 99;
+ const ColorSpace kColorSpace(
+ ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12,
+ ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull);
+ const std::vector<uint8_t> kKeyFramePayload = {0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10};
+ const std::vector<uint8_t> kDeltaFramePayload = {0, 1, 2, 3, 4};
+
+ // Anonymous helper class that generates received packets.
+ class {
+ public:
+ void SetPayload(const std::vector<uint8_t>& payload,
+ VideoFrameType video_frame_type) {
+ video_frame_type_ = video_frame_type;
+ RtpPacketizer::PayloadSizeLimits pay_load_size_limits;
+ // Reduce max payload length to make sure the key frame generates two
+ // packets.
+ pay_load_size_limits.max_payload_len = 8;
+ RTPVideoHeaderVP9 rtp_video_header_vp9;
+ rtp_video_header_vp9.InitRTPVideoHeaderVP9();
+ rtp_video_header_vp9.inter_pic_predicted =
+ (video_frame_type == VideoFrameType::kVideoFrameDelta);
+ rtp_packetizer_ = std::make_unique<RtpPacketizerVp9>(
+ payload, pay_load_size_limits, rtp_video_header_vp9);
+ }
+
+ size_t NumPackets() { return rtp_packetizer_->NumPackets(); }
+ void SetColorSpace(const ColorSpace& color_space) {
+ color_space_ = color_space;
+ }
+
+ RtpPacketReceived NextPacket() {
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<ColorSpaceExtension>(1);
+ RtpPacketToSend packet_to_send(&extension_map);
+ packet_to_send.SetSequenceNumber(sequence_number_++);
+ packet_to_send.SetSsrc(kSsrc);
+ packet_to_send.SetPayloadType(kVp9PayloadType);
+ bool include_color_space =
+ (rtp_packetizer_->NumPackets() == 1u &&
+ video_frame_type_ == VideoFrameType::kVideoFrameKey);
+ if (include_color_space) {
+ EXPECT_TRUE(
+ packet_to_send.SetExtension<ColorSpaceExtension>(color_space_));
+ }
+ rtp_packetizer_->NextPacket(&packet_to_send);
+
+ RtpPacketReceived received_packet(&extension_map);
+ received_packet.Parse(packet_to_send.data(), packet_to_send.size());
+ return received_packet;
+ }
+
+ private:
+ uint16_t sequence_number_ = 0;
+ VideoFrameType video_frame_type_;
+ ColorSpace color_space_;
+ std::unique_ptr<RtpPacketizer> rtp_packetizer_;
+ } received_packet_generator;
+ received_packet_generator.SetColorSpace(kColorSpace);
+
+ // Prepare the receiver for VP9.
+ VideoCodec codec;
+ codec.plType = kVp9PayloadType;
+ codec.codecType = kVideoCodecVP9;
+ std::map<std::string, std::string> codec_params;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params,
+ /*raw_payload=*/false);
+
+ // Generate key frame packets.
+ received_packet_generator.SetPayload(kKeyFramePayload,
+ VideoFrameType::kVideoFrameKey);
+ EXPECT_EQ(received_packet_generator.NumPackets(), 2u);
+ RtpPacketReceived key_frame_packet1 = received_packet_generator.NextPacket();
+ RtpPacketReceived key_frame_packet2 = received_packet_generator.NextPacket();
+
+ // Generate delta frame packet.
+ received_packet_generator.SetPayload(kDeltaFramePayload,
+ VideoFrameType::kVideoFrameDelta);
+ EXPECT_EQ(received_packet_generator.NumPackets(), 1u);
+ RtpPacketReceived delta_frame_packet = received_packet_generator.NextPacket();
+
+ rtp_video_stream_receiver_->StartReceive();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kKeyFramePayload.data(), kKeyFramePayload.size());
+
+ // Send the key frame and expect a callback with color space information.
+ EXPECT_FALSE(key_frame_packet1.GetExtension<ColorSpaceExtension>());
+ EXPECT_TRUE(key_frame_packet2.GetExtension<ColorSpaceExtension>());
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
+ ASSERT_TRUE(frame->EncodedImage().ColorSpace());
+ EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
+ }));
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet2);
+ // Resend the first key frame packet to simulate padding for example.
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
+
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kDeltaFramePayload.data(), kDeltaFramePayload.size());
+
+ // Expect delta frame to have color space set even though color space not
+ // included in the RTP packet.
+ EXPECT_FALSE(delta_frame_packet.GetExtension<ColorSpaceExtension>());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
+ ASSERT_TRUE(frame->EncodedImage().ColorSpace());
+ EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
+ }));
+ rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) {
+ RtpPacketReceived rtp_packet;
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(1);
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) {
+ constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+ constexpr int kId0 = 1;
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+ RtpPacketReceived rtp_packet(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetSequenceNumber(1);
+ rtp_packet.SetTimestamp(1);
+ rtp_packet.SetSsrc(kSsrc);
+ rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
+ AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+ /*estimated_capture_clock_offset=*/absl::nullopt});
+
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke(
+ [kAbsoluteCaptureTimestamp](video_coding::EncodedFrame* frame) {
+ EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame),
+ ElementsAre(kAbsoluteCaptureTimestamp));
+ }));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) {
+ constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+ constexpr int kId0 = 1;
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+ RtpPacketReceived rtp_packet(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ uint16_t sequence_number = 1;
+ uint32_t rtp_timestamp = 1;
+ rtp_packet.SetSequenceNumber(sequence_number);
+ rtp_packet.SetTimestamp(rtp_timestamp);
+ rtp_packet.SetSsrc(kSsrc);
+ rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
+ AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+ /*estimated_capture_clock_offset=*/absl::nullopt});
+
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ // Rtp packet without absolute capture time.
+ rtp_packet = RtpPacketReceived(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(++sequence_number);
+ rtp_packet.SetTimestamp(++rtp_timestamp);
+ rtp_packet.SetSsrc(kSsrc);
+
+ // There is no absolute capture time in the second packet.
+ // Expect rtp video stream receiver to extrapolate it for the resulting video
+ // frame using absolute capture time from the previous packet.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([](video_coding::EncodedFrame* frame) {
+ EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1));
+ }));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ NoInfiniteRecursionOnEncapsulatedRedPacket) {
+ const std::vector<uint8_t> data({
+ 0x80, // RTP version.
+ kRedPayloadType, // Payload type.
+ 0, 0, 0, 0, 0, 0, // Don't care.
+ 0, 0, 0x4, 0x57, // SSRC
+ kRedPayloadType, // RED header.
+ 0, 0, 0, 0, 0 // Don't care.
+ });
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(data.data(), data.size()));
+ rtp_video_stream_receiver_->StartReceive();
+ rtp_video_stream_receiver_->OnRtpPacket(packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ DropsPacketWithRedPayloadTypeAndEmptyPayload) {
+ const uint8_t kRedPayloadType = 125;
+ config_.rtp.red_payload_type = kRedPayloadType;
+ SetUp(); // re-create rtp_video_stream_receiver with red payload type.
+ // clang-format off
+ const uint8_t data[] = {
+ 0x80, // RTP version.
+ kRedPayloadType, // Payload type.
+ 0, 0, 0, 0, 0, 0, // Don't care.
+ 0, 0, 0x4, 0x57, // SSRC
+ // Empty rtp payload.
+ };
+ // clang-format on
+ RtpPacketReceived packet;
+ // Manually convert to CopyOnWriteBuffer to be sure capacity == size
+ // and asan bot can catch read buffer overflow.
+ EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data)));
+ rtp_video_stream_receiver_->StartReceive();
+ rtp_video_stream_receiver_->OnRtpPacket(packet);
+ // Expect asan doesn't find anything.
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) {
+ RtpPacketReceived rtp_packet;
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetSequenceNumber(1);
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ constexpr uint8_t expected_bitsteam[] = {1, 2, 3, 0xff};
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ expected_bitsteam, sizeof(expected_bitsteam));
+ EXPECT_CALL(mock_on_complete_frame_callback_,
+ DoOnCompleteFrameFailBitstream(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+class RtpVideoStreamReceiver2TestH264
+ : public RtpVideoStreamReceiver2Test,
+ public ::testing::WithParamInterface<std::string> {
+ protected:
+ RtpVideoStreamReceiver2TestH264() : RtpVideoStreamReceiver2Test(GetParam()) {}
+};
+
+INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe,
+ RtpVideoStreamReceiver2TestH264,
+ Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));
+
+// Fails on MSAN: https://bugs.chromium.org/p/webrtc/issues/detail?id=11376.
+#if defined(MEMORY_SANITIZER)
+#define MAYBE_InBandSpsPps DISABLED_InBandSpsPps
+#else
+#define MAYBE_InBandSpsPps InBandSpsPps
+#endif
+TEST_P(RtpVideoStreamReceiver2TestH264, MAYBE_InBandSpsPps) {
+ rtc::CopyOnWriteBuffer sps_data;
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
+ AddSps(&sps_video_header, 0, &sps_data);
+ rtp_packet.SetSequenceNumber(0);
+ rtp_packet.SetPayloadType(kPayloadType);
+ sps_video_header.is_first_packet_in_frame = true;
+ sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
+ sps_data.size());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
+ sps_video_header);
+
+ rtc::CopyOnWriteBuffer pps_data;
+ RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
+ AddPps(&pps_video_header, 0, 1, &pps_data);
+ rtp_packet.SetSequenceNumber(1);
+ pps_video_header.is_first_packet_in_frame = true;
+ pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
+ pps_data.size());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
+ pps_video_header);
+
+ rtc::CopyOnWriteBuffer idr_data;
+ RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
+ AddIdr(&idr_video_header, 1);
+ rtp_packet.SetSequenceNumber(2);
+ idr_video_header.is_first_packet_in_frame = true;
+ idr_video_header.is_last_packet_in_frame = true;
+ idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ const uint8_t idr[] = {0x65, 1, 2, 3};
+ idr_data.AppendData(idr);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
+ idr_data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
+ idr_video_header);
+}
+
+TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) {
+ constexpr int kPayloadType = 99;
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ std::map<std::string, std::string> codec_params;
+ // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2
+ // .
+ codec_params.insert(
+ {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="});
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params,
+ /*raw_payload=*/false);
+ const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96,
+ 0x53, 0x05, 0x89, 0x88};
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_sps,
+ sizeof(binary_sps));
+ const uint8_t binary_pps[] = {0x68, 0xc9, 0x63, 0x88};
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_pps,
+ sizeof(binary_pps));
+
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader video_header = GetDefaultH264VideoHeader();
+ AddIdr(&video_header, 0);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(2);
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecH264;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ rtc::CopyOnWriteBuffer data({1, 2, 3});
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) {
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader video_header = GetDefaultH264VideoHeader();
+ rtc::CopyOnWriteBuffer data({1, 2, 3});
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(2);
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecGeneric;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ rtp_packet.SetSequenceNumber(3);
+ rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
+ video_header);
+
+ rtp_packet.SetSequenceNumber(4);
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ rtp_packet.SetSequenceNumber(6);
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_packet.SetSequenceNumber(5);
+ rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) {
+ RtpPacketReceived rtp_packet;
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetSequenceNumber(1);
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
+ EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) {
+ constexpr int kPacketBufferMaxSize = 2048;
+
+ RtpPacketReceived rtp_packet;
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
+ // Incomplete frames so that the packet buffer is filling up.
+ video_header.is_last_packet_in_frame = false;
+ uint16_t start_sequence_number = 1234;
+ rtp_packet.SetSequenceNumber(start_sequence_number);
+ while (rtp_packet.SequenceNumber() - start_sequence_number <
+ kPacketBufferMaxSize) {
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+ rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2);
+ }
+
+ EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, SecondarySinksGetRtpNotifications) {
+ rtp_video_stream_receiver_->StartReceive();
+
+ MockRtpPacketSink secondary_sink_1;
+ MockRtpPacketSink secondary_sink_2;
+
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink_1);
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink_2);
+
+ auto rtp_packet = CreateRtpPacketReceived();
+ EXPECT_CALL(secondary_sink_1, OnRtpPacket(SamePacketAs(*rtp_packet)));
+ EXPECT_CALL(secondary_sink_2, OnRtpPacket(SamePacketAs(*rtp_packet)));
+
+ rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->StopReceive();
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink_1);
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink_2);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ RemovedSecondarySinksGetNoRtpNotifications) {
+ rtp_video_stream_receiver_->StartReceive();
+
+ MockRtpPacketSink secondary_sink;
+
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink);
+
+ auto rtp_packet = CreateRtpPacketReceived();
+
+ EXPECT_CALL(secondary_sink, OnRtpPacket(_)).Times(0);
+
+ rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->StopReceive();
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ OnlyRemovedSecondarySinksExcludedFromNotifications) {
+ rtp_video_stream_receiver_->StartReceive();
+
+ MockRtpPacketSink kept_secondary_sink;
+ MockRtpPacketSink removed_secondary_sink;
+
+ rtp_video_stream_receiver_->AddSecondarySink(&kept_secondary_sink);
+ rtp_video_stream_receiver_->AddSecondarySink(&removed_secondary_sink);
+ rtp_video_stream_receiver_->RemoveSecondarySink(&removed_secondary_sink);
+
+ auto rtp_packet = CreateRtpPacketReceived();
+ EXPECT_CALL(kept_secondary_sink, OnRtpPacket(SamePacketAs(*rtp_packet)));
+
+ rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->StopReceive();
+ rtp_video_stream_receiver_->RemoveSecondarySink(&kept_secondary_sink);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ SecondariesOfNonStartedStreamGetNoNotifications) {
+ // Explicitly showing that the stream is not in the |started| state,
+ // regardless of whether streams start out |started| or |stopped|.
+ rtp_video_stream_receiver_->StopReceive();
+
+ MockRtpPacketSink secondary_sink;
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
+
+ auto rtp_packet = CreateRtpPacketReceived();
+ EXPECT_CALL(secondary_sink, OnRtpPacket(_)).Times(0);
+
+ rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorOnePacket) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kSpatialIndex = 1;
+
+ rtp_video_stream_receiver_->StartReceive();
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+ RtpPacketReceived rtp_packet(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+
+ RtpGenericFrameDescriptor generic_descriptor;
+ generic_descriptor.SetFirstPacketInSubFrame(true);
+ generic_descriptor.SetLastPacketInSubFrame(true);
+ generic_descriptor.SetFrameId(100);
+ generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
+ generic_descriptor.AddFrameDependencyDiff(90);
+ generic_descriptor.AddFrameDependencyDiff(80);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ generic_descriptor));
+
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ memcpy(payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(1);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->num_references, 2U);
+ EXPECT_EQ(frame->references[0], frame->id.picture_id - 90);
+ EXPECT_EQ(frame->references[1], frame->id.picture_id - 80);
+ EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex);
+ EXPECT_THAT(frame->PacketInfos(), SizeIs(1));
+ }));
+
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorTwoPackets) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kSpatialIndex = 1;
+
+ rtp_video_stream_receiver_->StartReceive();
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+ RtpPacketReceived first_packet(&extension_map);
+
+ RtpGenericFrameDescriptor first_packet_descriptor;
+ first_packet_descriptor.SetFirstPacketInSubFrame(true);
+ first_packet_descriptor.SetLastPacketInSubFrame(false);
+ first_packet_descriptor.SetFrameId(100);
+ first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
+ first_packet_descriptor.SetResolution(480, 360);
+ ASSERT_TRUE(first_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ first_packet_descriptor));
+
+ uint8_t* first_packet_payload = first_packet.SetPayloadSize(data.size());
+ memcpy(first_packet_payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ first_packet.SetPayloadType(kPayloadType);
+ first_packet.SetSequenceNumber(1);
+ rtp_video_stream_receiver_->OnRtpPacket(first_packet);
+
+ RtpPacketReceived second_packet(&extension_map);
+ RtpGenericFrameDescriptor second_packet_descriptor;
+ second_packet_descriptor.SetFirstPacketInSubFrame(false);
+ second_packet_descriptor.SetLastPacketInSubFrame(true);
+ ASSERT_TRUE(second_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ second_packet_descriptor));
+
+ second_packet.SetMarker(true);
+ second_packet.SetPayloadType(kPayloadType);
+ second_packet.SetSequenceNumber(2);
+
+ uint8_t* second_packet_payload = second_packet.SetPayloadSize(data.size());
+ memcpy(second_packet_payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->num_references, 0U);
+ EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex);
+ EXPECT_EQ(frame->EncodedImage()._encodedWidth, 480u);
+ EXPECT_EQ(frame->EncodedImage()._encodedHeight, 360u);
+ EXPECT_THAT(frame->PacketInfos(), SizeIs(2));
+ }));
+
+ rtp_video_stream_receiver_->OnRtpPacket(second_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorRawPayload) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kRawPayloadType = 123;
+
+ VideoCodec codec;
+ codec.plType = kRawPayloadType;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true);
+ rtp_video_stream_receiver_->StartReceive();
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+ RtpPacketReceived rtp_packet(&extension_map);
+
+ RtpGenericFrameDescriptor generic_descriptor;
+ generic_descriptor.SetFirstPacketInSubFrame(true);
+ generic_descriptor.SetLastPacketInSubFrame(true);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ generic_descriptor));
+
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ memcpy(payload, data.data(), data.size());
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(kRawPayloadType);
+ rtp_packet.SetSequenceNumber(1);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kPayloadType = 123;
+
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true);
+ rtp_video_stream_receiver_->StartReceive();
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+
+ uint16_t rtp_sequence_number = 1;
+ auto inject_packet = [&](uint16_t wrapped_frame_id) {
+ RtpPacketReceived rtp_packet(&extension_map);
+
+ RtpGenericFrameDescriptor generic_descriptor;
+ generic_descriptor.SetFirstPacketInSubFrame(true);
+ generic_descriptor.SetLastPacketInSubFrame(true);
+ generic_descriptor.SetFrameId(wrapped_frame_id);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ generic_descriptor));
+
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ ASSERT_TRUE(payload);
+ memcpy(payload, data.data(), data.size());
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(++rtp_sequence_number);
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+ };
+
+ int64_t first_picture_id;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ first_picture_id = frame->id.picture_id;
+ });
+ inject_packet(/*wrapped_frame_id=*/0xffff);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id - first_picture_id, 3);
+ });
+ inject_packet(/*wrapped_frame_id=*/0x0002);
+}
+
+class RtpVideoStreamReceiver2DependencyDescriptorTest
+ : public RtpVideoStreamReceiver2Test {
+ public:
+ RtpVideoStreamReceiver2DependencyDescriptorTest() {
+ VideoCodec codec;
+ codec.plType = payload_type_;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {},
+ /*raw_payload=*/true);
+ extension_map_.Register<RtpDependencyDescriptorExtension>(7);
+ rtp_video_stream_receiver_->StartReceive();
+ }
+
+ // Returns some valid structure for the DependencyDescriptors.
+ // First template of that structure always fit for a key frame.
+ static FrameDependencyStructure CreateStreamStructure() {
+ FrameDependencyStructure stream_structure;
+ stream_structure.num_decode_targets = 1;
+ stream_structure.templates = {
+ FrameDependencyTemplate().Dtis("S"),
+ FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
+ };
+ return stream_structure;
+ }
+
+ void InjectPacketWith(const FrameDependencyStructure& stream_structure,
+ const DependencyDescriptor& dependency_descriptor) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ RtpPacketReceived rtp_packet(&extension_map_);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpDependencyDescriptorExtension>(
+ stream_structure, dependency_descriptor));
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ ASSERT_TRUE(payload);
+ memcpy(payload, data.data(), data.size());
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(payload_type_);
+ rtp_packet.SetSequenceNumber(++rtp_sequence_number_);
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+ }
+
+ private:
+ const int payload_type_ = 123;
+ RtpHeaderExtensionMap extension_map_;
+ uint16_t rtp_sequence_number_ = 321;
+};
+
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, UnwrapsFrameId) {
+ FrameDependencyStructure stream_structure = CreateStreamStructure();
+
+ DependencyDescriptor keyframe_descriptor;
+ keyframe_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure);
+ keyframe_descriptor.frame_dependencies = stream_structure.templates[0];
+ keyframe_descriptor.frame_number = 0xfff0;
+ // DependencyDescriptor doesn't support reordering delta frame before
+ // keyframe. Thus feed a key frame first, then test reodered delta frames.
+ int64_t first_picture_id;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ first_picture_id = frame->id.picture_id;
+ });
+ InjectPacketWith(stream_structure, keyframe_descriptor);
+
+ DependencyDescriptor deltaframe1_descriptor;
+ deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1];
+ deltaframe1_descriptor.frame_number = 0xfffe;
+
+ DependencyDescriptor deltaframe2_descriptor;
+ deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1];
+ deltaframe2_descriptor.frame_number = 0x0002;
+
+ // Parser should unwrap frame ids correctly even if packets were reordered by
+ // the network.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ // 0x0002 - 0xfff0
+ EXPECT_EQ(frame->id.picture_id - first_picture_id, 18);
+ })
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ // 0xfffe - 0xfff0
+ EXPECT_EQ(frame->id.picture_id - first_picture_id, 14);
+ });
+ InjectPacketWith(stream_structure, deltaframe2_descriptor);
+ InjectPacketWith(stream_structure, deltaframe1_descriptor);
+}
+
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
+ DropsLateDeltaFramePacketWithDependencyDescriptorExtension) {
+ FrameDependencyStructure stream_structure1 = CreateStreamStructure();
+ FrameDependencyStructure stream_structure2 = CreateStreamStructure();
+ // Make sure template ids for these two structures do not collide:
+ // adjust structure_id (that is also used as template id offset).
+ stream_structure1.structure_id = 13;
+ stream_structure2.structure_id =
+ stream_structure1.structure_id + stream_structure1.templates.size();
+
+ DependencyDescriptor keyframe1_descriptor;
+ keyframe1_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure1);
+ keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
+ keyframe1_descriptor.frame_number = 1;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+ InjectPacketWith(stream_structure1, keyframe1_descriptor);
+
+ // Pass in 2nd key frame with different structure.
+ DependencyDescriptor keyframe2_descriptor;
+ keyframe2_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure2);
+ keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
+ keyframe2_descriptor.frame_number = 3;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+ InjectPacketWith(stream_structure2, keyframe2_descriptor);
+
+ // Pass in late delta frame that uses structure of the 1st key frame.
+ DependencyDescriptor deltaframe_descriptor;
+ deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0];
+ deltaframe_descriptor.frame_number = 2;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0);
+ InjectPacketWith(stream_structure1, deltaframe_descriptor);
+}
+
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
+ DropsLateKeyFramePacketWithDependencyDescriptorExtension) {
+ FrameDependencyStructure stream_structure1 = CreateStreamStructure();
+ FrameDependencyStructure stream_structure2 = CreateStreamStructure();
+ // Make sure template ids for these two structures do not collide:
+ // adjust structure_id (that is also used as template id offset).
+ stream_structure1.structure_id = 13;
+ stream_structure2.structure_id =
+ stream_structure1.structure_id + stream_structure1.templates.size();
+
+ DependencyDescriptor keyframe1_descriptor;
+ keyframe1_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure1);
+ keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
+ keyframe1_descriptor.frame_number = 1;
+
+ DependencyDescriptor keyframe2_descriptor;
+ keyframe2_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure2);
+ keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
+ keyframe2_descriptor.frame_number = 3;
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id & 0xFFFF, 3);
+ });
+ InjectPacketWith(stream_structure2, keyframe2_descriptor);
+ InjectPacketWith(stream_structure1, keyframe1_descriptor);
+
+ // Pass in delta frame that uses structure of the 2nd key frame. Late key
+ // frame shouldn't block it.
+ DependencyDescriptor deltaframe_descriptor;
+ deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0];
+ deltaframe_descriptor.frame_number = 4;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id & 0xFFFF, 4);
+ });
+ InjectPacketWith(stream_structure2, deltaframe_descriptor);
+}
+
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+using RtpVideoStreamReceiver2DeathTest = RtpVideoStreamReceiver2Test;
+TEST_F(RtpVideoStreamReceiver2DeathTest, RepeatedSecondarySinkDisallowed) {
+ MockRtpPacketSink secondary_sink;
+
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
+ EXPECT_DEATH(rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink),
+ "");
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink);
+}
+#endif
+
+TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) {
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ EXPECT_CALL(*mock_frame_transformer,
+ RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
+ auto receiver = std::make_unique<RtpVideoStreamReceiver2>(
+ TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
+ nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
+ nullptr, process_thread_.get(), &mock_nack_sender_, nullptr,
+ &mock_on_complete_frame_callback_, nullptr, mock_frame_transformer);
+ VideoCodec video_codec;
+ video_codec.plType = kPayloadType;
+ video_codec.codecType = kVideoCodecGeneric;
+ receiver->AddReceiveCodec(video_codec, {}, /*raw_payload=*/false);
+
+ RtpPacketReceived rtp_packet;
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetSequenceNumber(1);
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(*mock_frame_transformer, Transform(_));
+ receiver->OnReceivedPayloadData(data, rtp_packet, video_header);
+
+ EXPECT_CALL(*mock_frame_transformer,
+ UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc));
+ receiver = nullptr;
+}
+
+// Test default behavior and when playout delay is overridden by field trial.
+const PlayoutDelay kTransmittedPlayoutDelay = {100, 200};
+const PlayoutDelay kForcedPlayoutDelay = {70, 90};
+struct PlayoutDelayOptions {
+ std::string field_trial;
+ PlayoutDelay expected_delay;
+};
+const PlayoutDelayOptions kDefaultBehavior = {
+ /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay};
+const PlayoutDelayOptions kOverridePlayoutDelay = {
+ /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/",
+ /*expected_delay=*/kForcedPlayoutDelay};
+
+class RtpVideoStreamReceiver2TestPlayoutDelay
+ : public RtpVideoStreamReceiver2Test,
+ public ::testing::WithParamInterface<PlayoutDelayOptions> {
+ protected:
+ RtpVideoStreamReceiver2TestPlayoutDelay()
+ : RtpVideoStreamReceiver2Test(GetParam().field_trial) {}
+};
+
+INSTANTIATE_TEST_SUITE_P(PlayoutDelay,
+ RtpVideoStreamReceiver2TestPlayoutDelay,
+ Values(kDefaultBehavior, kOverridePlayoutDelay));
+
+TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) {
+ rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4});
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<PlayoutDelayLimits>(1);
+ RtpPacketToSend packet_to_send(&extension_map);
+ packet_to_send.SetPayloadType(kPayloadType);
+ packet_to_send.SetSequenceNumber(1);
+
+ // Set playout delay on outgoing packet.
+ EXPECT_TRUE(packet_to_send.SetExtension<PlayoutDelayLimits>(
+ kTransmittedPlayoutDelay));
+ uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size());
+ memcpy(payload, payload_data.data(), payload_data.size());
+
+ RtpPacketReceived received_packet(&extension_map);
+ received_packet.Parse(packet_to_send.data(), packet_to_send.size());
+
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(),
+ payload_data.size());
+ // Expect the playout delay of encoded frame to be the same as the transmitted
+ // playout delay unless it was overridden by a field trial.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([expected_playout_delay = GetParam().expected_delay](
+ video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay);
+ }));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(
+ received_packet.PayloadBuffer(), received_packet, video_header);
+}
+
+} // namespace webrtc
diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
index 8324b19136..31eb344d5b 100644
--- a/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
+++ b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
@@ -17,7 +17,6 @@
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"
-#include "video/rtp_video_stream_receiver.h"
namespace webrtc {
@@ -28,7 +27,9 @@ class TransformableVideoReceiverFrame
TransformableVideoReceiverFrame(
std::unique_ptr<video_coding::RtpFrameObject> frame,
uint32_t ssrc)
- : frame_(std::move(frame)), ssrc_(ssrc) {}
+ : frame_(std::move(frame)),
+ metadata_(frame_->GetRtpVideoHeader()),
+ ssrc_(ssrc) {}
~TransformableVideoReceiverFrame() override = default;
// Implements TransformableVideoFrameInterface.
@@ -52,19 +53,22 @@ class TransformableVideoReceiverFrame
return RtpDescriptorAuthentication(frame_->GetRtpVideoHeader());
}
+ const VideoFrameMetadata& GetMetadata() const override { return metadata_; }
+
std::unique_ptr<video_coding::RtpFrameObject> ExtractFrame() && {
return std::move(frame_);
}
private:
std::unique_ptr<video_coding::RtpFrameObject> frame_;
+ const VideoFrameMetadata metadata_;
const uint32_t ssrc_;
};
} // namespace
RtpVideoStreamReceiverFrameTransformerDelegate::
RtpVideoStreamReceiverFrameTransformerDelegate(
- RtpVideoStreamReceiver* receiver,
+ RtpVideoFrameReceiver* receiver,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
rtc::Thread* network_thread,
uint32_t ssrc)
diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate.h b/video/rtp_video_stream_receiver_frame_transformer_delegate.h
index 0a106c956a..e687e7f47b 100644
--- a/video/rtp_video_stream_receiver_frame_transformer_delegate.h
+++ b/video/rtp_video_stream_receiver_frame_transformer_delegate.h
@@ -20,7 +20,16 @@
namespace webrtc {
-class RtpVideoStreamReceiver;
+// Called back by RtpVideoStreamReceiverFrameTransformerDelegate on the network
+// thread after transformation.
+class RtpVideoFrameReceiver {
+ public:
+ virtual void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) = 0;
+
+ protected:
+ virtual ~RtpVideoFrameReceiver() = default;
+};
// Delegates calls to FrameTransformerInterface to transform frames, and to
// RtpVideoStreamReceiver to manage transformed frames on the |network_thread_|.
@@ -28,7 +37,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
: public TransformedFrameCallback {
public:
RtpVideoStreamReceiverFrameTransformerDelegate(
- RtpVideoStreamReceiver* receiver,
+ RtpVideoFrameReceiver* receiver,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
rtc::Thread* network_thread,
uint32_t ssrc);
@@ -44,7 +53,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
void OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) override;
- // Delegates the call to RtpVideoReceiver::ManageFrame on the
+ // Delegates the call to RtpVideoFrameReceiver::ManageFrame on the
// |network_thread_|.
void ManageFrame(std::unique_ptr<TransformableFrameInterface> frame);
@@ -53,7 +62,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
private:
SequenceChecker network_sequence_checker_;
- RtpVideoStreamReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_);
+ RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
RTC_GUARDED_BY(network_sequence_checker_);
rtc::Thread* const network_thread_;
diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
index c481f502a4..a411ca6e9a 100644
--- a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
+++ b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
@@ -15,95 +15,42 @@
#include <utility>
#include <vector>
+#include "absl/memory/memory.h"
#include "api/call/transport.h"
#include "call/video_receive_stream.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/event.h"
+#include "rtc_base/ref_counted_object.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_frame_transformer.h"
-#include "video/rtp_video_stream_receiver.h"
namespace webrtc {
namespace {
using ::testing::_;
+using ::testing::ElementsAre;
using ::testing::NiceMock;
using ::testing::SaveArg;
-std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject() {
+std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject(
+ const RTPVideoHeader& video_header) {
return std::make_unique<video_coding::RtpFrameObject>(
- 0, 0, true, 0, 0, 0, 0, 0, VideoSendTiming(), 0, kVideoCodecGeneric,
- kVideoRotation_0, VideoContentType::UNSPECIFIED, RTPVideoHeader(),
+ 0, 0, true, 0, 0, 0, 0, 0, VideoSendTiming(), 0, video_header.codec,
+ kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header,
absl::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(0));
}
-class FakeTransport : public Transport {
- public:
- bool SendRtp(const uint8_t* packet,
- size_t length,
- const PacketOptions& options) {
- return true;
- }
- bool SendRtcp(const uint8_t* packet, size_t length) { return true; }
-};
-
-class FakeNackSender : public NackSender {
- public:
- void SendNack(const std::vector<uint16_t>& sequence_numbers) {}
- void SendNack(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed) {}
-};
-
-class FakeOnCompleteFrameCallback
- : public video_coding::OnCompleteFrameCallback {
- public:
- void OnCompleteFrame(
- std::unique_ptr<video_coding::EncodedFrame> frame) override {}
-};
-
-class TestRtpVideoStreamReceiverInitializer {
- public:
- TestRtpVideoStreamReceiverInitializer()
- : test_config_(nullptr),
- test_process_thread_(ProcessThread::Create("TestThread")) {
- test_config_.rtp.remote_ssrc = 1111;
- test_config_.rtp.local_ssrc = 2222;
- test_rtp_receive_statistics_ =
- ReceiveStatistics::Create(Clock::GetRealTimeClock());
- }
-
- protected:
- VideoReceiveStream::Config test_config_;
- FakeTransport fake_transport_;
- FakeNackSender fake_nack_sender_;
- FakeOnCompleteFrameCallback fake_on_complete_frame_callback_;
- std::unique_ptr<ProcessThread> test_process_thread_;
- std::unique_ptr<ReceiveStatistics> test_rtp_receive_statistics_;
-};
+std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject() {
+ return CreateRtpFrameObject(RTPVideoHeader());
+}
-class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer,
- public RtpVideoStreamReceiver {
+class TestRtpVideoFrameReceiver : public RtpVideoFrameReceiver {
public:
- TestRtpVideoStreamReceiver()
- : TestRtpVideoStreamReceiverInitializer(),
- RtpVideoStreamReceiver(Clock::GetRealTimeClock(),
- &fake_transport_,
- nullptr,
- nullptr,
- &test_config_,
- test_rtp_receive_statistics_.get(),
- nullptr,
- nullptr,
- test_process_thread_.get(),
- &fake_nack_sender_,
- nullptr,
- &fake_on_complete_frame_callback_,
- nullptr,
- nullptr) {}
- ~TestRtpVideoStreamReceiver() override = default;
+ TestRtpVideoFrameReceiver() {}
+ ~TestRtpVideoFrameReceiver() override = default;
MOCK_METHOD(void,
ManageFrame,
@@ -113,7 +60,7 @@ class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
RegisterTransformedFrameCallbackSinkOnInit) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<MockFrameTransformer>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -127,7 +74,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
UnregisterTransformedFrameSinkCallbackOnReset) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<MockFrameTransformer>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -139,7 +86,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
}
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -153,7 +100,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
ManageFrameOnTransformedFrame) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer(
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
@@ -177,5 +124,48 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
}
+TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
+ TransformableFrameMetadataHasCorrectValue) {
+ TestRtpVideoFrameReceiver receiver;
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
+ new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ &receiver, mock_frame_transformer, rtc::Thread::Current(), 1111);
+ delegate->Init();
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ video_header.height = 720u;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ generic.temporal_index = 3;
+ generic.spatial_index = 2;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ generic.dependencies = {5};
+
+ // Check that the transformable frame passed to the frame transformer has the
+ // correct metadata.
+ EXPECT_CALL(*mock_frame_transformer, Transform)
+ .WillOnce(
+ [](std::unique_ptr<TransformableFrameInterface> transformable_frame) {
+ auto frame =
+ absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>(
+ transformable_frame.release()));
+ ASSERT_TRUE(frame);
+ auto metadata = frame->GetMetadata();
+ EXPECT_EQ(metadata.GetWidth(), 1280u);
+ EXPECT_EQ(metadata.GetHeight(), 720u);
+ EXPECT_EQ(metadata.GetFrameId(), 10);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5));
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+ });
+ // The delegate creates a transformable frame from the RtpFrameObject.
+ delegate->TransformFrame(CreateRtpFrameObject(video_header));
+}
+
} // namespace
} // namespace webrtc
diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc
index 255de54e8b..20d6ae88ad 100644
--- a/video/rtp_video_stream_receiver_unittest.cc
+++ b/video/rtp_video_stream_receiver_unittest.cc
@@ -73,37 +73,45 @@ RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
class MockTransport : public Transport {
public:
- MOCK_METHOD3(SendRtp,
- bool(const uint8_t* packet,
- size_t length,
- const PacketOptions& options));
- MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length));
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};
class MockNackSender : public NackSender {
public:
- MOCK_METHOD1(SendNack, void(const std::vector<uint16_t>& sequence_numbers));
- MOCK_METHOD2(SendNack,
- void(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed));
+ MOCK_METHOD(void,
+ SendNack,
+ (const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed),
+ (override));
};
class MockKeyFrameRequestSender : public KeyFrameRequestSender {
public:
- MOCK_METHOD0(RequestKeyFrame, void());
+ MOCK_METHOD(void, RequestKeyFrame, (), (override));
};
class MockOnCompleteFrameCallback
: public video_coding::OnCompleteFrameCallback {
public:
- MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailNullptr,
- void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailLength,
- void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailBitstream,
- void(video_coding::EncodedFrame* frame));
- void OnCompleteFrame(std::unique_ptr<video_coding::EncodedFrame> frame) {
+ MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailNullptr,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailLength,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailBitstream,
+ (video_coding::EncodedFrame*),
+ ());
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override {
if (!frame) {
DoOnCompleteFrameFailNullptr(nullptr);
return;
@@ -132,7 +140,7 @@ class MockOnCompleteFrameCallback
class MockRtpPacketSink : public RtpPacketSinkInterface {
public:
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};
constexpr uint32_t kSsrc = 111;
@@ -955,8 +963,8 @@ class RtpVideoStreamReceiverDependencyDescriptorTest
FrameDependencyStructure stream_structure;
stream_structure.num_decode_targets = 1;
stream_structure.templates = {
- GenericFrameInfo::Builder().Dtis("S").Build(),
- GenericFrameInfo::Builder().Dtis("S").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().Dtis("S"),
+ FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
};
return stream_structure;
}
@@ -1102,7 +1110,8 @@ TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest,
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtpVideoStreamReceiverTest, RepeatedSecondarySinkDisallowed) {
+using RtpVideoStreamReceiverDeathTest = RtpVideoStreamReceiverTest;
+TEST_F(RtpVideoStreamReceiverDeathTest, RepeatedSecondarySinkDisallowed) {
MockRtpPacketSink secondary_sink;
rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
diff --git a/video/screenshare_loopback.cc b/video/screenshare_loopback.cc
index b8121dc85c..239e472f6e 100644
--- a/video/screenshare_loopback.cc
+++ b/video/screenshare_loopback.cc
@@ -325,32 +325,29 @@ void Loopback() {
call_bitrate_config.max_bitrate_bps = -1; // Don't cap bandwidth estimate.
VideoQualityTest::Params params;
- params.call = {absl::GetFlag(FLAGS_send_side_bwe),
- absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config};
- params.video[0] = {true,
- Width(),
- Height(),
- Fps(),
- MinBitrateKbps() * 1000,
- TargetBitrateKbps() * 1000,
- MaxBitrateKbps() * 1000,
- false,
- Codec(),
- NumTemporalLayers(),
- SelectedTL(),
- MinTransmitBitrateKbps() * 1000,
- false, // ULPFEC disabled.
- false, // FlexFEC disabled.
- false, // Automatic scaling disabled.
- "",
- 0, // capture_device_index.
- SdpVideoFormat::Parameters()};
- params.screenshare[0] = {true, GenerateSlides(), SlideChangeInterval(),
- ScrollDuration(), Slides()};
- params.analyzer = {"screenshare", 0.0, 0.0, DurationSecs(),
- OutputFilename(), GraphTitle()};
+ params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe);
+ params.call.generic_descriptor = absl::GetFlag(FLAGS_generic_descriptor);
+ params.call.call_bitrate_config = call_bitrate_config;
+ params.video[0].enabled = true;
+ params.video[0].width = Width();
+ params.video[0].height = Height();
+ params.video[0].fps = Fps();
+ params.video[0].min_bitrate_bps = MinBitrateKbps() * 1000;
+ params.video[0].target_bitrate_bps = TargetBitrateKbps() * 1000;
+ params.video[0].max_bitrate_bps = MaxBitrateKbps() * 1000;
+ params.video[0].codec = Codec();
+ params.video[0].num_temporal_layers = NumTemporalLayers();
+ params.video[0].selected_tl = SelectedTL();
+ params.video[0].min_transmit_bps = MinTransmitBitrateKbps() * 1000;
+ params.screenshare[0].enabled = true;
+ params.screenshare[0].generate_slides = GenerateSlides();
+ params.screenshare[0].slide_change_interval = SlideChangeInterval();
+ params.screenshare[0].scroll_duration = ScrollDuration();
+ params.screenshare[0].slides = Slides();
params.config = pipe_config;
- params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()};
+ params.logging.rtc_event_log_name = RtcEventLogName();
+ params.logging.rtp_dump_name = RtpDumpName();
+ params.logging.encoded_frame_base_path = EncodedFramePath();
if (NumStreams() > 1 && Stream0().empty() && Stream1().empty()) {
params.ss[0].infer_streams = true;
diff --git a/video/send_delay_stats.cc b/video/send_delay_stats.cc
index a243eda292..56c4164424 100644
--- a/video/send_delay_stats.cc
+++ b/video/send_delay_stats.cc
@@ -41,7 +41,7 @@ SendDelayStats::~SendDelayStats() {
}
void SendDelayStats::UpdateHistograms() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
for (const auto& it : send_delay_counters_) {
AggregatedStats stats = it.second->GetStats();
if (stats.num_samples >= kMinRequiredPeriodicSamples) {
@@ -52,7 +52,7 @@ void SendDelayStats::UpdateHistograms() {
}
void SendDelayStats::AddSsrcs(const VideoSendStream::Config& config) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (ssrcs_.size() > kMaxSsrcMapSize)
return;
for (const auto& ssrc : config.rtp.ssrcs)
@@ -73,7 +73,7 @@ void SendDelayStats::OnSendPacket(uint16_t packet_id,
int64_t capture_time_ms,
uint32_t ssrc) {
// Packet sent to transport.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (ssrcs_.find(ssrc) == ssrcs_.end())
return;
@@ -93,7 +93,7 @@ bool SendDelayStats::OnSentPacket(int packet_id, int64_t time_ms) {
if (packet_id == -1)
return false;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
auto it = packets_.find(packet_id);
if (it == packets_.end())
return false;
diff --git a/video/send_delay_stats.h b/video/send_delay_stats.h
index d9fa16a126..20f9804d64 100644
--- a/video/send_delay_stats.h
+++ b/video/send_delay_stats.h
@@ -20,7 +20,7 @@
#include "call/video_send_stream.h"
#include "modules/include/module_common_types_public.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
#include "video/stats_counter.h"
@@ -66,22 +66,22 @@ class SendDelayStats : public SendPacketObserver {
void UpdateHistograms();
void RemoveOld(int64_t now, PacketMap* packets)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
AvgCounter* GetSendDelayCounter(uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
- PacketMap packets_ RTC_GUARDED_BY(crit_);
- size_t num_old_packets_ RTC_GUARDED_BY(crit_);
- size_t num_skipped_packets_ RTC_GUARDED_BY(crit_);
+ PacketMap packets_ RTC_GUARDED_BY(mutex_);
+ size_t num_old_packets_ RTC_GUARDED_BY(mutex_);
+ size_t num_skipped_packets_ RTC_GUARDED_BY(mutex_);
- std::set<uint32_t> ssrcs_ RTC_GUARDED_BY(crit_);
+ std::set<uint32_t> ssrcs_ RTC_GUARDED_BY(mutex_);
// Mapped by SSRC.
std::map<uint32_t, std::unique_ptr<AvgCounter>> send_delay_counters_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc
index f8d768f9d2..ee32fd91c1 100644
--- a/video/send_statistics_proxy.cc
+++ b/video/send_statistics_proxy.cc
@@ -154,7 +154,7 @@ SendStatisticsProxy::SendStatisticsProxy(
}
SendStatisticsProxy::~SendStatisticsProxy() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uma_container_->UpdateHistograms(rtp_config_, stats_);
int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
@@ -670,7 +670,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
void SendStatisticsProxy::OnEncoderReconfigured(
const VideoEncoderConfig& config,
const std::vector<VideoStream>& streams) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (content_type_ != config.content_type) {
uma_container_->UpdateHistograms(rtp_config_, stats_);
@@ -687,7 +687,7 @@ void SendStatisticsProxy::OnEncoderReconfigured(
void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms,
int encode_usage_percent) {
RTC_DCHECK_GE(encode_time_ms, 0);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uma_container_->encode_time_counter_.Add(encode_time_ms);
encode_time_.Apply(1.0f, encode_time_ms);
stats_.avg_encode_time_ms = std::round(encode_time_.filtered());
@@ -697,7 +697,7 @@ void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms,
void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
int64_t now_ms = clock_->TimeInMilliseconds();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
stats_.suspended = is_suspended;
if (is_suspended) {
// Pause framerate (add min pause time since there may be frames/packets
@@ -717,9 +717,11 @@ void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
uma_container_->quality_adapt_timer_.Stop(now_ms);
} else {
// Start adaptation stats if scaling is enabled.
- if (adaptations_.MaskedCpuCounts().resolution_adaptations.has_value())
+ if (adaptation_limitations_.MaskedCpuCounts()
+ .resolution_adaptations.has_value())
uma_container_->cpu_adapt_timer_.Start(now_ms);
- if (adaptations_.MaskedQualityCounts().resolution_adaptations.has_value())
+ if (adaptation_limitations_.MaskedQualityCounts()
+ .resolution_adaptations.has_value())
uma_container_->quality_adapt_timer_.Start(now_ms);
// Stop pause explicitly for stats that may be zero/not updated for some
// time.
@@ -731,7 +733,7 @@ void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
}
VideoSendStream::Stats SendStatisticsProxy::GetStats() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
PurgeOldStats();
stats_.input_frame_rate =
round(uma_container_->input_frame_rate_tracker_.ComputeRate());
@@ -801,7 +803,7 @@ VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry(
}
void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -813,7 +815,7 @@ void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) {
}
void SendStatisticsProxy::OnSetEncoderTargetRate(uint32_t bitrate_bps) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (uma_container_->target_rate_updates_.last_ms == -1 && bitrate_bps == 0)
return; // Start on first non-zero bitrate, may initially be zero.
@@ -912,7 +914,7 @@ void SendStatisticsProxy::UpdateFallbackDisabledStats(
}
void SendStatisticsProxy::OnMinPixelLimitReached() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uma_container_->fallback_info_disabled_.min_pixel_limit_reached = true;
}
@@ -927,7 +929,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
? encoded_image.SpatialIndex().value_or(0)
: 0;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++stats_.frames_encoded;
// The current encode frame rate is based on previously encoded frames.
double encode_frame_rate = encoded_frame_rate_tracker_.ComputeRate();
@@ -1021,7 +1023,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
}
absl::optional<int> downscales =
- adaptations_.MaskedQualityCounts().resolution_adaptations;
+ adaptation_limitations_.MaskedQualityCounts().resolution_adaptations;
stats_.bw_limited_resolution |=
(downscales.has_value() && downscales.value() > 0);
@@ -1034,29 +1036,30 @@ void SendStatisticsProxy::OnSendEncodedImage(
void SendStatisticsProxy::OnEncoderImplementationChanged(
const std::string& implementation_name) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name,
implementation_name};
stats_.encoder_implementation_name = implementation_name;
}
int SendStatisticsProxy::GetInputFrameRate() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return round(uma_container_->input_frame_rate_tracker_.ComputeRate());
}
int SendStatisticsProxy::GetSendFrameRate() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return round(encoded_frame_rate_tracker_.ComputeRate());
}
void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uma_container_->input_frame_rate_tracker_.AddSamples(1);
uma_container_->input_fps_counter_.Add(1);
uma_container_->input_width_counter_.Add(width);
uma_container_->input_height_counter_.Add(height);
- if (adaptations_.MaskedCpuCounts().resolution_adaptations.has_value()) {
+ if (adaptation_limitations_.MaskedCpuCounts()
+ .resolution_adaptations.has_value()) {
uma_container_->cpu_limited_frame_counter_.Add(
stats_.cpu_limited_resolution);
}
@@ -1068,7 +1071,7 @@ void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
}
void SendStatisticsProxy::OnFrameDropped(DropReason reason) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
switch (reason) {
case DropReason::kSource:
++stats_.frames_dropped_by_capturer;
@@ -1089,20 +1092,20 @@ void SendStatisticsProxy::OnFrameDropped(DropReason reason) {
}
void SendStatisticsProxy::ClearAdaptationStats() {
- rtc::CritScope lock(&crit_);
- adaptations_.set_cpu_counts(VideoAdaptationCounters());
- adaptations_.set_quality_counts(VideoAdaptationCounters());
+ MutexLock lock(&mutex_);
+ adaptation_limitations_.set_cpu_counts(VideoAdaptationCounters());
+ adaptation_limitations_.set_quality_counts(VideoAdaptationCounters());
UpdateAdaptationStats();
}
void SendStatisticsProxy::UpdateAdaptationSettings(
VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
- rtc::CritScope lock(&crit_);
- adaptations_.UpdateMaskingSettings(cpu_settings, quality_settings);
- SetAdaptTimer(adaptations_.MaskedCpuCounts(),
+ MutexLock lock(&mutex_);
+ adaptation_limitations_.UpdateMaskingSettings(cpu_settings, quality_settings);
+ SetAdaptTimer(adaptation_limitations_.MaskedCpuCounts(),
&uma_container_->cpu_adapt_timer_);
- SetAdaptTimer(adaptations_.MaskedQualityCounts(),
+ SetAdaptTimer(adaptation_limitations_.MaskedQualityCounts(),
&uma_container_->quality_adapt_timer_);
UpdateAdaptationStats();
}
@@ -1111,11 +1114,12 @@ void SendStatisticsProxy::OnAdaptationChanged(
VideoAdaptationReason reason,
const VideoAdaptationCounters& cpu_counters,
const VideoAdaptationCounters& quality_counters) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
- MaskedAdaptationCounts receiver = adaptations_.MaskedQualityCounts();
- adaptations_.set_cpu_counts(cpu_counters);
- adaptations_.set_quality_counts(quality_counters);
+ MaskedAdaptationCounts receiver =
+ adaptation_limitations_.MaskedQualityCounts();
+ adaptation_limitations_.set_cpu_counts(cpu_counters);
+ adaptation_limitations_.set_quality_counts(quality_counters);
switch (reason) {
case VideoAdaptationReason::kCpu:
++stats_.number_of_cpu_adapt_changes;
@@ -1123,7 +1127,7 @@ void SendStatisticsProxy::OnAdaptationChanged(
case VideoAdaptationReason::kQuality:
TryUpdateInitialQualityResolutionAdaptUp(
receiver.resolution_adaptations,
- adaptations_.MaskedQualityCounts().resolution_adaptations);
+ adaptation_limitations_.MaskedQualityCounts().resolution_adaptations);
++stats_.number_of_quality_adapt_changes;
break;
}
@@ -1131,8 +1135,8 @@ void SendStatisticsProxy::OnAdaptationChanged(
}
void SendStatisticsProxy::UpdateAdaptationStats() {
- auto cpu_counts = adaptations_.MaskedCpuCounts();
- auto quality_counts = adaptations_.MaskedQualityCounts();
+ auto cpu_counts = adaptation_limitations_.MaskedCpuCounts();
+ auto quality_counts = adaptation_limitations_.MaskedQualityCounts();
bool is_cpu_limited = cpu_counts.resolution_adaptations > 0 ||
cpu_counts.num_framerate_reductions > 0;
@@ -1204,7 +1208,7 @@ void SendStatisticsProxy::OnBitrateAllocationUpdated(
spatial_layers[i] = (allocation.GetSpatialLayerSum(i) > 0);
}
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
bw_limited_layers_ = allocation.is_bw_limited();
UpdateAdaptationStats();
@@ -1227,14 +1231,14 @@ void SendStatisticsProxy::OnBitrateAllocationUpdated(
// resolution or not. |is_scaled| is a flag indicating if the video is scaled
// down.
void SendStatisticsProxy::OnEncoderInternalScalerUpdate(bool is_scaled) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
internal_encoder_scaler_ = is_scaled;
UpdateAdaptationStats();
}
// TODO(asapersson): Include fps changes.
void SendStatisticsProxy::OnInitialQualityResolutionAdaptDown() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++uma_container_->initial_quality_changes_.down;
}
@@ -1270,7 +1274,7 @@ void SendStatisticsProxy::SetAdaptTimer(const MaskedAdaptationCounts& counts,
void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
uint32_t ssrc,
const RtcpPacketTypeCounter& packet_counter) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -1282,7 +1286,7 @@ void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -1293,7 +1297,7 @@ void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
void SendStatisticsProxy::OnReportBlockDataUpdated(
ReportBlockData report_block_data) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats =
GetStatsEntry(report_block_data.report_block().source_ssrc);
if (!stats)
@@ -1304,7 +1308,7 @@ void SendStatisticsProxy::OnReportBlockDataUpdated(
void SendStatisticsProxy::DataCountersUpdated(
const StreamDataCounters& counters,
uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc;
@@ -1346,7 +1350,7 @@ void SendStatisticsProxy::DataCountersUpdated(
void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps,
uint32_t retransmit_bitrate_bps,
uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -1357,7 +1361,7 @@ void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps,
void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts,
uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -1369,7 +1373,7 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
int max_delay_ms,
uint64_t total_delay_ms,
uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (!stats)
return;
@@ -1459,6 +1463,16 @@ void SendStatisticsProxy::Adaptations::set_quality_counts(
const VideoAdaptationCounters& quality_counts) {
quality_counts_ = quality_counts;
}
+
+VideoAdaptationCounters SendStatisticsProxy::Adaptations::cpu_counts() const {
+ return cpu_counts_;
+}
+
+VideoAdaptationCounters SendStatisticsProxy::Adaptations::quality_counts()
+ const {
+ return quality_counts_;
+}
+
void SendStatisticsProxy::Adaptations::UpdateMaskingSettings(
VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h
index 1d2fd21cfa..0de7df290e 100644
--- a/video/send_statistics_proxy.h
+++ b/video/send_statistics_proxy.h
@@ -25,9 +25,9 @@
#include "modules/rtp_rtcp/include/report_block_data.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_coding_defines.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/exp_filter.h"
#include "rtc_base/rate_tracker.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
#include "video/quality_limitation_reason_tracker.h"
@@ -223,9 +223,9 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
};
typedef std::map<uint32_t, Frame, TimestampOlderThan> EncodedFrameMap;
- void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
struct MaskedAdaptationCounts {
absl::optional<int> resolution_adaptations = absl::nullopt;
@@ -240,6 +240,9 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
void set_cpu_counts(const VideoAdaptationCounters& cpu_counts);
void set_quality_counts(const VideoAdaptationCounters& quality_counts);
+ VideoAdaptationCounters cpu_counts() const;
+ VideoAdaptationCounters quality_counts() const;
+
void UpdateMaskingSettings(AdaptationSettings cpu_settings,
AdaptationSettings quality_settings);
@@ -254,52 +257,52 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
};
void SetAdaptTimer(const MaskedAdaptationCounts& counts, StatsTimer* timer)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void UpdateAdaptationStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void UpdateAdaptationStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void TryUpdateInitialQualityResolutionAdaptUp(
absl::optional<int> old_quality_downscales,
absl::optional<int> updated_quality_downscales)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void UpdateEncoderFallbackStats(const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void UpdateFallbackDisabledStats(const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Clock* const clock_;
const std::string payload_name_;
const RtpConfig rtp_config_;
const absl::optional<int> fallback_max_pixels_;
const absl::optional<int> fallback_max_pixels_disabled_;
- rtc::CriticalSection crit_;
- VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(mutex_);
const int64_t start_ms_;
- VideoSendStream::Stats stats_ RTC_GUARDED_BY(crit_);
- std::map<uint32_t, StatsUpdateTimes> update_times_ RTC_GUARDED_BY(crit_);
- rtc::ExpFilter encode_time_ RTC_GUARDED_BY(crit_);
+ VideoSendStream::Stats stats_ RTC_GUARDED_BY(mutex_);
+ std::map<uint32_t, StatsUpdateTimes> update_times_ RTC_GUARDED_BY(mutex_);
+ rtc::ExpFilter encode_time_ RTC_GUARDED_BY(mutex_);
QualityLimitationReasonTracker quality_limitation_reason_tracker_
- RTC_GUARDED_BY(crit_);
- rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(crit_);
- rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
+ rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(mutex_);
+ rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(mutex_);
std::map<uint32_t, std::unique_ptr<rtc::RateTracker>>
- encoded_frame_rate_trackers_ RTC_GUARDED_BY(crit_);
+ encoded_frame_rate_trackers_ RTC_GUARDED_BY(mutex_);
- absl::optional<int64_t> last_outlier_timestamp_ RTC_GUARDED_BY(crit_);
+ absl::optional<int64_t> last_outlier_timestamp_ RTC_GUARDED_BY(mutex_);
- int last_num_spatial_layers_ RTC_GUARDED_BY(crit_);
- int last_num_simulcast_streams_ RTC_GUARDED_BY(crit_);
+ int last_num_spatial_layers_ RTC_GUARDED_BY(mutex_);
+ int last_num_simulcast_streams_ RTC_GUARDED_BY(mutex_);
std::array<bool, kMaxSpatialLayers> last_spatial_layer_use_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
// Indicates if the latest bitrate allocation had layers disabled by low
// available bandwidth.
- bool bw_limited_layers_ RTC_GUARDED_BY(crit_);
+ bool bw_limited_layers_ RTC_GUARDED_BY(mutex_);
// Indicastes if the encoder internally downscales input image.
- bool internal_encoder_scaler_ RTC_GUARDED_BY(crit_);
- Adaptations adaptations_ RTC_GUARDED_BY(crit_);
+ bool internal_encoder_scaler_ RTC_GUARDED_BY(mutex_);
+ Adaptations adaptation_limitations_ RTC_GUARDED_BY(mutex_);
struct EncoderChangeEvent {
std::string previous_encoder_implementation;
@@ -371,7 +374,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
qp_counters_; // QP counters mapped by spatial idx.
};
- std::unique_ptr<UmaSamplesContainer> uma_container_ RTC_GUARDED_BY(crit_);
+ std::unique_ptr<UmaSamplesContainer> uma_container_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/video/sv_loopback.cc b/video/sv_loopback.cc
index ec236d6e62..af475ae4eb 100644
--- a/video/sv_loopback.cc
+++ b/video/sv_loopback.cc
@@ -605,56 +605,62 @@ void Loopback() {
call_bitrate_config.max_bitrate_bps =
(ScreenshareMaxBitrateKbps() + VideoMaxBitrateKbps()) * 1000;
- VideoQualityTest::Params params, camera_params, screenshare_params;
- params.call = {absl::GetFlag(FLAGS_send_side_bwe),
- absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config,
- 0};
+ VideoQualityTest::Params params;
+ params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe);
+ params.call.generic_descriptor = absl::GetFlag(FLAGS_generic_descriptor);
+ params.call.call_bitrate_config = call_bitrate_config;
params.call.dual_video = true;
- params.video[screenshare_idx] = {true,
- ScreenshareWidth(),
- ScreenshareHeight(),
- ScreenshareFps(),
- ScreenshareMinBitrateKbps() * 1000,
- ScreenshareTargetBitrateKbps() * 1000,
- ScreenshareMaxBitrateKbps() * 1000,
- false,
- Codec(),
- ScreenshareNumTemporalLayers(),
- ScreenshareSelectedTL(),
- ScreenshareMinTransmitBitrateKbps() * 1000,
- false, // ULPFEC disabled.
- false, // FlexFEC disabled.
- false, // Automatic scaling disabled
- ""};
- params.video[camera_idx] = {absl::GetFlag(FLAGS_video),
- VideoWidth(),
- VideoHeight(),
- VideoFps(),
- VideoMinBitrateKbps() * 1000,
- VideoTargetBitrateKbps() * 1000,
- VideoMaxBitrateKbps() * 1000,
- absl::GetFlag(FLAGS_suspend_below_min_bitrate),
- Codec(),
- VideoNumTemporalLayers(),
- VideoSelectedTL(),
- 0, // No min transmit bitrate.
- absl::GetFlag(FLAGS_use_ulpfec),
- absl::GetFlag(FLAGS_use_flexfec),
- false,
- VideoClip(),
- GetCaptureDevice()};
- params.audio = {absl::GetFlag(FLAGS_audio),
- absl::GetFlag(FLAGS_audio_video_sync),
- absl::GetFlag(FLAGS_audio_dtx)};
- params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()};
- params.analyzer = {"dual_streams", 0.0, 0.0, DurationSecs(),
- OutputFilename(), GraphTitle()};
+ params.video[screenshare_idx].enabled = true;
+ params.video[screenshare_idx].width = ScreenshareWidth();
+ params.video[screenshare_idx].height = ScreenshareHeight();
+ params.video[screenshare_idx].fps = ScreenshareFps();
+ params.video[screenshare_idx].min_bitrate_bps =
+ ScreenshareMinBitrateKbps() * 1000;
+ params.video[screenshare_idx].target_bitrate_bps =
+ ScreenshareTargetBitrateKbps() * 1000;
+ params.video[screenshare_idx].max_bitrate_bps =
+ ScreenshareMaxBitrateKbps() * 1000;
+ params.video[screenshare_idx].codec = Codec();
+ params.video[screenshare_idx].num_temporal_layers =
+ ScreenshareNumTemporalLayers();
+ params.video[screenshare_idx].selected_tl = ScreenshareSelectedTL();
+ params.video[screenshare_idx].min_transmit_bps =
+ ScreenshareMinTransmitBitrateKbps() * 1000;
+ params.video[camera_idx].enabled = absl::GetFlag(FLAGS_video);
+ params.video[camera_idx].width = VideoWidth();
+ params.video[camera_idx].height = VideoHeight();
+ params.video[camera_idx].fps = VideoFps();
+ params.video[camera_idx].min_bitrate_bps = VideoMinBitrateKbps() * 1000;
+ params.video[camera_idx].target_bitrate_bps = VideoTargetBitrateKbps() * 1000;
+ params.video[camera_idx].max_bitrate_bps = VideoMaxBitrateKbps() * 1000;
+ params.video[camera_idx].suspend_below_min_bitrate =
+ absl::GetFlag(FLAGS_suspend_below_min_bitrate);
+ params.video[camera_idx].codec = Codec();
+ params.video[camera_idx].num_temporal_layers = VideoNumTemporalLayers();
+ params.video[camera_idx].selected_tl = VideoSelectedTL();
+ params.video[camera_idx].ulpfec = absl::GetFlag(FLAGS_use_ulpfec);
+ params.video[camera_idx].flexfec = absl::GetFlag(FLAGS_use_flexfec);
+ params.video[camera_idx].clip_path = VideoClip();
+ params.video[camera_idx].capture_device_index = GetCaptureDevice();
+ params.audio.enabled = absl::GetFlag(FLAGS_audio);
+ params.audio.sync_video = absl::GetFlag(FLAGS_audio_video_sync);
+ params.audio.dtx = absl::GetFlag(FLAGS_audio_dtx);
+ params.logging.rtc_event_log_name = RtcEventLogName();
+ params.logging.rtp_dump_name = RtpDumpName();
+ params.logging.encoded_frame_base_path = EncodedFramePath();
+ params.analyzer.test_label = "dual_streams";
+ params.analyzer.test_durations_secs = DurationSecs();
+ params.analyzer.graph_data_output_filename = OutputFilename();
+ params.analyzer.graph_title = GraphTitle();
params.config = pipe_config;
params.screenshare[camera_idx].enabled = false;
- params.screenshare[screenshare_idx] = {true, GenerateSlides(),
- SlideChangeInterval(),
- ScrollDuration(), Slides()};
+ params.screenshare[screenshare_idx].enabled = true;
+ params.screenshare[screenshare_idx].generate_slides = GenerateSlides();
+ params.screenshare[screenshare_idx].slide_change_interval =
+ SlideChangeInterval();
+ params.screenshare[screenshare_idx].scroll_duration = ScrollDuration();
+ params.screenshare[screenshare_idx].slides = Slides();
if (VideoNumStreams() > 1 && VideoStream0().empty() &&
VideoStream1().empty()) {
diff --git a/video/test/mock_video_stream_encoder.h b/video/test/mock_video_stream_encoder.h
index 8e429681b8..c9efc76598 100644
--- a/video/test/mock_video_stream_encoder.h
+++ b/video/test/mock_video_stream_encoder.h
@@ -10,6 +10,8 @@
#ifndef VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_
#define VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_
+#include <vector>
+
#include "api/video/video_stream_encoder_interface.h"
#include "test/gmock.h"
@@ -17,22 +19,44 @@ namespace webrtc {
class MockVideoStreamEncoder : public VideoStreamEncoderInterface {
public:
- MOCK_METHOD2(SetSource,
- void(rtc::VideoSourceInterface<VideoFrame>*,
- const DegradationPreference&));
- MOCK_METHOD2(SetSink, void(EncoderSink*, bool));
- MOCK_METHOD1(SetStartBitrate, void(int));
- MOCK_METHOD0(SendKeyFrame, void());
- MOCK_METHOD1(OnLossNotification, void(const VideoEncoder::LossNotification&));
- MOCK_METHOD6(OnBitrateUpdated,
- void(DataRate, DataRate, DataRate, uint8_t, int64_t, double));
- MOCK_METHOD1(OnFrame, void(const VideoFrame&));
- MOCK_METHOD1(SetBitrateAllocationObserver,
- void(VideoBitrateAllocationObserver*));
- MOCK_METHOD1(SetFecControllerOverride, void(FecControllerOverride*));
- MOCK_METHOD0(Stop, void());
+ MOCK_METHOD(void,
+ AddAdaptationResource,
+ (rtc::scoped_refptr<Resource>),
+ (override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<Resource>>,
+ GetAdaptationResources,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ SetSource,
+ (rtc::VideoSourceInterface<VideoFrame>*,
+ const DegradationPreference&),
+ (override));
+ MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override));
+ MOCK_METHOD(void, SetStartBitrate, (int), (override));
+ MOCK_METHOD(void, SendKeyFrame, (), (override));
+ MOCK_METHOD(void,
+ OnLossNotification,
+ (const VideoEncoder::LossNotification&),
+ (override));
+ MOCK_METHOD(void,
+ OnBitrateUpdated,
+ (DataRate, DataRate, DataRate, uint8_t, int64_t, double),
+ (override));
+ MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override));
+ MOCK_METHOD(void,
+ SetBitrateAllocationObserver,
+ (VideoBitrateAllocationObserver*),
+ (override));
+ MOCK_METHOD(void,
+ SetFecControllerOverride,
+ (FecControllerOverride*),
+ (override));
+ MOCK_METHOD(void, Stop, (), (override));
- MOCK_METHOD2(MockedConfigureEncoder, void(const VideoEncoderConfig&, size_t));
+ MOCK_METHOD(void,
+ MockedConfigureEncoder,
+ (const VideoEncoderConfig&, size_t));
// gtest generates implicit copy which is not allowed on VideoEncoderConfig,
// so we can't mock ConfigureEncoder directly.
void ConfigureEncoder(VideoEncoderConfig config,
diff --git a/video/video_analyzer.cc b/video/video_analyzer.cc
index f4a1c96d74..365a089302 100644
--- a/video/video_analyzer.cc
+++ b/video/video_analyzer.cc
@@ -151,7 +151,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport,
VideoAnalyzer::~VideoAnalyzer() {
{
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
quit_ = true;
}
for (rtc::PlatformThread* thread : comparison_thread_pool_) {
@@ -174,25 +174,25 @@ void VideoAnalyzer::SetSource(
}
void VideoAnalyzer::SetCall(Call* call) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!call_);
call_ = call;
}
void VideoAnalyzer::SetSendStream(VideoSendStream* stream) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!send_stream_);
send_stream_ = stream;
}
void VideoAnalyzer::SetReceiveStream(VideoReceiveStream* stream) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!receive_stream_);
receive_stream_ = stream;
}
void VideoAnalyzer::SetAudioReceiveStream(AudioReceiveStream* recv_stream) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_CHECK(!audio_receive_stream_);
audio_receive_stream_ = recv_stream;
}
@@ -234,7 +234,7 @@ PacketReceiver::DeliveryStatus VideoAnalyzer::DeliverPacket(
// (FlexFEC and media are sent on different SSRCs, which have different
// timestamps spaces.)
// Also ignore packets from wrong SSRC, but include retransmits.
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
int64_t timestamp =
wrap_handler_.Unwrap(rtp_packet.Timestamp() - rtp_timestamp_delta_);
recv_times_[timestamp] = clock_->CurrentNtpInMilliseconds();
@@ -245,7 +245,7 @@ PacketReceiver::DeliveryStatus VideoAnalyzer::DeliverPacket(
}
void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
if (!first_encoded_timestamp_) {
while (frames_.front().timestamp() != video_frame.timestamp()) {
++dropped_frames_before_first_encode_;
@@ -257,7 +257,7 @@ void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) {
}
void VideoAnalyzer::PostEncodeOnFrame(size_t stream_id, uint32_t timestamp) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
if (!first_sent_timestamp_ && stream_id == selected_stream_) {
first_sent_timestamp_ = timestamp;
}
@@ -273,7 +273,7 @@ bool VideoAnalyzer::SendRtp(const uint8_t* packet,
bool result = transport_->SendRtp(packet, length, options);
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
if (rtp_timestamp_delta_ == 0 && rtp_packet.Ssrc() == ssrc_to_analyze_) {
RTC_CHECK(static_cast<bool>(first_sent_timestamp_));
rtp_timestamp_delta_ = rtp_packet.Timestamp() - *first_sent_timestamp_;
@@ -304,7 +304,7 @@ bool VideoAnalyzer::SendRtcp(const uint8_t* packet, size_t length) {
void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) {
int64_t render_time_ms = clock_->CurrentNtpInMilliseconds();
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
StartExcludingCpuThreadTime();
@@ -361,7 +361,7 @@ void VideoAnalyzer::Wait() {
int frames_processed;
int frames_captured;
{
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
frames_processed = frames_processed_;
frames_captured = captured_frames_;
}
@@ -401,29 +401,29 @@ void VideoAnalyzer::Wait() {
}
void VideoAnalyzer::StartMeasuringCpuProcessTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetProcessCpuTimeNanos();
wallclock_time_ -= rtc::SystemTimeNanos();
}
void VideoAnalyzer::StopMeasuringCpuProcessTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ += rtc::GetProcessCpuTimeNanos();
wallclock_time_ += rtc::SystemTimeNanos();
}
void VideoAnalyzer::StartExcludingCpuThreadTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ += rtc::GetThreadCpuTimeNanos();
}
void VideoAnalyzer::StopExcludingCpuThreadTime() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetThreadCpuTimeNanos();
}
double VideoAnalyzer::GetCpuUsagePercent() {
- rtc::CritScope lock(&cpu_measurement_lock_);
+ MutexLock lock(&cpu_measurement_lock_);
return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
}
@@ -456,7 +456,7 @@ bool VideoAnalyzer::IsInSelectedSpatialAndTemporalLayer(
}
void VideoAnalyzer::PollStats() {
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
Call::Stats call_stats = call_->GetStats();
send_bandwidth_bps_.AddSample(call_stats.send_bandwidth_bps);
@@ -564,13 +564,13 @@ bool VideoAnalyzer::CompareFrames() {
}
bool VideoAnalyzer::PopComparison(VideoAnalyzer::FrameComparison* comparison) {
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
// If AllFramesRecorded() is true, it means we have already popped
// frames_to_process_ frames from comparisons_, so there is no more work
// for this thread to be done. frames_processed_ might still be lower if
// all comparisons are not done, but those frames are currently being
// worked on by other threads.
- if (comparisons_.empty() || AllFramesRecorded())
+ if (comparisons_.empty() || AllFramesRecordedLocked())
return false;
*comparison = comparisons_.front();
@@ -581,19 +581,22 @@ bool VideoAnalyzer::PopComparison(VideoAnalyzer::FrameComparison* comparison) {
}
void VideoAnalyzer::FrameRecorded() {
- rtc::CritScope crit(&comparison_lock_);
++frames_recorded_;
}
bool VideoAnalyzer::AllFramesRecorded() {
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
+ return AllFramesRecordedLocked();
+}
+
+bool VideoAnalyzer::AllFramesRecordedLocked() {
RTC_DCHECK(frames_recorded_ <= frames_to_process_);
return frames_recorded_ == frames_to_process_ ||
(clock_->CurrentTime() > test_end_ && comparisons_.empty()) || quit_;
}
bool VideoAnalyzer::FrameProcessed() {
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
++frames_processed_;
assert(frames_processed_ <= frames_to_process_);
return frames_processed_ == frames_to_process_ ||
@@ -606,11 +609,11 @@ void VideoAnalyzer::PrintResults() {
StopMeasuringCpuProcessTime();
int dropped_frames_diff;
{
- rtc::CritScope crit(&crit_);
+ MutexLock lock(&lock_);
dropped_frames_diff = dropped_frames_before_first_encode_ +
dropped_frames_before_rendering_ + frames_.size();
}
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
PrintResult("psnr", psnr_, "dB", ImproveDirection::kBiggerIsBetter);
PrintResult("ssim", ssim_, "unitless", ImproveDirection::kBiggerIsBetter);
PrintResult("sender_time", sender_time_, "ms",
@@ -753,7 +756,7 @@ void VideoAnalyzer::PerformFrameComparison(
ssim = I420SSIM(&*comparison.reference, &*comparison.render);
}
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
if (psnr >= 0.0 && (!worst_frame_ || worst_frame_->psnr > psnr)) {
worst_frame_.emplace(FrameWithPsnr{psnr, *comparison.render});
@@ -842,7 +845,7 @@ void VideoAnalyzer::PrintResultWithExternalMean(
void VideoAnalyzer::PrintSamplesToFile() {
FILE* out = graph_data_output_file_;
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
absl::c_sort(samples_, [](const Sample& A, const Sample& B) -> bool {
return A.input_time_ms < B.input_time_ms;
});
@@ -873,14 +876,14 @@ void VideoAnalyzer::AddCapturedFrameForComparison(
const VideoFrame& video_frame) {
bool must_capture = false;
{
- rtc::CritScope lock(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
must_capture = captured_frames_ < frames_to_process_;
if (must_capture) {
++captured_frames_;
}
}
if (must_capture) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
frames_.push_back(video_frame);
}
}
@@ -903,7 +906,7 @@ void VideoAnalyzer::AddFrameComparison(const VideoFrame& reference,
if (it != encoded_frame_sizes_.end())
encoded_frame_sizes_.erase(it);
- rtc::CritScope crit(&comparison_lock_);
+ MutexLock lock(&comparison_lock_);
if (comparisons_.size() < kMaxComparisons) {
comparisons_.push_back(FrameComparison(
reference, render, dropped, reference.ntp_time_ms(), send_time_ms,
@@ -999,7 +1002,7 @@ void VideoAnalyzer::CapturedFrameForwarder::OnFrame(
copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
copy.set_timestamp(copy.ntp_time_ms() * 90);
analyzer_->AddCapturedFrameForComparison(copy);
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
++captured_frames_;
if (send_stream_input_ && clock_->CurrentTime() <= test_end_ &&
captured_frames_ <= frames_to_capture_) {
@@ -1011,7 +1014,7 @@ void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink(
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink);
send_stream_input_ = sink;
}
@@ -1022,7 +1025,7 @@ void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink(
void VideoAnalyzer::CapturedFrameForwarder::RemoveSink(
rtc::VideoSinkInterface<VideoFrame>* sink) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&lock_);
RTC_DCHECK(sink == send_stream_input_);
send_stream_input_ = nullptr;
}
diff --git a/video/video_analyzer.h b/video/video_analyzer.h
index 14f77ac53c..c2401d2828 100644
--- a/video/video_analyzer.h
+++ b/video/video_analyzer.h
@@ -23,6 +23,7 @@
#include "rtc_base/event.h"
#include "rtc_base/numerics/running_statistics.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/time_utils.h"
#include "test/layer_filtering_transport.h"
#include "test/rtp_file_writer.h"
@@ -83,9 +84,9 @@ class VideoAnalyzer : public PacketReceiver,
void StartMeasuringCpuProcessTime();
void StopMeasuringCpuProcessTime();
- void StartExcludingCpuThreadTime();
- void StopExcludingCpuThreadTime();
- double GetCpuUsagePercent();
+ void StartExcludingCpuThreadTime() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_);
+ void StopExcludingCpuThreadTime() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_);
+ double GetCpuUsagePercent() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_);
test::LayerFilteringTransport* const transport_;
PacketReceiver* receiver_;
@@ -153,22 +154,25 @@ class VideoAnalyzer : public PacketReceiver,
void SetSource(rtc::VideoSourceInterface<VideoFrame>* video_source);
private:
- void OnFrame(const VideoFrame& video_frame) override;
+ void OnFrame(const VideoFrame& video_frame)
+ RTC_LOCKS_EXCLUDED(lock_) override;
// Called when |send_stream_.SetSource()| is called.
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
- const rtc::VideoSinkWants& wants) override;
+ const rtc::VideoSinkWants& wants)
+ RTC_LOCKS_EXCLUDED(lock_) override;
// Called by |send_stream_| when |send_stream_.SetSource()| is called.
- void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+ void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink)
+ RTC_LOCKS_EXCLUDED(lock_) override;
VideoAnalyzer* const analyzer_;
- rtc::CriticalSection crit_;
+ Mutex lock_;
rtc::VideoSinkInterface<VideoFrame>* send_stream_input_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(lock_);
VideoSourceInterface<VideoFrame>* video_source_;
Clock* clock_;
- int captured_frames_ RTC_GUARDED_BY(crit_);
+ int captured_frames_ RTC_GUARDED_BY(lock_);
const int frames_to_capture_;
const Timestamp test_end_;
};
@@ -184,21 +188,23 @@ class VideoAnalyzer : public PacketReceiver,
const VideoFrame& render,
bool dropped,
int64_t render_time_ms)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
- void PollStats();
+ void PollStats() RTC_LOCKS_EXCLUDED(comparison_lock_);
static void FrameComparisonThread(void* obj);
bool CompareFrames();
bool PopComparison(FrameComparison* comparison);
// Increment counter for number of frames received for comparison.
- void FrameRecorded();
+ void FrameRecorded() RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
// Returns true if all frames to be compared have been taken from the queue.
- bool AllFramesRecorded();
+ bool AllFramesRecorded() RTC_LOCKS_EXCLUDED(comparison_lock_);
+ bool AllFramesRecordedLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
// Increase count of number of frames processed. Returns true if this was the
// last frame to be processed.
- bool FrameProcessed();
- void PrintResults();
- void PerformFrameComparison(const FrameComparison& comparison);
+ bool FrameProcessed() RTC_LOCKS_EXCLUDED(comparison_lock_);
+ void PrintResults() RTC_LOCKS_EXCLUDED(lock_, comparison_lock_);
+ void PerformFrameComparison(const FrameComparison& comparison)
+ RTC_LOCKS_EXCLUDED(comparison_lock_);
void PrintResult(const char* result_type,
Statistics stats,
const char* unit,
@@ -209,8 +215,9 @@ class VideoAnalyzer : public PacketReceiver,
Statistics stats,
const char* unit,
webrtc::test::ImproveDirection improve_direction);
- void PrintSamplesToFile(void);
- void AddCapturedFrameForComparison(const VideoFrame& video_frame);
+ void PrintSamplesToFile(void) RTC_LOCKS_EXCLUDED(comparison_lock_);
+ void AddCapturedFrameForComparison(const VideoFrame& video_frame)
+ RTC_LOCKS_EXCLUDED(lock_, comparison_lock_);
Call* call_;
VideoSendStream* send_stream_;
@@ -226,7 +233,7 @@ class VideoAnalyzer : public PacketReceiver,
const int selected_sl_;
const int selected_tl_;
- rtc::CriticalSection comparison_lock_;
+ Mutex comparison_lock_;
std::vector<Sample> samples_ RTC_GUARDED_BY(comparison_lock_);
Statistics sender_time_ RTC_GUARDED_BY(comparison_lock_);
Statistics receiver_time_ RTC_GUARDED_BY(comparison_lock_);
@@ -264,32 +271,33 @@ class VideoAnalyzer : public PacketReceiver,
size_t last_fec_bytes_;
- rtc::CriticalSection crit_;
+ Mutex lock_ RTC_ACQUIRED_BEFORE(comparison_lock_)
+ RTC_ACQUIRED_BEFORE(cpu_measurement_lock_);
const int frames_to_process_;
const Timestamp test_end_;
int frames_recorded_ RTC_GUARDED_BY(comparison_lock_);
int frames_processed_ RTC_GUARDED_BY(comparison_lock_);
int captured_frames_ RTC_GUARDED_BY(comparison_lock_);
int dropped_frames_ RTC_GUARDED_BY(comparison_lock_);
- int dropped_frames_before_first_encode_ RTC_GUARDED_BY(crit_);
- int dropped_frames_before_rendering_ RTC_GUARDED_BY(crit_);
+ int dropped_frames_before_first_encode_ RTC_GUARDED_BY(lock_);
+ int dropped_frames_before_rendering_ RTC_GUARDED_BY(lock_);
int64_t last_render_time_ RTC_GUARDED_BY(comparison_lock_);
int64_t last_render_delta_ms_ RTC_GUARDED_BY(comparison_lock_);
int64_t last_unfreeze_time_ms_ RTC_GUARDED_BY(comparison_lock_);
- uint32_t rtp_timestamp_delta_ RTC_GUARDED_BY(crit_);
+ uint32_t rtp_timestamp_delta_ RTC_GUARDED_BY(lock_);
- rtc::CriticalSection cpu_measurement_lock_;
+ Mutex cpu_measurement_lock_;
int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_);
int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_);
- std::deque<VideoFrame> frames_ RTC_GUARDED_BY(crit_);
- absl::optional<VideoFrame> last_rendered_frame_ RTC_GUARDED_BY(crit_);
- rtc::TimestampWrapAroundHandler wrap_handler_ RTC_GUARDED_BY(crit_);
- std::map<int64_t, int64_t> send_times_ RTC_GUARDED_BY(crit_);
- std::map<int64_t, int64_t> recv_times_ RTC_GUARDED_BY(crit_);
- std::map<int64_t, size_t> encoded_frame_sizes_ RTC_GUARDED_BY(crit_);
- absl::optional<uint32_t> first_encoded_timestamp_ RTC_GUARDED_BY(crit_);
- absl::optional<uint32_t> first_sent_timestamp_ RTC_GUARDED_BY(crit_);
+ std::deque<VideoFrame> frames_ RTC_GUARDED_BY(lock_);
+ absl::optional<VideoFrame> last_rendered_frame_ RTC_GUARDED_BY(lock_);
+ rtc::TimestampWrapAroundHandler wrap_handler_ RTC_GUARDED_BY(lock_);
+ std::map<int64_t, int64_t> send_times_ RTC_GUARDED_BY(lock_);
+ std::map<int64_t, int64_t> recv_times_ RTC_GUARDED_BY(lock_);
+ std::map<int64_t, size_t> encoded_frame_sizes_ RTC_GUARDED_BY(lock_);
+ absl::optional<uint32_t> first_encoded_timestamp_ RTC_GUARDED_BY(lock_);
+ absl::optional<uint32_t> first_sent_timestamp_ RTC_GUARDED_BY(lock_);
const double avg_psnr_threshold_;
const double avg_ssim_threshold_;
bool is_quick_test_enabled_;
diff --git a/video/video_loopback.cc b/video/video_loopback.cc
index 99ff4493bd..7762d9653d 100644
--- a/video/video_loopback.cc
+++ b/video/video_loopback.cc
@@ -376,33 +376,40 @@ void Loopback() {
call_bitrate_config.max_bitrate_bps = -1; // Don't cap bandwidth estimate.
VideoQualityTest::Params params;
- params.call = {absl::GetFlag(FLAGS_send_side_bwe),
- absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config,
- 0};
- params.video[0] = {absl::GetFlag(FLAGS_video),
- Width(),
- Height(),
- Fps(),
- MinBitrateKbps() * 1000,
- TargetBitrateKbps() * 1000,
- MaxBitrateKbps() * 1000,
- absl::GetFlag(FLAGS_suspend_below_min_bitrate),
- Codec(),
- NumTemporalLayers(),
- SelectedTL(),
- 0, // No min transmit bitrate.
- absl::GetFlag(FLAGS_use_ulpfec),
- absl::GetFlag(FLAGS_use_flexfec),
- NumStreams() < 2, // Automatic quality scaling.
- Clip(),
- GetCaptureDevice()};
- params.audio = {
- absl::GetFlag(FLAGS_audio), absl::GetFlag(FLAGS_audio_video_sync),
- absl::GetFlag(FLAGS_audio_dtx), absl::GetFlag(FLAGS_use_real_adm)};
- params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()};
+ params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe);
+ params.call.generic_descriptor = absl::GetFlag(FLAGS_generic_descriptor);
+ params.call.call_bitrate_config = call_bitrate_config;
+
+ params.video[0].enabled = absl::GetFlag(FLAGS_video);
+ params.video[0].width = Width();
+ params.video[0].height = Height();
+ params.video[0].fps = Fps();
+ params.video[0].min_bitrate_bps = MinBitrateKbps() * 1000;
+ params.video[0].target_bitrate_bps = TargetBitrateKbps() * 1000;
+ params.video[0].max_bitrate_bps = MaxBitrateKbps() * 1000;
+ params.video[0].suspend_below_min_bitrate =
+ absl::GetFlag(FLAGS_suspend_below_min_bitrate);
+ params.video[0].codec = Codec();
+ params.video[0].num_temporal_layers = NumTemporalLayers();
+ params.video[0].selected_tl = SelectedTL();
+ params.video[0].min_transmit_bps = 0;
+ params.video[0].ulpfec = absl::GetFlag(FLAGS_use_ulpfec);
+ params.video[0].flexfec = absl::GetFlag(FLAGS_use_flexfec);
+ params.video[0].automatic_scaling = NumStreams() < 2;
+ params.video[0].clip_path = Clip();
+ params.video[0].capture_device_index = GetCaptureDevice();
+ params.audio.enabled = absl::GetFlag(FLAGS_audio);
+ params.audio.sync_video = absl::GetFlag(FLAGS_audio_video_sync);
+ params.audio.dtx = absl::GetFlag(FLAGS_audio_dtx);
+ params.audio.use_real_adm = absl::GetFlag(FLAGS_use_real_adm);
+ params.logging.rtc_event_log_name = RtcEventLogName();
+ params.logging.rtp_dump_name = RtpDumpName();
+ params.logging.encoded_frame_base_path = EncodedFramePath();
params.screenshare[0].enabled = false;
- params.analyzer = {"video", 0.0, 0.0, DurationSecs(),
- OutputFilename(), GraphTitle()};
+ params.analyzer.test_label = "video";
+ params.analyzer.test_durations_secs = DurationSecs();
+ params.analyzer.graph_data_output_filename = OutputFilename();
+ params.analyzer.graph_title = GraphTitle();
params.config = pipe_config;
if (NumStreams() > 1 && Stream0().empty() && Stream1().empty()) {
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index 94ce268fa9..d2f3f9b5b8 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -434,58 +434,6 @@ VideoQualityTest::VideoQualityTest(
std::move(injection_components_->network_controller_factory);
}
-VideoQualityTest::Params::Params()
- : call({false, false, BitrateConstraints(), 0}),
- video{{false,
- 640,
- 480,
- 30,
- 50,
- 800,
- 800,
- false,
- "VP8",
- 1,
- -1,
- 0,
- false,
- false,
- false,
- "",
- 0,
- {},
- 0.0},
- {false,
- 640,
- 480,
- 30,
- 50,
- 800,
- 800,
- false,
- "VP8",
- 1,
- -1,
- 0,
- false,
- false,
- false,
- "",
- 0,
- {},
- 0.0}},
- audio({false, false, false, false}),
- screenshare{{false, false, 10, 0}, {false, false, 10, 0}},
- analyzer({"", 0.0, 0.0, 0, "", ""}),
- config(absl::nullopt),
- ss{{std::vector<VideoStream>(), 0, 0, -1, InterLayerPredMode::kOn,
- std::vector<SpatialLayer>()},
- {std::vector<VideoStream>(), 0, 0, -1, InterLayerPredMode::kOn,
- std::vector<SpatialLayer>()}},
- logging({"", "", ""}) {}
-
-VideoQualityTest::Params::~Params() = default;
-
VideoQualityTest::InjectionComponents::InjectionComponents() = default;
VideoQualityTest::InjectionComponents::~InjectionComponents() = default;
@@ -815,11 +763,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
}
if (params_.call.generic_descriptor) {
- // The generic descriptor is currently behind a field trial, so it needs
- // to be set for this flag to have any effect.
- // TODO(philipel): Remove this check when the experiment is removed.
- RTC_CHECK(field_trial::IsEnabled("WebRTC-GenericDescriptor"));
-
video_send_configs_[video_idx].rtp.extensions.emplace_back(
RtpExtension::kGenericFrameDescriptorUri00,
kGenericFrameDescriptorExtensionId00);
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
index f1b3fc7b5b..b4c6ddf10f 100644
--- a/video/video_receive_stream.cc
+++ b/video/video_receive_stream.cc
@@ -494,7 +494,7 @@ bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
return false;
}
- rtc::CritScope cs(&playout_delay_lock_);
+ MutexLock lock(&playout_delay_lock_);
base_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
return true;
@@ -503,7 +503,7 @@ bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- rtc::CritScope cs(&playout_delay_lock_);
+ MutexLock lock(&playout_delay_lock_);
return base_minimum_playout_delay_ms_;
}
@@ -566,13 +566,13 @@ void VideoReceiveStream::OnCompleteFrame(
const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
+ MutexLock lock(&playout_delay_lock_);
frame_minimum_playout_delay_ms_ = playout_delay.min_ms;
UpdatePlayoutDelays();
}
if (playout_delay.max_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
+ MutexLock lock(&playout_delay_lock_);
frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
UpdatePlayoutDelays();
}
@@ -619,7 +619,7 @@ void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs(
void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
- rtc::CritScope cs(&playout_delay_lock_);
+ MutexLock lock(&playout_delay_lock_);
syncable_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
}
diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h
index c1ebf2b600..57329f4927 100644
--- a/video/video_receive_stream.h
+++ b/video/video_receive_stream.h
@@ -15,7 +15,6 @@
#include <vector>
#include "api/task_queue/task_queue_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
@@ -24,6 +23,7 @@
#include "modules/rtp_rtcp/source/source_tracker.h"
#include "modules/video_coding/frame_buffer2.h"
#include "modules/video_coding/video_receiver2.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "system_wrappers/include/clock.h"
@@ -206,7 +206,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
const int max_wait_for_keyframe_ms_;
const int max_wait_for_frame_ms_;
- rtc::CriticalSection playout_delay_lock_;
+ mutable Mutex playout_delay_lock_;
// All of them tries to change current min_playout_delay on |timing_| but
// source of the change request is different in each case. Among them the
diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc
index 510c2602c4..9413b72354 100644
--- a/video/video_receive_stream2.cc
+++ b/video/video_receive_stream2.cc
@@ -49,7 +49,7 @@
#include "system_wrappers/include/field_trial.h"
#include "video/call_stats2.h"
#include "video/frame_dumping_decoder.h"
-#include "video/receive_statistics_proxy.h"
+#include "video/receive_statistics_proxy2.h"
namespace webrtc {
@@ -201,7 +201,8 @@ VideoReceiveStream2::VideoReceiveStream2(
rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
timing_(timing),
video_receiver_(clock_, timing_.get()),
- rtp_video_stream_receiver_(clock_,
+ rtp_video_stream_receiver_(worker_thread_,
+ clock_,
&transport_adapter_,
call_stats->AsRtcpRttStats(),
packet_router,
@@ -232,7 +233,6 @@ VideoReceiveStream2::VideoReceiveStream2(
RTC_DCHECK(call_stats_);
module_process_sequence_checker_.Detach();
- network_sequence_checker_.Detach();
RTC_DCHECK(!config_.decoders.empty());
std::set<int> decoder_payload_types;
@@ -269,7 +269,6 @@ VideoReceiveStream2::~VideoReceiveStream2() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString();
Stop();
- task_safety_flag_->SetNotAlive();
}
void VideoReceiveStream2::SignalNetworkState(NetworkState state) {
@@ -473,8 +472,6 @@ bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
return false;
}
- // TODO(bugs.webrtc.org/11489): Consider posting to worker.
- rtc::CritScope cs(&playout_delay_lock_);
base_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
return true;
@@ -482,8 +479,6 @@ bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
-
- rtc::CritScope cs(&playout_delay_lock_);
return base_minimum_playout_delay_ms_;
}
@@ -491,7 +486,7 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) {
VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime());
worker_thread_->PostTask(
- ToQueuedTask(task_safety_flag_, [frame_meta, this]() {
+ ToQueuedTask(task_safety_, [frame_meta, this]() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
int64_t video_playout_ntp_ms;
int64_t sync_offset_ms;
@@ -523,18 +518,26 @@ void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer(
void VideoReceiveStream2::SendNack(
const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
RTC_DCHECK(buffering_allowed);
rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers);
}
void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) {
+ // Running on worker_sequence_checker_.
+ // Called from RtpVideoStreamReceiver (rtp_video_stream_receiver_ is
+ // ultimately responsible).
rtp_video_stream_receiver_.RequestKeyFrame();
- last_keyframe_request_ms_ = timestamp_ms;
+ decode_queue_.PostTask([this, timestamp_ms]() {
+ RTC_DCHECK_RUN_ON(&decode_queue_);
+ last_keyframe_request_ms_ = timestamp_ms;
+ });
}
void VideoReceiveStream2::OnCompleteFrame(
std::unique_ptr<video_coding::EncodedFrame> frame) {
- RTC_DCHECK_RUN_ON(&network_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
int64_t time_now_ms = clock_->TimeInMilliseconds();
if (last_complete_frame_time_ms_ > 0 &&
@@ -543,19 +546,13 @@ void VideoReceiveStream2::OnCompleteFrame(
}
last_complete_frame_time_ms_ = time_now_ms;
- // TODO(bugs.webrtc.org/11489): We grab the playout_delay_lock_ lock
- // potentially twice. Consider checking both min/max and posting to worker if
- // there's a change. If we always update playout delays on the worker, we
- // don't need a lock.
const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
frame_minimum_playout_delay_ms_ = playout_delay.min_ms;
UpdatePlayoutDelays();
}
if (playout_delay.max_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
UpdatePlayoutDelays();
}
@@ -603,22 +600,20 @@ void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs(
void VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- // TODO(bugs.webrtc.org/11489): See if we can't get rid of the
- // |playout_delay_lock_|
- rtc::CritScope cs(&playout_delay_lock_);
syncable_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
}
-int64_t VideoReceiveStream2::GetWaitMs() const {
+int64_t VideoReceiveStream2::GetMaxWaitMs() const {
return keyframe_required_ ? max_wait_for_keyframe_ms_
: max_wait_for_frame_ms_;
}
void VideoReceiveStream2::StartNextDecode() {
+ // Running on the decode thread.
TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode");
frame_buffer_->NextFrame(
- GetWaitMs(), keyframe_required_, &decode_queue_,
+ GetMaxWaitMs(), keyframe_required_, &decode_queue_,
/* encoded frame handler */
[this](std::unique_ptr<EncodedFrame> frame, ReturnReason res) {
RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout);
@@ -630,7 +625,12 @@ void VideoReceiveStream2::StartNextDecode() {
if (frame) {
HandleEncodedFrame(std::move(frame));
} else {
- HandleFrameBufferTimeout();
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ worker_thread_->PostTask(ToQueuedTask(
+ task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ HandleFrameBufferTimeout(now_ms, wait_ms);
+ }));
}
StartNextDecode();
});
@@ -650,25 +650,48 @@ void VideoReceiveStream2::HandleEncodedFrame(
}
}
stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp);
- HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey,
- now_ms);
+
+ bool force_request_key_frame = false;
+ int64_t decoded_frame_picture_id = -1;
+
+ const bool keyframe_request_is_due =
+ now_ms >= (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_);
+
int decode_result = video_receiver_.Decode(frame.get());
if (decode_result == WEBRTC_VIDEO_CODEC_OK ||
decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) {
keyframe_required_ = false;
frame_decoded_ = true;
- rtp_video_stream_receiver_.FrameDecoded(frame->id.picture_id);
+
+ decoded_frame_picture_id = frame->id.picture_id;
if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)
- RequestKeyFrame(now_ms);
+ force_request_key_frame = true;
} else if (!frame_decoded_ || !keyframe_required_ ||
- (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) {
+ keyframe_request_is_due) {
keyframe_required_ = true;
// TODO(philipel): Remove this keyframe request when downstream project
// has been fixed.
- RequestKeyFrame(now_ms);
+ force_request_key_frame = true;
}
+ bool received_frame_is_keyframe =
+ frame->FrameType() == VideoFrameType::kVideoFrameKey;
+
+ worker_thread_->PostTask(ToQueuedTask(
+ task_safety_,
+ [this, now_ms, received_frame_is_keyframe, force_request_key_frame,
+ decoded_frame_picture_id, keyframe_request_is_due]() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+
+ if (decoded_frame_picture_id != -1)
+ rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id);
+
+ HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms,
+ force_request_key_frame,
+ keyframe_request_is_due);
+ }));
+
if (encoded_frame_buffer_function_) {
frame->Retain();
encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame));
@@ -677,48 +700,58 @@ void VideoReceiveStream2::HandleEncodedFrame(
void VideoReceiveStream2::HandleKeyFrameGeneration(
bool received_frame_is_keyframe,
- int64_t now_ms) {
+ int64_t now_ms,
+ bool always_request_key_frame,
+ bool keyframe_request_is_due) {
+ // Running on worker_sequence_checker_.
+
+ bool request_key_frame = always_request_key_frame;
+
// Repeat sending keyframe requests if we've requested a keyframe.
- if (!keyframe_generation_requested_) {
- return;
- }
- if (received_frame_is_keyframe) {
- keyframe_generation_requested_ = false;
- } else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) {
- if (!IsReceivingKeyFrame(now_ms)) {
- RequestKeyFrame(now_ms);
+ if (keyframe_generation_requested_) {
+ if (received_frame_is_keyframe) {
+ keyframe_generation_requested_ = false;
+ } else if (keyframe_request_is_due) {
+ if (!IsReceivingKeyFrame(now_ms)) {
+ request_key_frame = true;
+ }
+ } else {
+ // It hasn't been long enough since the last keyframe request, do nothing.
}
- } else {
- // It hasn't been long enough since the last keyframe request, do nothing.
+ }
+
+ if (request_key_frame) {
+ // HandleKeyFrameGeneration is initated from the decode thread -
+ // RequestKeyFrame() triggers a call back to the decode thread.
+ // Perhaps there's a way to avoid that.
+ RequestKeyFrame(now_ms);
}
}
-void VideoReceiveStream2::HandleFrameBufferTimeout() {
- // Running on |decode_queue_|.
- int64_t now_ms = clock_->TimeInMilliseconds();
+void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms,
+ int64_t wait_ms) {
+ // Running on |worker_sequence_checker_|.
absl::optional<int64_t> last_packet_ms =
rtp_video_stream_receiver_.LastReceivedPacketMs();
// To avoid spamming keyframe requests for a stream that is not active we
// check if we have received a packet within the last 5 seconds.
- bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000;
- if (!stream_is_active) {
- worker_thread_->PostTask(ToQueuedTask(task_safety_flag_, [this]() {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- stats_proxy_.OnStreamInactive();
- }));
- }
+ const bool stream_is_active =
+ last_packet_ms && now_ms - *last_packet_ms < 5000;
+ if (!stream_is_active)
+ stats_proxy_.OnStreamInactive();
if (stream_is_active && !IsReceivingKeyFrame(now_ms) &&
(!config_.crypto_options.sframe.require_frame_encryption ||
rtp_video_stream_receiver_.IsDecryptable())) {
- RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs()
+ RTC_LOG(LS_WARNING) << "No decodable frame in " << wait_ms
<< " ms, requesting keyframe.";
RequestKeyFrame(now_ms);
}
}
bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
+ // Running on worker_sequence_checker_.
absl::optional<int64_t> last_keyframe_packet_ms =
rtp_video_stream_receiver_.LastReceivedKeyframePacketMs();
@@ -731,6 +764,7 @@ bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
}
void VideoReceiveStream2::UpdatePlayoutDelays() const {
+ // Running on worker_sequence_checker_.
const int minimum_delay_ms =
std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_,
syncable_minimum_playout_delay_ms_});
@@ -753,36 +787,43 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
bool generate_key_frame) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
rtc::Event event;
+
+ // Save old state, set the new state.
RecordingState old_state;
- decode_queue_.PostTask([this, &event, &old_state, generate_key_frame,
- state = std::move(state)] {
- RTC_DCHECK_RUN_ON(&decode_queue_);
- // Save old state.
- old_state.callback = std::move(encoded_frame_buffer_function_);
- old_state.keyframe_needed = keyframe_generation_requested_;
- old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
-
- // Set new state.
- encoded_frame_buffer_function_ = std::move(state.callback);
- if (generate_key_frame) {
- RequestKeyFrame(clock_->TimeInMilliseconds());
- keyframe_generation_requested_ = true;
- } else {
- keyframe_generation_requested_ = state.keyframe_needed;
- last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0);
- }
- event.Set();
- });
+
+ decode_queue_.PostTask(
+ [this, &event, &old_state, callback = std::move(state.callback),
+ generate_key_frame,
+ last_keyframe_request = state.last_keyframe_request_ms.value_or(0)] {
+ RTC_DCHECK_RUN_ON(&decode_queue_);
+ old_state.callback = std::move(encoded_frame_buffer_function_);
+ encoded_frame_buffer_function_ = std::move(callback);
+
+ old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
+ last_keyframe_request_ms_ = generate_key_frame
+ ? clock_->TimeInMilliseconds()
+ : last_keyframe_request;
+
+ event.Set();
+ });
+
+ old_state.keyframe_needed = keyframe_generation_requested_;
+
+ if (generate_key_frame) {
+ rtp_video_stream_receiver_.RequestKeyFrame();
+ keyframe_generation_requested_ = true;
+ } else {
+ keyframe_generation_requested_ = state.keyframe_needed;
+ }
+
event.Wait(rtc::Event::kForever);
return old_state;
}
void VideoReceiveStream2::GenerateKeyFrame() {
- decode_queue_.PostTask([this]() {
- RTC_DCHECK_RUN_ON(&decode_queue_);
- RequestKeyFrame(clock_->TimeInMilliseconds());
- keyframe_generation_requested_ = true;
- });
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RequestKeyFrame(clock_->TimeInMilliseconds());
+ keyframe_generation_requested_ = true;
}
} // namespace internal
diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h
index bbed08a7a6..71b336e587 100644
--- a/video/video_receive_stream2.h
+++ b/video/video_receive_stream2.h
@@ -15,7 +15,6 @@
#include <vector>
#include "api/task_queue/task_queue_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/units/timestamp.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
@@ -27,10 +26,11 @@
#include "modules/video_coding/video_receiver2.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "system_wrappers/include/clock.h"
#include "video/receive_statistics_proxy2.h"
#include "video/rtp_streams_synchronizer2.h"
-#include "video/rtp_video_stream_receiver.h"
+#include "video/rtp_video_stream_receiver2.h"
#include "video/transport_adapter.h"
#include "video/video_stream_decoder2.h"
@@ -158,24 +158,28 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
void GenerateKeyFrame() override;
private:
- int64_t GetWaitMs() const;
+ int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_);
void StartNextDecode() RTC_RUN_ON(decode_queue_);
void HandleEncodedFrame(std::unique_ptr<video_coding::EncodedFrame> frame)
RTC_RUN_ON(decode_queue_);
- void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_);
+ void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms)
+ RTC_RUN_ON(worker_sequence_checker_);
void UpdatePlayoutDelays() const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
- void RequestKeyFrame(int64_t timestamp_ms) RTC_RUN_ON(decode_queue_);
- void HandleKeyFrameGeneration(bool received_frame_is_keyframe, int64_t now_ms)
- RTC_RUN_ON(decode_queue_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_);
+ void RequestKeyFrame(int64_t timestamp_ms)
+ RTC_RUN_ON(worker_sequence_checker_);
+ void HandleKeyFrameGeneration(bool received_frame_is_keyframe,
+ int64_t now_ms,
+ bool always_request_key_frame,
+ bool keyframe_request_is_due)
+ RTC_RUN_ON(worker_sequence_checker_);
bool IsReceivingKeyFrame(int64_t timestamp_ms) const
- RTC_RUN_ON(decode_queue_);
+ RTC_RUN_ON(worker_sequence_checker_);
void UpdateHistograms();
SequenceChecker worker_sequence_checker_;
SequenceChecker module_process_sequence_checker_;
- SequenceChecker network_sequence_checker_;
TaskQueueFactory* const task_queue_factory_;
@@ -199,7 +203,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
std::unique_ptr<VCMTiming> timing_; // Jitter buffer experiment.
VideoReceiver2 video_receiver_;
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> incoming_video_stream_;
- RtpVideoStreamReceiver rtp_video_stream_receiver_;
+ RtpVideoStreamReceiver2 rtp_video_stream_receiver_;
std::unique_ptr<VideoStreamDecoder> video_stream_decoder_;
RtpStreamsSynchronizer rtp_stream_sync_;
@@ -216,47 +220,49 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
// Whenever we are in an undecodable state (stream has just started or due to
// a decoding error) we require a keyframe to restart the stream.
- bool keyframe_required_ = true;
+ bool keyframe_required_ RTC_GUARDED_BY(decode_queue_) = true;
// If we have successfully decoded any frame.
- bool frame_decoded_ = false;
+ bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false;
- int64_t last_keyframe_request_ms_ = 0;
- int64_t last_complete_frame_time_ms_ = 0;
+ int64_t last_keyframe_request_ms_ RTC_GUARDED_BY(decode_queue_) = 0;
+ int64_t last_complete_frame_time_ms_
+ RTC_GUARDED_BY(worker_sequence_checker_) = 0;
// Keyframe request intervals are configurable through field trials.
const int max_wait_for_keyframe_ms_;
const int max_wait_for_frame_ms_;
- rtc::CriticalSection playout_delay_lock_;
-
// All of them tries to change current min_playout_delay on |timing_| but
// source of the change request is different in each case. Among them the
// biggest delay is used. -1 means use default value from the |timing_|.
//
// Minimum delay as decided by the RTP playout delay extension.
- int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
+ int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ -1;
// Minimum delay as decided by the setLatency function in "webrtc/api".
- int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
- // Minimum delay as decided by the A/V synchronization feature.
- int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) =
+ int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
-1;
+ // Minimum delay as decided by the A/V synchronization feature.
+ int syncable_minimum_playout_delay_ms_
+ RTC_GUARDED_BY(worker_sequence_checker_) = -1;
// Maximum delay as decided by the RTP playout delay extension.
- int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
+ int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ -1;
// Function that is triggered with encoded frames, if not empty.
std::function<void(const RecordableEncodedFrame&)>
encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_);
// Set to true while we're requesting keyframes but not yet received one.
- bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false;
+ bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ false;
// Defined last so they are destroyed before all other members.
rtc::TaskQueue decode_queue_;
// Used to signal destruction to potentially pending tasks.
- PendingTaskSafetyFlag::Pointer task_safety_flag_ =
- PendingTaskSafetyFlag::Create();
+ ScopedTaskSafety task_safety_;
};
} // namespace internal
} // namespace webrtc
diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc
new file mode 100644
index 0000000000..17dc4de86b
--- /dev/null
+++ b/video/video_receive_stream2_unittest.cc
@@ -0,0 +1,570 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/video_receive_stream2.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/video/function_video_decoder_factory.h"
+#include "api/video_codecs/video_decoder.h"
+#include "call/rtp_stream_receiver_controller.h"
+#include "common_video/test/utilities.h"
+#include "media/base/fake_video_renderer.h"
+#include "modules/pacing/packet_router.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/encoded_frame.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/clock.h"
+#include "test/fake_decoder.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+#include "test/time_controller/simulated_time_controller.h"
+#include "test/video_decoder_proxy_factory.h"
+#include "video/call_stats2.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::_;
+using ::testing::ElementsAreArray;
+using ::testing::Invoke;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+constexpr int kDefaultTimeOutMs = 50;
+
+class MockTransport : public Transport {
+ public:
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
+};
+
+class MockVideoDecoder : public VideoDecoder {
+ public:
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
+ MOCK_METHOD(int32_t,
+ Decode,
+ (const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterDecodeCompleteCallback,
+ (DecodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
+ const char* ImplementationName() const { return "MockVideoDecoder"; }
+};
+
+class FrameObjectFake : public video_coding::EncodedFrame {
+ public:
+ void SetPayloadType(uint8_t payload_type) { _payloadType = payload_type; }
+
+ void SetRotation(const VideoRotation& rotation) { rotation_ = rotation; }
+
+ void SetNtpTime(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }
+
+ int64_t ReceivedTime() const override { return 0; }
+
+ int64_t RenderTime() const override { return _renderTimeMs; }
+};
+
+} // namespace
+
+class VideoReceiveStream2Test : public ::testing::Test {
+ public:
+ VideoReceiveStream2Test()
+ : process_thread_(ProcessThread::Create("TestThread")),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ config_(&mock_transport_),
+ call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()),
+ h264_decoder_factory_(&mock_h264_video_decoder_),
+ null_decoder_factory_(&mock_null_video_decoder_) {}
+
+ void SetUp() {
+ constexpr int kDefaultNumCpuCores = 2;
+ config_.rtp.remote_ssrc = 1111;
+ config_.rtp.local_ssrc = 2222;
+ config_.renderer = &fake_renderer_;
+ VideoReceiveStream::Decoder h264_decoder;
+ h264_decoder.payload_type = 99;
+ h264_decoder.video_format = SdpVideoFormat("H264");
+ h264_decoder.video_format.parameters.insert(
+ {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="});
+ h264_decoder.decoder_factory = &h264_decoder_factory_;
+ config_.decoders.push_back(h264_decoder);
+ VideoReceiveStream::Decoder null_decoder;
+ null_decoder.payload_type = 98;
+ null_decoder.video_format = SdpVideoFormat("null");
+ null_decoder.decoder_factory = &null_decoder_factory_;
+ config_.decoders.push_back(null_decoder);
+
+ clock_ = Clock::GetRealTimeClock();
+ timing_ = new VCMTiming(clock_);
+
+ video_receive_stream_ =
+ std::make_unique<webrtc::internal::VideoReceiveStream2>(
+ task_queue_factory_.get(), loop_.task_queue(),
+ &rtp_stream_receiver_controller_, kDefaultNumCpuCores,
+ &packet_router_, config_.Copy(), process_thread_.get(),
+ &call_stats_, clock_, timing_);
+ }
+
+ protected:
+ test::RunLoop loop_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ MockVideoDecoder mock_h264_video_decoder_;
+ MockVideoDecoder mock_null_video_decoder_;
+ test::VideoDecoderProxyFactory h264_decoder_factory_;
+ test::VideoDecoderProxyFactory null_decoder_factory_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ MockTransport mock_transport_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ std::unique_ptr<webrtc::internal::VideoReceiveStream2> video_receive_stream_;
+ Clock* clock_;
+ VCMTiming* timing_;
+};
+
+TEST_F(VideoReceiveStream2Test, CreateFrameFromH264FmtpSpropAndIdr) {
+ constexpr uint8_t idr_nalu[] = {0x05, 0xFF, 0xFF, 0xFF};
+ RtpPacketToSend rtppacket(nullptr);
+ uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu));
+ memcpy(payload, idr_nalu, sizeof(idr_nalu));
+ rtppacket.SetMarker(true);
+ rtppacket.SetSsrc(1111);
+ rtppacket.SetPayloadType(99);
+ rtppacket.SetSequenceNumber(1);
+ rtppacket.SetTimestamp(0);
+ rtc::Event init_decode_event_;
+ EXPECT_CALL(mock_h264_video_decoder_, InitDecode(_, _))
+ .WillOnce(Invoke([&init_decode_event_](const VideoCodec* config,
+ int32_t number_of_cores) {
+ init_decode_event_.Set();
+ return 0;
+ }));
+ EXPECT_CALL(mock_h264_video_decoder_, RegisterDecodeCompleteCallback(_));
+ video_receive_stream_->Start();
+ EXPECT_CALL(mock_h264_video_decoder_, Decode(_, false, _));
+ RtpPacketReceived parsed_packet;
+ ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size()));
+ rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet);
+ EXPECT_CALL(mock_h264_video_decoder_, Release());
+ // Make sure the decoder thread had a chance to run.
+ init_decode_event_.Wait(kDefaultTimeOutMs);
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelay) {
+ const PlayoutDelay kPlayoutDelayMs = {123, 321};
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+
+ // Check that the biggest minimum delay is chosen.
+ video_receive_stream_->SetMinimumPlayoutDelay(400);
+ EXPECT_EQ(400, timing_->min_playout_delay());
+
+ // Check base minimum delay validation.
+ EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345));
+ EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1));
+ EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500));
+ EXPECT_EQ(500, timing_->min_playout_delay());
+
+ // Check that intermidiate values are remembered and the biggest remembered
+ // is chosen.
+ video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0);
+ EXPECT_EQ(400, timing_->min_playout_delay());
+
+ video_receive_stream_->SetMinimumPlayoutDelay(0);
+ EXPECT_EQ(123, timing_->min_playout_delay());
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMaxValue) {
+ const int default_max_playout_latency = timing_->max_playout_delay();
+ const PlayoutDelay kPlayoutDelayMs = {123, -1};
+
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+
+ // Ensure that -1 preserves default maximum value from |timing_|.
+ EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+ EXPECT_EQ(default_max_playout_latency, timing_->max_playout_delay());
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMinValue) {
+ const int default_min_playout_latency = timing_->min_playout_delay();
+ const PlayoutDelay kPlayoutDelayMs = {-1, 321};
+
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+
+ // Ensure that -1 preserves default minimum value from |timing_|.
+ EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+ EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay());
+}
+
+class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test {
+ public:
+ VideoReceiveStream2TestWithFakeDecoder()
+ : fake_decoder_factory_(
+ []() { return std::make_unique<test::FakeDecoder>(); }),
+ process_thread_(ProcessThread::Create("TestThread")),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ config_(&mock_transport_),
+ call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {}
+
+ void SetUp() {
+ config_.rtp.remote_ssrc = 1111;
+ config_.rtp.local_ssrc = 2222;
+ config_.renderer = &fake_renderer_;
+ VideoReceiveStream::Decoder fake_decoder;
+ fake_decoder.payload_type = 99;
+ fake_decoder.video_format = SdpVideoFormat("VP8");
+ fake_decoder.decoder_factory = &fake_decoder_factory_;
+ config_.decoders.push_back(fake_decoder);
+ clock_ = Clock::GetRealTimeClock();
+ ReCreateReceiveStream(VideoReceiveStream::RecordingState());
+ }
+
+ void ReCreateReceiveStream(VideoReceiveStream::RecordingState state) {
+ constexpr int kDefaultNumCpuCores = 2;
+ video_receive_stream_ = nullptr;
+ timing_ = new VCMTiming(clock_);
+ video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream2(
+ task_queue_factory_.get(), loop_.task_queue(),
+ &rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_,
+ config_.Copy(), process_thread_.get(), &call_stats_, clock_, timing_));
+ video_receive_stream_->SetAndGetRecordingState(std::move(state), false);
+ }
+
+ protected:
+ test::RunLoop loop_;
+ test::FunctionVideoDecoderFactory fake_decoder_factory_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ MockTransport mock_transport_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ std::unique_ptr<webrtc::internal::VideoReceiveStream2> video_receive_stream_;
+ Clock* clock_;
+ VCMTiming* timing_;
+};
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesNtpTime) {
+ const int64_t kNtpTimestamp = 12345;
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ test_frame->SetNtpTime(kNtpTimestamp);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ EXPECT_EQ(kNtpTimestamp, fake_renderer_.ntp_time_ms());
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesRotation) {
+ const webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180;
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ test_frame->SetRotation(kRotation);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+
+ EXPECT_EQ(kRotation, fake_renderer_.rotation());
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesPacketInfos) {
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ RtpPacketInfos packet_infos = CreatePacketInfos(3);
+ test_frame->SetPacketInfos(packet_infos);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+
+ EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos));
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, RenderedFrameUpdatesGetSources) {
+ constexpr uint32_t kSsrc = 1111;
+ constexpr uint32_t kCsrc = 9001;
+ constexpr uint32_t kRtpTimestamp = 12345;
+
+ // Prepare one video frame with per-packet information.
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ RtpPacketInfos packet_infos;
+ {
+ RtpPacketInfos::vector_type infos;
+
+ RtpPacketInfo info;
+ info.set_ssrc(kSsrc);
+ info.set_csrcs({kCsrc});
+ info.set_rtp_timestamp(kRtpTimestamp);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 5000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 3000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 2000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 4000);
+ infos.push_back(info);
+
+ packet_infos = RtpPacketInfos(std::move(infos));
+ }
+ test_frame->SetPacketInfos(packet_infos);
+
+ // Start receive stream.
+ video_receive_stream_->Start();
+ EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty());
+
+ // Render one video frame.
+ int64_t timestamp_ms_min = clock_->TimeInMilliseconds();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ int64_t timestamp_ms_max = clock_->TimeInMilliseconds();
+
+ // Verify that the per-packet information is passed to the renderer.
+ EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos));
+
+ // Verify that the per-packet information also updates |GetSources()|.
+ std::vector<RtpSource> sources = video_receive_stream_->GetSources();
+ ASSERT_THAT(sources, SizeIs(2));
+ {
+ auto it = std::find_if(sources.begin(), sources.end(),
+ [](const RtpSource& source) {
+ return source.source_type() == RtpSourceType::SSRC;
+ });
+ ASSERT_NE(it, sources.end());
+
+ EXPECT_EQ(it->source_id(), kSsrc);
+ EXPECT_EQ(it->source_type(), RtpSourceType::SSRC);
+ EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
+ EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
+ EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
+ }
+ {
+ auto it = std::find_if(sources.begin(), sources.end(),
+ [](const RtpSource& source) {
+ return source.source_type() == RtpSourceType::CSRC;
+ });
+ ASSERT_NE(it, sources.end());
+
+ EXPECT_EQ(it->source_id(), kCsrc);
+ EXPECT_EQ(it->source_type(), RtpSourceType::CSRC);
+ EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
+ EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
+ EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
+ }
+}
+
+std::unique_ptr<FrameObjectFake> MakeFrame(VideoFrameType frame_type,
+ int picture_id) {
+ auto frame = std::make_unique<FrameObjectFake>();
+ frame->SetPayloadType(99);
+ frame->id.picture_id = picture_id;
+ frame->SetFrameType(frame_type);
+ return frame;
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder,
+ PassesFrameWhenEncodedFramesCallbackSet) {
+ testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
+ video_receive_stream_->Start();
+ // Expect a keyframe request to be generated
+ EXPECT_CALL(mock_transport_, SendRtcp);
+ EXPECT_CALL(callback, Call);
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
+ video_receive_stream_->OnCompleteFrame(
+ MakeFrame(VideoFrameType::kVideoFrameKey, 0));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ video_receive_stream_->Stop();
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder,
+ MovesEncodedFrameDispatchStateWhenReCreating) {
+ testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
+ video_receive_stream_->Start();
+ // Expect a key frame request over RTCP.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1);
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
+ video_receive_stream_->Stop();
+ VideoReceiveStream::RecordingState old_state =
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(), false);
+ ReCreateReceiveStream(std::move(old_state));
+ video_receive_stream_->Stop();
+}
+
+class VideoReceiveStream2TestWithSimulatedClock : public ::testing::Test {
+ public:
+ class FakeDecoder2 : public test::FakeDecoder {
+ public:
+ explicit FakeDecoder2(std::function<void()> decode_callback)
+ : callback_(decode_callback) {}
+
+ int32_t Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) override {
+ int32_t result =
+ FakeDecoder::Decode(input, missing_frames, render_time_ms);
+ callback_();
+ return result;
+ }
+
+ private:
+ std::function<void()> callback_;
+ };
+
+ static VideoReceiveStream::Config GetConfig(
+ Transport* transport,
+ VideoDecoderFactory* decoder_factory,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* renderer) {
+ VideoReceiveStream::Config config(transport);
+ config.rtp.remote_ssrc = 1111;
+ config.rtp.local_ssrc = 2222;
+ config.renderer = renderer;
+ VideoReceiveStream::Decoder fake_decoder;
+ fake_decoder.payload_type = 99;
+ fake_decoder.video_format = SdpVideoFormat("VP8");
+ fake_decoder.decoder_factory = decoder_factory;
+ config.decoders.push_back(fake_decoder);
+ return config;
+ }
+
+ VideoReceiveStream2TestWithSimulatedClock()
+ : time_controller_(Timestamp::Millis(4711)),
+ fake_decoder_factory_([this] {
+ return std::make_unique<FakeDecoder2>([this] { OnFrameDecoded(); });
+ }),
+ process_thread_(time_controller_.CreateProcessThread("ProcessThread")),
+ config_(GetConfig(&mock_transport_,
+ &fake_decoder_factory_,
+ &fake_renderer_)),
+ call_stats_(time_controller_.GetClock(), loop_.task_queue()),
+ video_receive_stream_(time_controller_.GetTaskQueueFactory(),
+ loop_.task_queue(),
+ &rtp_stream_receiver_controller_,
+ /*num_cores=*/2,
+ &packet_router_,
+ config_.Copy(),
+ process_thread_.get(),
+ &call_stats_,
+ time_controller_.GetClock(),
+ new VCMTiming(time_controller_.GetClock())) {
+ video_receive_stream_.Start();
+ }
+
+ void OnFrameDecoded() { event_->Set(); }
+
+ void PassEncodedFrameAndWait(
+ std::unique_ptr<video_coding::EncodedFrame> frame) {
+ event_ = std::make_unique<rtc::Event>();
+ // This call will eventually end up in the Decoded method where the
+ // event is set.
+ video_receive_stream_.OnCompleteFrame(std::move(frame));
+ event_->Wait(rtc::Event::kForever);
+ }
+
+ protected:
+ GlobalSimulatedTimeController time_controller_;
+ test::RunLoop loop_;
+ test::FunctionVideoDecoderFactory fake_decoder_factory_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ MockTransport mock_transport_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ webrtc::internal::VideoReceiveStream2 video_receive_stream_;
+ std::unique_ptr<rtc::Event> event_;
+};
+
+TEST_F(VideoReceiveStream2TestWithSimulatedClock,
+ RequestsKeyFramesUntilKeyFrameReceived) {
+ auto tick = TimeDelta::Millis(
+ internal::VideoReceiveStream2::kMaxWaitForKeyFrameMs / 2);
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() {
+ loop_.Quit();
+ return 0;
+ }));
+ video_receive_stream_.GenerateKeyFrame();
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 0));
+ time_controller_.AdvanceTime(tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 1));
+ loop_.Run();
+ testing::Mock::VerifyAndClearExpectations(&mock_transport_);
+
+ // T+200ms: still no key frame received, expect key frame request sent again.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() {
+ loop_.Quit();
+ return 0;
+ }));
+ time_controller_.AdvanceTime(tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 2));
+ loop_.Run();
+ testing::Mock::VerifyAndClearExpectations(&mock_transport_);
+
+ // T+200ms: now send a key frame - we should not observe new key frame
+ // requests after this.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(0);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameKey, 3));
+ time_controller_.AdvanceTime(2 * tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 4));
+ loop_.PostTask([this]() { loop_.Quit(); });
+ loop_.Run();
+}
+
+} // namespace webrtc
diff --git a/video/video_receive_stream_unittest.cc b/video/video_receive_stream_unittest.cc
index 54896e89d8..abdabfb0be 100644
--- a/video/video_receive_stream_unittest.cc
+++ b/video/video_receive_stream_unittest.cc
@@ -25,7 +25,6 @@
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/encoded_frame.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/clock.h"
#include "test/fake_decoder.h"
@@ -49,24 +48,30 @@ constexpr int kDefaultTimeOutMs = 50;
class MockTransport : public Transport {
public:
- MOCK_METHOD3(SendRtp,
- bool(const uint8_t* packet,
- size_t length,
- const PacketOptions& options));
- MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length));
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};
class MockVideoDecoder : public VideoDecoder {
public:
- MOCK_METHOD2(InitDecode,
- int32_t(const VideoCodec* config, int32_t number_of_cores));
- MOCK_METHOD3(Decode,
- int32_t(const EncodedImage& input,
- bool missing_frames,
- int64_t render_time_ms));
- MOCK_METHOD1(RegisterDecodeCompleteCallback,
- int32_t(DecodedImageCallback* callback));
- MOCK_METHOD0(Release, int32_t(void));
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
+ MOCK_METHOD(int32_t,
+ Decode,
+ (const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterDecodeCompleteCallback,
+ (DecodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
const char* ImplementationName() const { return "MockVideoDecoder"; }
};
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index bc9a0cd5f3..30ed86dbd1 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -179,6 +179,18 @@ void VideoSendStream::Stop() {
worker_queue_->PostTask([send_stream] { send_stream->Stop(); });
}
+void VideoSendStream::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ video_stream_encoder_->AddAdaptationResource(resource);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+VideoSendStream::GetAdaptationResources() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return video_stream_encoder_->GetAdaptationResources();
+}
+
void VideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) {
diff --git a/video/video_send_stream.h b/video/video_send_stream.h
index addaee49c2..e10f4ad59b 100644
--- a/video/video_send_stream.h
+++ b/video/video_send_stream.h
@@ -20,7 +20,6 @@
#include "call/bitrate_allocator.h"
#include "call/video_receive_stream.h"
#include "call/video_send_stream.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
@@ -79,6 +78,9 @@ class VideoSendStream : public webrtc::VideoSendStream {
void Start() override;
void Stop() override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
+
void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) override;
diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc
index 03c9613ab4..712af87a0c 100644
--- a/video/video_send_stream_impl.cc
+++ b/video/video_send_stream_impl.cc
@@ -92,17 +92,26 @@ int CalculateMaxPadBitrateBps(const std::vector<VideoStream>& streams,
const double hysteresis_factor =
RateControlSettings::ParseFromFieldTrials()
.GetSimulcastHysteresisFactor(content_type);
- const size_t top_active_stream_idx = active_streams.size() - 1;
- pad_up_to_bitrate_bps = std::min(
- static_cast<int>(
- hysteresis_factor *
- active_streams[top_active_stream_idx].min_bitrate_bps +
- 0.5),
- active_streams[top_active_stream_idx].target_bitrate_bps);
-
- // Add target_bitrate_bps of the lower active streams.
- for (size_t i = 0; i < top_active_stream_idx; ++i) {
- pad_up_to_bitrate_bps += active_streams[i].target_bitrate_bps;
+ if (is_svc) {
+ // For SVC, since there is only one "stream", the padding bitrate
+ // needed to enable the top spatial layer is stored in the
+ // |target_bitrate_bps| field.
+ // TODO(sprang): This behavior needs to die.
+ pad_up_to_bitrate_bps = static_cast<int>(
+ hysteresis_factor * active_streams[0].target_bitrate_bps + 0.5);
+ } else {
+ const size_t top_active_stream_idx = active_streams.size() - 1;
+ pad_up_to_bitrate_bps = std::min(
+ static_cast<int>(
+ hysteresis_factor *
+ active_streams[top_active_stream_idx].min_bitrate_bps +
+ 0.5),
+ active_streams[top_active_stream_idx].target_bitrate_bps);
+
+ // Add target_bitrate_bps of the lower active streams.
+ for (size_t i = 0; i < top_active_stream_idx; ++i) {
+ pad_up_to_bitrate_bps += active_streams[i].target_bitrate_bps;
+ }
}
}
} else if (!active_streams.empty() && pad_to_min_bitrate) {
diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h
index 8f30b630be..834fed4693 100644
--- a/video/video_send_stream_impl.h
+++ b/video/video_send_stream_impl.h
@@ -35,8 +35,8 @@
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/include/video_codec_interface.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
@@ -164,7 +164,7 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
RtpTransportControllerSendInterface* const transport_;
BitrateAllocatorInterface* const bitrate_allocator_;
- rtc::CriticalSection ivf_writers_crit_;
+ Mutex ivf_writers_mutex_;
bool disable_padding_;
int max_padding_bitrate_;
diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc
index 532e035e2b..bb702ba270 100644
--- a/video/video_send_stream_impl_unittest.cc
+++ b/video/video_send_stream_impl_unittest.cc
@@ -10,6 +10,7 @@
#include "video/video_send_stream_impl.h"
+#include <algorithm>
#include <memory>
#include <string>
@@ -43,6 +44,8 @@ bool operator==(const BitrateAllocationUpdate& a,
namespace internal {
namespace {
using ::testing::_;
+using ::testing::AllOf;
+using ::testing::Field;
using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Return;
@@ -58,33 +61,45 @@ std::string GetAlrProbingExperimentString() {
}
class MockRtpVideoSender : public RtpVideoSenderInterface {
public:
- MOCK_METHOD1(RegisterProcessThread, void(ProcessThread*));
- MOCK_METHOD0(DeRegisterProcessThread, void());
- MOCK_METHOD1(SetActive, void(bool));
- MOCK_METHOD1(SetActiveModules, void(const std::vector<bool>));
- MOCK_METHOD0(IsActive, bool());
- MOCK_METHOD1(OnNetworkAvailability, void(bool));
- MOCK_CONST_METHOD0(GetRtpStates, std::map<uint32_t, RtpState>());
- MOCK_CONST_METHOD0(GetRtpPayloadStates,
- std::map<uint32_t, RtpPayloadState>());
- MOCK_METHOD2(DeliverRtcp, void(const uint8_t*, size_t));
- MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&));
- MOCK_METHOD3(OnEncodedImage,
- EncodedImageCallback::Result(const EncodedImage&,
- const CodecSpecificInfo*,
- const RTPFragmentationHeader*));
- MOCK_METHOD1(OnTransportOverheadChanged, void(size_t));
- MOCK_METHOD1(OnOverheadChanged, void(size_t));
- MOCK_METHOD2(OnBitrateUpdated, void(BitrateAllocationUpdate, int));
- MOCK_CONST_METHOD0(GetPayloadBitrateBps, uint32_t());
- MOCK_CONST_METHOD0(GetProtectionBitrateBps, uint32_t());
- MOCK_METHOD3(SetEncodingData, void(size_t, size_t, size_t));
- MOCK_CONST_METHOD2(GetSentRtpPacketInfos,
- std::vector<RtpSequenceNumberMap::Info>(
- uint32_t ssrc,
- rtc::ArrayView<const uint16_t> sequence_numbers));
-
- MOCK_METHOD1(SetFecAllowed, void(bool fec_allowed));
+ MOCK_METHOD(void, RegisterProcessThread, (ProcessThread*), (override));
+ MOCK_METHOD(void, DeRegisterProcessThread, (), (override));
+ MOCK_METHOD(void, SetActive, (bool), (override));
+ MOCK_METHOD(void, SetActiveModules, (const std::vector<bool>), (override));
+ MOCK_METHOD(bool, IsActive, (), (override));
+ MOCK_METHOD(void, OnNetworkAvailability, (bool), (override));
+ MOCK_METHOD((std::map<uint32_t, RtpState>),
+ GetRtpStates,
+ (),
+ (const, override));
+ MOCK_METHOD((std::map<uint32_t, RtpPayloadState>),
+ GetRtpPayloadStates,
+ (),
+ (const, override));
+ MOCK_METHOD(void, DeliverRtcp, (const uint8_t*, size_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoBitrateAllocation&),
+ (override));
+ MOCK_METHOD(EncodedImageCallback::Result,
+ OnEncodedImage,
+ (const EncodedImage&,
+ const CodecSpecificInfo*,
+ const RTPFragmentationHeader*),
+ (override));
+ MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateUpdated,
+ (BitrateAllocationUpdate, int),
+ (override));
+ MOCK_METHOD(uint32_t, GetPayloadBitrateBps, (), (const, override));
+ MOCK_METHOD(uint32_t, GetProtectionBitrateBps, (), (const, override));
+ MOCK_METHOD(void, SetEncodingData, (size_t, size_t, size_t), (override));
+ MOCK_METHOD(std::vector<RtpSequenceNumberMap::Info>,
+ GetSentRtpPacketInfos,
+ (uint32_t ssrc, rtc::ArrayView<const uint16_t> sequence_numbers),
+ (const, override));
+
+ MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override));
};
BitrateAllocationUpdate CreateAllocation(int bitrate_bps) {
@@ -895,112 +910,114 @@ TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) {
ASSERT_TRUE(done.Wait(5000));
}
-TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvcWithAlr) {
- test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- config_.periodic_alr_bandwidth_probing = true;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->Start();
-
- // Svc
- VideoStream stream;
- stream.width = 1920;
- stream.height = 1080;
- stream.max_framerate = 30;
- stream.min_bitrate_bps = 60000;
- stream.target_bitrate_bps = 6000000;
- stream.max_bitrate_bps = 1250000;
- stream.num_temporal_layers = 2;
- stream.max_qp = 56;
- stream.bitrate_priority = 1;
-
- int min_transmit_bitrate_bps = 400000;
-
- config_.rtp.ssrcs.emplace_back(1);
- config_.rtp.ssrcs.emplace_back(2);
-
- EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
- .WillRepeatedly(Invoke([&](BitrateAllocatorObserver*,
- MediaStreamAllocationConfig config) {
- EXPECT_EQ(config.min_bitrate_bps,
- static_cast<uint32_t>(stream.min_bitrate_bps));
- EXPECT_EQ(config.max_bitrate_bps,
- static_cast<uint32_t>(stream.max_bitrate_bps));
- if (config.pad_up_bitrate_bps != 0) {
- EXPECT_EQ(config.pad_up_bitrate_bps,
- static_cast<uint32_t>(min_transmit_bitrate_bps));
- }
- EXPECT_EQ(config.enforce_min_bitrate, !kSuspend);
- }));
-
- static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get())
- ->OnEncoderConfigurationChanged(
- std::vector<VideoStream>{stream}, true,
- VideoEncoderConfig::ContentType::kScreen,
- min_transmit_bitrate_bps);
- vss_impl->Stop();
- },
- RTC_FROM_HERE);
-}
-
-TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvcNoAlr) {
- test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- config_.periodic_alr_bandwidth_probing = false;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->Start();
-
- // Svc
- VideoStream stream;
- stream.width = 1920;
- stream.height = 1080;
- stream.max_framerate = 30;
- stream.min_bitrate_bps = 60000;
- stream.target_bitrate_bps = 6000000;
- stream.max_bitrate_bps = 1250000;
- stream.num_temporal_layers = 2;
- stream.max_qp = 56;
- stream.bitrate_priority = 1;
-
- int min_transmit_bitrate_bps = 400000;
-
- config_.rtp.ssrcs.emplace_back(1);
- config_.rtp.ssrcs.emplace_back(2);
+TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) {
+ struct TestConfig {
+ bool screenshare = false;
+ bool alr = false;
+ int min_padding_bitrate_bps = 0;
+ };
+
+ std::vector<TestConfig> test_variants;
+ for (bool screenshare : {false, true}) {
+ for (bool alr : {false, true}) {
+ for (int min_padding : {0, 400000}) {
+ test_variants.push_back({screenshare, alr, min_padding});
+ }
+ }
+ }
- EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
- .WillRepeatedly(Invoke([&](BitrateAllocatorObserver*,
- MediaStreamAllocationConfig config) {
- EXPECT_EQ(config.min_bitrate_bps,
- static_cast<uint32_t>(stream.min_bitrate_bps));
- EXPECT_EQ(config.max_bitrate_bps,
- static_cast<uint32_t>(stream.max_bitrate_bps));
- if (config.pad_up_bitrate_bps != 0) {
- EXPECT_EQ(config.pad_up_bitrate_bps,
- static_cast<uint32_t>(stream.target_bitrate_bps));
- }
- EXPECT_EQ(config.enforce_min_bitrate, !kSuspend);
- }));
+ for (const TestConfig& test_config : test_variants) {
+ test_queue_.SendTask(
+ [this, test_config] {
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ config_.rtp.extensions.emplace_back(
+ RtpExtension::kTransportSequenceNumberUri, 1);
+ config_.periodic_alr_bandwidth_probing = test_config.alr;
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ test_config.screenshare
+ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo);
+ vss_impl->Start();
+
+ // Svc
+ VideoStream stream;
+ stream.width = 1920;
+ stream.height = 1080;
+ stream.max_framerate = 30;
+ stream.min_bitrate_bps = 60000;
+ stream.target_bitrate_bps = 6000000;
+ stream.max_bitrate_bps = 1250000;
+ stream.num_temporal_layers = 2;
+ stream.max_qp = 56;
+ stream.bitrate_priority = 1;
+
+ config_.rtp.ssrcs.emplace_back(1);
+ config_.rtp.ssrcs.emplace_back(2);
+
+ EXPECT_CALL(
+ bitrate_allocator_,
+ AddObserver(
+ vss_impl.get(),
+ AllOf(Field(&MediaStreamAllocationConfig::min_bitrate_bps,
+ static_cast<uint32_t>(stream.min_bitrate_bps)),
+ Field(&MediaStreamAllocationConfig::max_bitrate_bps,
+ static_cast<uint32_t>(stream.max_bitrate_bps)),
+ // Stream not yet active - no padding.
+ Field(&MediaStreamAllocationConfig::pad_up_bitrate_bps,
+ 0u),
+ Field(&MediaStreamAllocationConfig::enforce_min_bitrate,
+ !kSuspend))));
+
+ static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get())
+ ->OnEncoderConfigurationChanged(
+ std::vector<VideoStream>{stream}, true,
+ test_config.screenshare
+ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo,
+ test_config.min_padding_bitrate_bps);
+ ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
+
+ // Simulate an encoded image, this will turn the stream active and
+ // enable padding.
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_specific;
+ EXPECT_CALL(rtp_video_sender_, OnEncodedImage)
+ .WillRepeatedly(Return(EncodedImageCallback::Result(
+ EncodedImageCallback::Result::OK)));
+
+ // Screensharing implicitly forces ALR.
+ const bool using_alr = test_config.alr || test_config.screenshare;
+ // If ALR is used, pads only to min bitrate as rampup is handled by
+ // probing. Otherwise target_bitrate contains the padding target.
+ int expected_padding =
+ using_alr ? stream.min_bitrate_bps : stream.target_bitrate_bps;
+ // Min padding bitrate may override padding target.
+ expected_padding =
+ std::max(expected_padding, test_config.min_padding_bitrate_bps);
+ EXPECT_CALL(
+ bitrate_allocator_,
+ AddObserver(
+ vss_impl.get(),
+ AllOf(Field(&MediaStreamAllocationConfig::min_bitrate_bps,
+ static_cast<uint32_t>(stream.min_bitrate_bps)),
+ Field(&MediaStreamAllocationConfig::max_bitrate_bps,
+ static_cast<uint32_t>(stream.max_bitrate_bps)),
+ // Stream now active - min bitrate use as padding target
+ // when ALR is active.
+ Field(&MediaStreamAllocationConfig::pad_up_bitrate_bps,
+ expected_padding),
+ Field(&MediaStreamAllocationConfig::enforce_min_bitrate,
+ !kSuspend))));
+ static_cast<EncodedImageCallback*>(vss_impl.get())
+ ->OnEncodedImage(encoded_image, &codec_specific, nullptr);
+ ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
- static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get())
- ->OnEncoderConfigurationChanged(
- std::vector<VideoStream>{stream}, true,
- VideoEncoderConfig::ContentType::kScreen,
- min_transmit_bitrate_bps);
- vss_impl->Stop();
- },
- RTC_FROM_HERE);
+ vss_impl->Stop();
+ },
+ RTC_FROM_HERE);
+ }
}
} // namespace internal
} // namespace webrtc
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index e38653831b..64a1a55710 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -25,20 +25,20 @@
#include "call/simulated_network.h"
#include "call/video_send_stream.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/checks.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/rate_limiter.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/to_queued_task.h"
@@ -948,7 +948,7 @@ void VideoSendStreamTest::TestNackRetransmission(
non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount,
non_padding_sequence_numbers_.end());
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = transport_adapter_.get();
config.rtcp_report_interval_ms = kRtcpIntervalMs;
@@ -1140,7 +1140,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
fec_packet_received_ = false;
++current_size_rtp_;
- rtc::CritScope lock(&mutex_);
+ MutexLock lock(&mutex_);
++current_size_frame_;
}
}
@@ -1164,7 +1164,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
kVideoSendSsrcs[0], rtp_packet.SequenceNumber(),
packets_lost_, // Cumulative lost.
loss_ratio); // Loss percent.
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.receive_statistics = &lossy_receive_stats;
config.outgoing_transport = transport_adapter_.get();
@@ -1182,7 +1182,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
}
void UpdateConfiguration() {
- rtc::CritScope lock(&mutex_);
+ MutexLock lock(&mutex_);
// Increase frame size for next encoded frame, in the context of the
// encoder thread.
if (!use_fec_ && current_size_frame_ < static_cast<int32_t>(stop_size_)) {
@@ -1247,7 +1247,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
bool fec_packet_received_;
size_t current_size_rtp_;
- rtc::CriticalSection mutex_;
+ Mutex mutex_;
int current_size_frame_ RTC_GUARDED_BY(mutex_);
};
@@ -1296,7 +1296,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
: remb_observer_(remb_observer) {}
void OnFrame(const VideoFrame&) {
- rtc::CritScope lock(&remb_observer_->crit_);
+ MutexLock lock(&remb_observer_->mutex_);
if (remb_observer_->test_state_ == kDuringSuspend &&
++remb_observer_->suspended_frame_count_ > kSuspendTimeFrames) {
VideoSendStream::Stats stats = remb_observer_->stream_->GetStats();
@@ -1324,7 +1324,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++rtp_count_;
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
@@ -1361,12 +1361,12 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
}
void set_low_remb_bps(int value) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
low_remb_bps_ = value;
}
void set_high_remb_bps(int value) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
high_remb_bps_ = value;
}
@@ -1413,10 +1413,10 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
};
virtual void SendRtcpFeedback(int remb_value)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0],
last_sequence_number_, rtp_count_, 0);
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = clock_;
config.receive_statistics = &receive_stats;
config.outgoing_transport = transport_adapter_.get();
@@ -1438,13 +1438,13 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
CaptureObserver capture_observer_;
VideoSendStream* stream_;
- rtc::CriticalSection crit_;
- TestState test_state_ RTC_GUARDED_BY(crit_);
- int rtp_count_ RTC_GUARDED_BY(crit_);
- int last_sequence_number_ RTC_GUARDED_BY(crit_);
- int suspended_frame_count_ RTC_GUARDED_BY(crit_);
- int low_remb_bps_ RTC_GUARDED_BY(crit_);
- int high_remb_bps_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ TestState test_state_ RTC_GUARDED_BY(mutex_);
+ int rtp_count_ RTC_GUARDED_BY(mutex_);
+ int last_sequence_number_ RTC_GUARDED_BY(mutex_);
+ int suspended_frame_count_ RTC_GUARDED_BY(mutex_);
+ int low_remb_bps_ RTC_GUARDED_BY(mutex_);
+ int high_remb_bps_ RTC_GUARDED_BY(mutex_);
} test;
RunBaseTest(&test);
@@ -1462,7 +1462,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
last_packet_time_ms_ = clock_->TimeInMilliseconds();
RtpPacket rtp_packet;
@@ -1490,7 +1490,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
const int kNoPacketsThresholdMs = 2000;
if (test_state_ == kWaitingForNoPackets &&
(last_packet_time_ms_ &&
@@ -1513,7 +1513,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
capturer_ = frame_generator_capturer;
}
@@ -1532,9 +1532,9 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
TestState test_state_ = kBeforeStopCapture;
Clock* const clock_;
- rtc::CriticalSection crit_;
- absl::optional<int64_t> last_packet_time_ms_ RTC_GUARDED_BY(crit_);
- test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(crit_);
+ Mutex mutex_;
+ absl::optional<int64_t> last_packet_time_ms_ RTC_GUARDED_BY(mutex_);
+ test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(mutex_);
} test;
RunBaseTest(&test);
@@ -1557,7 +1557,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) {
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
rtp_packet.Parse(packet, length);
@@ -1597,16 +1597,16 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) {
// rid of this.
SleepMs(5000);
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Expect padding to be a small percentage of total bytes sent.
EXPECT_LT(padding_length_, .1 * total_length_);
}
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
Clock* const clock_;
- size_t padding_length_ RTC_GUARDED_BY(crit_);
- size_t total_length_ RTC_GUARDED_BY(crit_);
+ size_t padding_length_ RTC_GUARDED_BY(mutex_);
+ size_t total_length_ RTC_GUARDED_BY(mutex_);
Call* call_;
} test;
@@ -1627,12 +1627,18 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
static const int kRembRespectedBitrateBps = 100000;
class BitrateObserver : public test::SendTest {
public:
- BitrateObserver()
+ explicit BitrateObserver(TaskQueueBase* task_queue)
: SendTest(kDefaultTimeoutMs),
+ task_queue_(task_queue),
retranmission_rate_limiter_(Clock::GetRealTimeClock(), 1000),
stream_(nullptr),
bitrate_capped_(false) {}
+ ~BitrateObserver() override {
+ // Make sure we free |rtp_rtcp_| in the same context as we constructed it.
+ SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; });
+ }
+
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (RtpHeaderParser::IsRtcp(packet, length))
@@ -1667,11 +1673,11 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = feedback_transport_.get();
config.retransmission_rate_limiter = &retranmission_rate_limiter_;
- rtp_rtcp_ = RtpRtcp::Create(config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
@@ -1690,12 +1696,13 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
<< "Timeout while waiting for low bitrate stats after REMB.";
}
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ TaskQueueBase* const task_queue_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<internal::TransportAdapter> feedback_transport_;
RateLimiter retranmission_rate_limiter_;
VideoSendStream* stream_;
bool bitrate_capped_;
- } test;
+ } test(task_queue());
RunBaseTest(&test);
}
@@ -1939,7 +1946,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
EXPECT_LE(length, kMaxRtpPacketSize);
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (++packets_sent_ < 100)
return SEND_PACKET;
observation_complete_.Set();
@@ -1963,7 +1970,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) {
EXPECT_TRUE(Wait());
{
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
packets_sent_ = 0;
}
@@ -1979,7 +1986,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) {
private:
TaskQueueBase* const task_queue_;
Call* call_;
- rtc::CriticalSection lock_;
+ Mutex lock_;
int packets_sent_ RTC_GUARDED_BY(lock_);
int transport_overhead_;
const size_t kMaxRtpPacketSize = 1000;
@@ -2155,7 +2162,7 @@ TEST_F(VideoSendStreamTest,
void WaitForResolution(int width, int height) {
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (last_initialized_frame_width_ == width &&
last_initialized_frame_height_ == height) {
return;
@@ -2164,7 +2171,7 @@ TEST_F(VideoSendStreamTest,
EXPECT_TRUE(
init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_EQ(width, last_initialized_frame_width_);
EXPECT_EQ(height, last_initialized_frame_height_);
}
@@ -2173,7 +2180,7 @@ TEST_F(VideoSendStreamTest,
private:
int32_t InitEncode(const VideoCodec* config,
const Settings& settings) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
last_initialized_frame_width_ = config->width;
last_initialized_frame_height_ = config->height;
++number_of_initializations_;
@@ -2188,11 +2195,11 @@ TEST_F(VideoSendStreamTest,
return 0;
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
rtc::Event init_encode_called_;
- size_t number_of_initializations_ RTC_GUARDED_BY(&crit_);
- int last_initialized_frame_width_ RTC_GUARDED_BY(&crit_);
- int last_initialized_frame_height_ RTC_GUARDED_BY(&crit_);
+ size_t number_of_initializations_ RTC_GUARDED_BY(&mutex_);
+ int last_initialized_frame_width_ RTC_GUARDED_BY(&mutex_);
+ int last_initialized_frame_height_ RTC_GUARDED_BY(&mutex_);
};
test::NullTransport transport;
@@ -2231,21 +2238,21 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
: FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {}
int32_t InitEncode(const VideoCodec* config,
const Settings& settings) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
start_bitrate_kbps_ = config->startBitrate;
start_bitrate_changed_.Set();
return FakeEncoder::InitEncode(config, settings);
}
void SetRates(const RateControlParameters& parameters) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
start_bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
start_bitrate_changed_.Set();
FakeEncoder::SetRates(parameters);
}
int GetStartBitrateKbps() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return start_bitrate_kbps_;
}
@@ -2255,9 +2262,9 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
}
private:
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
rtc::Event start_bitrate_changed_;
- int start_bitrate_kbps_ RTC_GUARDED_BY(crit_);
+ int start_bitrate_kbps_ RTC_GUARDED_BY(mutex_);
};
CreateSenderCall();
@@ -2273,13 +2280,15 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
StartBitrateObserver encoder;
test::VideoEncoderProxyFactory encoder_factory(&encoder);
- // Since this test does not use a capturer, set |internal_source| = true.
- // Encoder configuration is otherwise updated on the next video frame.
- encoder_factory.SetHasInternalSource(true);
GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory;
CreateVideoStreams();
+ // Start capturing and encoding frames to force encoder reconfiguration.
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+ frame_generator_capturer_->Start();
+
EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(GetVideoEncoderConfig()->max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
@@ -2304,13 +2313,13 @@ class StartStopBitrateObserver : public test::FakeEncoder {
StartStopBitrateObserver() : FakeEncoder(Clock::GetRealTimeClock()) {}
int32_t InitEncode(const VideoCodec* config,
const Settings& settings) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
encoder_init_.Set();
return FakeEncoder::InitEncode(config, settings);
}
void SetRates(const RateControlParameters& parameters) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
bitrate_changed_.Set();
FakeEncoder::SetRates(parameters);
@@ -2324,7 +2333,7 @@ class StartStopBitrateObserver : public test::FakeEncoder {
do {
absl::optional<int> bitrate_kbps;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
bitrate_kbps = bitrate_kbps_;
}
if (!bitrate_kbps)
@@ -2339,10 +2348,10 @@ class StartStopBitrateObserver : public test::FakeEncoder {
}
private:
- rtc::CriticalSection crit_;
+ Mutex mutex_;
rtc::Event encoder_init_;
rtc::Event bitrate_changed_;
- absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(crit_);
+ absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(mutex_);
};
// This test that if the encoder use an internal source, VideoEncoder::SetRates
@@ -2476,30 +2485,35 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
released_(false),
encoder_factory_(this) {}
- bool IsReleased() {
- rtc::CritScope lock(&crit_);
+ bool IsReleased() RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
return released_;
}
- bool IsReadyForEncode() {
- rtc::CritScope lock(&crit_);
- return initialized_ && callback_registered_;
+ bool IsReadyForEncode() RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
+ return IsReadyForEncodeLocked();
}
- size_t num_releases() {
- rtc::CritScope lock(&crit_);
+ size_t num_releases() RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
return num_releases_;
}
private:
+ bool IsReadyForEncodeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
+ return initialized_ && callback_registered_;
+ }
+
void SetFecControllerOverride(
FecControllerOverride* fec_controller_override) override {
// Ignored.
}
int32_t InitEncode(const VideoCodec* codecSettings,
- const Settings& settings) override {
- rtc::CritScope lock(&crit_);
+ const Settings& settings) override
+ RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
EXPECT_FALSE(initialized_);
initialized_ = true;
released_ = false;
@@ -2515,16 +2529,16 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
}
int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override {
- rtc::CritScope lock(&crit_);
+ EncodedImageCallback* callback) override RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
EXPECT_TRUE(initialized_);
callback_registered_ = true;
return 0;
}
- int32_t Release() override {
- rtc::CritScope lock(&crit_);
- EXPECT_TRUE(IsReadyForEncode());
+ int32_t Release() override RTC_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
+ EXPECT_TRUE(IsReadyForEncodeLocked());
EXPECT_FALSE(released_);
initialized_ = false;
callback_registered_ = false;
@@ -2570,12 +2584,12 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
}
TaskQueueBase* const task_queue_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
VideoSendStream* stream_;
- bool initialized_ RTC_GUARDED_BY(crit_);
- bool callback_registered_ RTC_GUARDED_BY(crit_);
- size_t num_releases_ RTC_GUARDED_BY(crit_);
- bool released_ RTC_GUARDED_BY(crit_);
+ bool initialized_ RTC_GUARDED_BY(mutex_);
+ bool callback_registered_ RTC_GUARDED_BY(mutex_);
+ size_t num_releases_ RTC_GUARDED_BY(mutex_);
+ bool released_ RTC_GUARDED_BY(mutex_);
test::VideoEncoderProxyFactory encoder_factory_;
VideoEncoderConfig encoder_config_;
} test_encoder(task_queue());
@@ -2805,7 +2819,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet, length));
++rtp_packets_sent_;
@@ -2814,7 +2828,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
test::RtcpPacketParser parser;
EXPECT_TRUE(parser.Parse(packet, length));
@@ -2838,9 +2852,9 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP sender report.";
}
- rtc::CriticalSection crit_;
- size_t rtp_packets_sent_ RTC_GUARDED_BY(&crit_);
- size_t media_bytes_sent_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ size_t rtp_packets_sent_ RTC_GUARDED_BY(&mutex_);
+ size_t media_bytes_sent_ RTC_GUARDED_BY(&mutex_);
} test;
RunBaseTest(&test);
@@ -2994,7 +3008,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
void SetRates(const RateControlParameters& parameters) override {
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (target_bitrate_ == parameters.bitrate.get_sum_kbps()) {
FakeEncoder::SetRates(parameters);
return;
@@ -3011,14 +3025,14 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
// until the correct value has been observed.
const int64_t start_time = rtc::TimeMillis();
do {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (target_bitrate_ == expected_bitrate) {
return;
}
} while (bitrate_changed_event_.Wait(
std::max(int64_t{1}, VideoSendStreamTest::kDefaultTimeoutMs -
(rtc::TimeMillis() - start_time))));
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_EQ(target_bitrate_, expected_bitrate)
<< "Timed out while waiting encoder rate to be set.";
}
@@ -3099,8 +3113,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
rtc::Event create_rate_allocator_event_;
rtc::Event init_encode_event_;
rtc::Event bitrate_changed_event_;
- rtc::CriticalSection crit_;
- uint32_t target_bitrate_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ uint32_t target_bitrate_ RTC_GUARDED_BY(&mutex_);
int num_rate_allocator_creations_;
int num_encoder_initializations_;
@@ -3148,7 +3162,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
encoded.SetSpatialIndex(i);
EncodedImageCallback* callback;
{
- rtc::CritScope cs(&crit_sect_);
+ MutexLock lock(&mutex_);
callback = callback_;
}
RTC_DCHECK(callback);
@@ -3251,7 +3265,7 @@ class Vp9HeaderObserver : public test::SendTest {
bool wait = Wait();
{
// In case of time out, OnSendRtp might still access frames_sent_;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_TRUE(wait) << "Test timed out waiting for VP9 packet, num frames "
<< frames_sent_;
}
@@ -3283,7 +3297,7 @@ class Vp9HeaderObserver : public test::SendTest {
++packets_sent_;
if (rtp_packet.Marker()) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++frames_sent_;
}
last_packet_marker_ = rtp_packet.Marker();
@@ -3510,7 +3524,7 @@ class Vp9HeaderObserver : public test::SendTest {
uint32_t last_packet_timestamp_ = 0;
RTPVideoHeaderVP9 last_vp9_;
size_t packets_sent_;
- rtc::CriticalSection crit_;
+ Mutex mutex_;
size_t frames_sent_;
int expected_width_;
int expected_height_;
@@ -3801,7 +3815,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) {
first_packet_sent_(false) {}
void SetRates(const RateControlParameters& parameters) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// Wait for the first sent packet so that videosendstream knows
// rtp_overhead.
if (first_packet_sent_) {
@@ -3825,7 +3839,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) {
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
first_packet_sent_ = true;
return SEND_PACKET;
}
@@ -3850,7 +3864,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) {
EXPECT_TRUE(
bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_LE(max_bitrate_bps_, 57760u);
}
}
@@ -3859,9 +3873,9 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) {
TaskQueueBase* const task_queue_;
test::VideoEncoderProxyFactory encoder_factory_;
Call* call_;
- rtc::CriticalSection crit_;
- uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&crit_);
- bool first_packet_sent_ RTC_GUARDED_BY(&crit_);
+ Mutex mutex_;
+ uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&mutex_);
+ bool first_packet_sent_ RTC_GUARDED_BY(&mutex_);
rtc::Event bitrate_changed_event_;
} test(task_queue());
RunBaseTest(&test);
@@ -3980,7 +3994,7 @@ class ContentSwitchTest : public test::SendTest {
void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
send_stream_ = send_stream;
}
@@ -4001,7 +4015,7 @@ class ContentSwitchTest : public test::SendTest {
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
auto internal_send_peer = test::VideoSendStreamPeer(send_stream_);
float pacing_factor =
@@ -4063,18 +4077,18 @@ class ContentSwitchTest : public test::SendTest {
private:
StreamState GetStreamState() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return state_;
}
- rtc::CriticalSection crit_;
+ Mutex mutex_;
rtc::Event content_switch_event_;
Call* call_;
- StreamState state_ RTC_GUARDED_BY(crit_);
- VideoSendStream* send_stream_ RTC_GUARDED_BY(crit_);
+ StreamState state_ RTC_GUARDED_BY(mutex_);
+ VideoSendStream* send_stream_ RTC_GUARDED_BY(mutex_);
VideoSendStream::Config send_stream_config_;
VideoEncoderConfig encoder_config_;
- uint32_t packets_sent_ RTC_GUARDED_BY(crit_);
+ uint32_t packets_sent_ RTC_GUARDED_BY(mutex_);
T* stream_resetter_;
};
diff --git a/video/video_source_sink_controller.cc b/video/video_source_sink_controller.cc
index a649adc68c..a5c0941e02 100644
--- a/video/video_source_sink_controller.cc
+++ b/video/video_source_sink_controller.cc
@@ -14,10 +14,28 @@
#include <limits>
#include <utility>
+#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/strings/string_builder.h"
namespace webrtc {
+namespace {
+
+std::string WantsToString(const rtc::VideoSinkWants& wants) {
+ rtc::StringBuilder ss;
+
+ ss << "max_fps=" << wants.max_framerate_fps
+ << " max_pixel_count=" << wants.max_pixel_count << " target_pixel_count="
+ << (wants.target_pixel_count.has_value()
+ ? std::to_string(wants.target_pixel_count.value())
+ : "null");
+
+ return ss.Release();
+}
+
+} // namespace
+
VideoSourceSinkController::VideoSourceSinkController(
rtc::VideoSinkInterface<VideoFrame>* sink,
rtc::VideoSourceInterface<VideoFrame>* source)
@@ -30,7 +48,7 @@ void VideoSourceSinkController::SetSource(
rtc::VideoSourceInterface<VideoFrame>* old_source;
rtc::VideoSinkWants wants;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
old_source = source_;
source_ = source;
wants = CurrentSettingsToSinkWants();
@@ -43,69 +61,71 @@ void VideoSourceSinkController::SetSource(
}
void VideoSourceSinkController::PushSourceSinkSettings() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
if (!source_)
return;
- source_->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants());
+ rtc::VideoSinkWants wants = CurrentSettingsToSinkWants();
+ RTC_LOG(INFO) << "Pushing SourceSink restrictions: " << WantsToString(wants);
+ source_->AddOrUpdateSink(sink_, wants);
}
VideoSourceRestrictions VideoSourceSinkController::restrictions() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return restrictions_;
}
absl::optional<size_t> VideoSourceSinkController::pixels_per_frame_upper_limit()
const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return pixels_per_frame_upper_limit_;
}
absl::optional<double> VideoSourceSinkController::frame_rate_upper_limit()
const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return frame_rate_upper_limit_;
}
bool VideoSourceSinkController::rotation_applied() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return rotation_applied_;
}
int VideoSourceSinkController::resolution_alignment() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return resolution_alignment_;
}
void VideoSourceSinkController::SetRestrictions(
VideoSourceRestrictions restrictions) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
restrictions_ = std::move(restrictions);
}
void VideoSourceSinkController::SetPixelsPerFrameUpperLimit(
absl::optional<size_t> pixels_per_frame_upper_limit) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
pixels_per_frame_upper_limit_ = std::move(pixels_per_frame_upper_limit);
}
void VideoSourceSinkController::SetFrameRateUpperLimit(
absl::optional<double> frame_rate_upper_limit) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
frame_rate_upper_limit_ = std::move(frame_rate_upper_limit);
}
void VideoSourceSinkController::SetRotationApplied(bool rotation_applied) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
rotation_applied_ = rotation_applied;
}
void VideoSourceSinkController::SetResolutionAlignment(
int resolution_alignment) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
resolution_alignment_ = resolution_alignment;
}
-// RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_)
+// RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_)
rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants()
const {
rtc::VideoSinkWants wants;
diff --git a/video/video_source_sink_controller.h b/video/video_source_sink_controller.h
index 4811b2866e..877cf85901 100644
--- a/video/video_source_sink_controller.h
+++ b/video/video_source_sink_controller.h
@@ -11,12 +11,14 @@
#ifndef VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_
#define VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_
+#include <string>
+
#include "absl/types/optional.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "call/adaptation/video_source_restrictions.h"
-#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -51,23 +53,20 @@ class VideoSourceSinkController {
private:
rtc::VideoSinkWants CurrentSettingsToSinkWants() const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- // TODO(hbos): If everything is handled on the same sequence (i.e.
- // VideoStreamEncoder's encoder queue) then |crit_| can be replaced by
- // sequence checker. Investigate if we want to do this.
- mutable rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
rtc::VideoSinkInterface<VideoFrame>* const sink_;
- rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&crit_);
+ rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&mutex_);
// Pixel and frame rate restrictions.
- VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&crit_);
+ VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&mutex_);
// Ensures that even if we are not restricted, the sink is never configured
// above this limit. Example: We are not CPU limited (no |restrictions_|) but
// our encoder is capped at 30 fps (= |frame_rate_upper_limit_|).
- absl::optional<size_t> pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&crit_);
- absl::optional<double> frame_rate_upper_limit_ RTC_GUARDED_BY(&crit_);
- bool rotation_applied_ RTC_GUARDED_BY(&crit_) = false;
- int resolution_alignment_ RTC_GUARDED_BY(&crit_) = 1;
+ absl::optional<size_t> pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&mutex_);
+ absl::optional<double> frame_rate_upper_limit_ RTC_GUARDED_BY(&mutex_);
+ bool rotation_applied_ RTC_GUARDED_BY(&mutex_) = false;
+ int resolution_alignment_ RTC_GUARDED_BY(&mutex_) = 1;
};
} // namespace webrtc
diff --git a/video/video_source_sink_controller_unittest.cc b/video/video_source_sink_controller_unittest.cc
index c4e2ea11d2..66881cd023 100644
--- a/video/video_source_sink_controller_unittest.cc
+++ b/video/video_source_sink_controller_unittest.cc
@@ -30,8 +30,8 @@ class MockVideoSinkWithVideoFrame : public rtc::VideoSinkInterface<VideoFrame> {
public:
~MockVideoSinkWithVideoFrame() override {}
- MOCK_METHOD1(OnFrame, void(const VideoFrame& frame));
- MOCK_METHOD0(OnDiscardedFrame, void());
+ MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override));
+ MOCK_METHOD(void, OnDiscardedFrame, (), (override));
};
class MockVideoSourceWithVideoFrame
@@ -39,10 +39,15 @@ class MockVideoSourceWithVideoFrame
public:
~MockVideoSourceWithVideoFrame() override {}
- MOCK_METHOD2(AddOrUpdateSink,
- void(rtc::VideoSinkInterface<VideoFrame>*,
- const rtc::VideoSinkWants&));
- MOCK_METHOD1(RemoveSink, void(rtc::VideoSinkInterface<VideoFrame>*));
+ MOCK_METHOD(void,
+ AddOrUpdateSink,
+ (rtc::VideoSinkInterface<VideoFrame>*,
+ const rtc::VideoSinkWants&),
+ (override));
+ MOCK_METHOD(void,
+ RemoveSink,
+ (rtc::VideoSinkInterface<VideoFrame>*),
+ (override));
};
} // namespace
diff --git a/video/video_stream_decoder.h b/video/video_stream_decoder.h
index 6b040c6a6f..bfe9252976 100644
--- a/video/video_stream_decoder.h
+++ b/video/video_stream_decoder.h
@@ -20,8 +20,8 @@
#include "api/video/video_sink_interface.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/video_coding/include/video_coding_defines.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -50,7 +50,7 @@ class VideoStreamDecoder : public VCMReceiveCallback {
private:
// Used for all registered callbacks except rendering.
- rtc::CriticalSection crit_;
+ Mutex mutex_;
VideoReceiver2* const video_receiver_;
diff --git a/video/video_stream_decoder2.h b/video/video_stream_decoder2.h
index 04f98bc044..a301d32107 100644
--- a/video/video_stream_decoder2.h
+++ b/video/video_stream_decoder2.h
@@ -20,7 +20,6 @@
#include "api/video/video_sink_interface.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/video_coding/include/video_coding_defines.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread.h"
namespace webrtc {
diff --git a/video/video_stream_decoder_impl.cc b/video/video_stream_decoder_impl.cc
index 1e11d38050..02ba45e253 100644
--- a/video/video_stream_decoder_impl.cc
+++ b/video/video_stream_decoder_impl.cc
@@ -47,7 +47,7 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl(
}
VideoStreamDecoderImpl::~VideoStreamDecoderImpl() {
- rtc::CritScope lock(&shut_down_crit_);
+ MutexLock lock(&shut_down_mutex_);
shut_down_ = true;
}
@@ -157,7 +157,7 @@ void VideoStreamDecoderImpl::OnNextFrameCallback(
RTC_DCHECK(frame);
SaveFrameTimestamps(*frame);
- rtc::CritScope lock(&shut_down_crit_);
+ MutexLock lock(&shut_down_mutex_);
if (shut_down_) {
return;
}
diff --git a/video/video_stream_decoder_impl.h b/video/video_stream_decoder_impl.h
index f3f09e4a79..2f33e9d349 100644
--- a/video/video_stream_decoder_impl.h
+++ b/video/video_stream_decoder_impl.h
@@ -20,6 +20,7 @@
#include "modules/video_coding/frame_buffer2.h"
#include "modules/video_coding/timing.h"
#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
#include "system_wrappers/include/clock.h"
@@ -112,8 +113,8 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface {
// safe for the |decode_queue_| to be destructed. After that the |decoder_|
// can be destructed, and then the |bookkeeping_queue_|. Finally the
// |frame_buffer_| can be destructed.
- rtc::CriticalSection shut_down_crit_;
- bool shut_down_ RTC_GUARDED_BY(shut_down_crit_);
+ Mutex shut_down_mutex_;
+ bool shut_down_ RTC_GUARDED_BY(shut_down_mutex_);
video_coding::FrameBuffer frame_buffer_ RTC_GUARDED_BY(bookkeeping_queue_);
rtc::TaskQueue bookkeeping_queue_;
std::unique_ptr<VideoDecoder> decoder_ RTC_GUARDED_BY(decode_queue_);
diff --git a/video/video_stream_decoder_impl_unittest.cc b/video/video_stream_decoder_impl_unittest.cc
index 44e914001d..a45a12ccae 100644
--- a/video/video_stream_decoder_impl_unittest.cc
+++ b/video/video_stream_decoder_impl_unittest.cc
@@ -27,21 +27,25 @@ using ::testing::Return;
class MockVideoStreamDecoderCallbacks
: public VideoStreamDecoderInterface::Callbacks {
public:
- MOCK_METHOD0(OnNonDecodableState, void());
- MOCK_METHOD1(OnContinuousUntil,
- void(const video_coding::VideoLayerFrameId& key));
- MOCK_METHOD1(OnEncodedFrame, void(const video_coding::EncodedFrame& frame));
- MOCK_METHOD3(OnDecodedFrame,
- void(VideoFrame decodedImage,
- absl::optional<int> decode_time_ms,
- absl::optional<int> qp));
+ MOCK_METHOD(void, OnNonDecodableState, (), (override));
+ MOCK_METHOD(void,
+ OnContinuousUntil,
+ (const video_coding::VideoLayerFrameId& key),
+ (override));
+ MOCK_METHOD(void,
+ OnDecodedFrame,
+ (VideoFrame decodedImage,
+ absl::optional<int> decode_time_ms,
+ absl::optional<int> qp),
+ (override));
};
class StubVideoDecoder : public VideoDecoder {
public:
- MOCK_METHOD2(InitDecode,
- int32_t(const VideoCodec* codec_settings,
- int32_t number_of_cores));
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
int32_t Decode(const EncodedImage& input_image,
bool missing_frames,
@@ -57,10 +61,12 @@ class StubVideoDecoder : public VideoDecoder {
return ret_code;
}
- MOCK_METHOD3(DecodeCall,
- int32_t(const EncodedImage& input_image,
- bool missing_frames,
- int64_t render_time_ms));
+ MOCK_METHOD(int32_t,
+ DecodeCall,
+ (const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms),
+ ());
int32_t Release() override { return 0; }
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index c69cf1e731..c0fcd1f7c3 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -19,6 +19,8 @@
#include "absl/algorithm/container.h"
#include "absl/types/optional.h"
+#include "api/task_queue/queued_task.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_adaptation_reason.h"
@@ -26,15 +28,20 @@
#include "api/video/video_codec_constants.h"
#include "api/video_codecs/video_encoder.h"
#include "call/adaptation/resource_adaptation_processor.h"
+#include "call/adaptation/video_stream_adapter.h"
#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h"
#include "modules/video_coding/include/video_codec_initializer.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
+#include "rtc_base/constructor_magic.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
@@ -201,6 +208,82 @@ bool VideoStreamEncoder::EncoderRateSettings::operator!=(
return !(*this == rhs);
}
+class VideoStreamEncoder::DegradationPreferenceManager
+ : public DegradationPreferenceProvider {
+ public:
+ DegradationPreferenceManager()
+ : degradation_preference_(DegradationPreference::DISABLED),
+ is_screenshare_(false),
+ effective_degradation_preference_(DegradationPreference::DISABLED) {}
+
+ ~DegradationPreferenceManager() override {
+ RTC_DCHECK(!video_stream_adapter_);
+ }
+
+ DegradationPreference degradation_preference() const override {
+ MutexLock lock(&lock_);
+ return effective_degradation_preference_;
+ }
+
+ void SetDegradationPreference(DegradationPreference degradation_preference) {
+ MutexLock lock(&lock_);
+ degradation_preference_ = degradation_preference;
+ MaybeUpdateEffectiveDegradationPreference();
+ }
+
+ void SetIsScreenshare(bool is_screenshare) {
+ MutexLock lock(&lock_);
+ is_screenshare_ = is_screenshare;
+ MaybeUpdateEffectiveDegradationPreference();
+ }
+
+ void SetVideoStreamAdapterQueue(
+ TaskQueueBase* video_stream_adapter_task_queue) {
+ RTC_DCHECK(!video_stream_adapter_task_queue_);
+ RTC_DCHECK(video_stream_adapter_task_queue);
+ RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue);
+ video_stream_adapter_task_queue_ = video_stream_adapter_task_queue;
+ }
+
+ void SetVideoStreamAdapter(VideoStreamAdapter* video_stream_adapter) {
+ RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue_);
+ video_stream_adapter_ = video_stream_adapter;
+ }
+
+ private:
+ void MaybeUpdateEffectiveDegradationPreference()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_) {
+ DegradationPreference effective_degradation_preference =
+ (is_screenshare_ &&
+ degradation_preference_ == DegradationPreference::BALANCED)
+ ? DegradationPreference::MAINTAIN_RESOLUTION
+ : degradation_preference_;
+
+ if (effective_degradation_preference != effective_degradation_preference_) {
+ effective_degradation_preference_ = effective_degradation_preference;
+ if (video_stream_adapter_task_queue_) {
+ video_stream_adapter_task_queue_->PostTask(
+ ToQueuedTask([this, effective_degradation_preference]() {
+ RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue_);
+ if (video_stream_adapter_) {
+ video_stream_adapter_->SetDegradationPreference(
+ effective_degradation_preference);
+ }
+ }));
+ }
+ }
+ }
+
+ mutable Mutex lock_;
+ DegradationPreference degradation_preference_ RTC_GUARDED_BY(&lock_);
+ bool is_screenshare_ RTC_GUARDED_BY(&lock_);
+ DegradationPreference effective_degradation_preference_
+ RTC_GUARDED_BY(&lock_);
+ TaskQueueBase* video_stream_adapter_task_queue_ = nullptr;
+ VideoStreamAdapter* video_stream_adapter_
+ RTC_GUARDED_BY(&video_stream_adapter_task_queue_);
+};
+
VideoStreamEncoder::VideoStreamEncoder(
Clock* clock,
uint32_t number_of_cores,
@@ -251,40 +334,70 @@ VideoStreamEncoder::VideoStreamEncoder(
next_frame_types_(1, VideoFrameType::kVideoFrameDelta),
frame_encode_metadata_writer_(this),
experiment_groups_(GetExperimentGroups()),
- next_frame_id_(0),
encoder_switch_experiment_(ParseEncoderSwitchFieldTrial()),
automatic_animation_detection_experiment_(
ParseAutomatincAnimationDetectionFieldTrial()),
encoder_switch_requested_(false),
input_state_provider_(encoder_stats_observer),
+ video_stream_adapter_(
+ std::make_unique<VideoStreamAdapter>(&input_state_provider_)),
resource_adaptation_processor_(
std::make_unique<ResourceAdaptationProcessor>(
- &input_state_provider_,
- encoder_stats_observer)),
+ encoder_stats_observer,
+ video_stream_adapter_.get())),
+ degradation_preference_manager_(
+ std::make_unique<DegradationPreferenceManager>()),
+ adaptation_constraints_(),
stream_resource_manager_(&input_state_provider_,
- resource_adaptation_processor_.get(),
encoder_stats_observer,
clock_,
settings_.experiment_cpu_load_estimator,
- std::move(overuse_detector)),
+ std::move(overuse_detector),
+ degradation_preference_manager_.get()),
video_source_sink_controller_(/*sink=*/this,
/*source=*/nullptr),
+ resource_adaptation_queue_(task_queue_factory->CreateTaskQueue(
+ "ResourceAdaptationQueue",
+ TaskQueueFactory::Priority::NORMAL)),
encoder_queue_(task_queue_factory->CreateTaskQueue(
"EncoderQueue",
TaskQueueFactory::Priority::NORMAL)) {
RTC_DCHECK(encoder_stats_observer);
RTC_DCHECK_GE(number_of_cores, 1);
- resource_adaptation_processor_->AddAdaptationListener(
- &stream_resource_manager_);
- resource_adaptation_processor_->AddAdaptationListener(this);
-
- // Add the stream resource manager's resources to the processor.
- for (Resource* resource : stream_resource_manager_.MappedResources())
- resource_adaptation_processor_->AddResource(resource);
-
- for (auto& state : encoder_buffer_state_)
- state.fill(std::numeric_limits<int64_t>::max());
+ stream_resource_manager_.Initialize(&encoder_queue_,
+ &resource_adaptation_queue_);
+
+ rtc::Event initialize_processor_event;
+ resource_adaptation_queue_.PostTask([this, &initialize_processor_event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ resource_adaptation_processor_->SetResourceAdaptationQueue(
+ resource_adaptation_queue_.Get());
+ stream_resource_manager_.SetAdaptationProcessor(
+ resource_adaptation_processor_.get(), video_stream_adapter_.get());
+ resource_adaptation_processor_->AddResourceLimitationsListener(
+ &stream_resource_manager_);
+ video_stream_adapter_->AddRestrictionsListener(&stream_resource_manager_);
+ video_stream_adapter_->AddRestrictionsListener(this);
+ degradation_preference_manager_->SetVideoStreamAdapterQueue(
+ resource_adaptation_queue_.Get());
+ degradation_preference_manager_->SetVideoStreamAdapter(
+ video_stream_adapter_.get());
+
+ // Add the stream resource manager's resources to the processor.
+ adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints();
+ for (auto& resource : stream_resource_manager_.MappedResources()) {
+ resource_adaptation_processor_->AddResource(resource);
+ }
+ for (auto* constraint : adaptation_constraints_) {
+ video_stream_adapter_->AddAdaptationConstraint(constraint);
+ }
+ for (auto* listener : stream_resource_manager_.AdaptationListeners()) {
+ video_stream_adapter_->AddAdaptationListener(listener);
+ }
+ initialize_processor_event.Set();
+ });
+ initialize_processor_event.Wait(rtc::Event::kForever);
}
VideoStreamEncoder::~VideoStreamEncoder() {
@@ -296,16 +409,46 @@ VideoStreamEncoder::~VideoStreamEncoder() {
void VideoStreamEncoder::Stop() {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_source_sink_controller_.SetSource(nullptr);
+
+ if (resource_adaptation_processor_) {
+ for (auto& resource : stream_resource_manager_.MappedResources()) {
+ resource_adaptation_processor_->RemoveResource(resource);
+ }
+ }
+ rtc::Event shutdown_adaptation_processor_event;
+ resource_adaptation_queue_.PostTask([this,
+ &shutdown_adaptation_processor_event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (resource_adaptation_processor_) {
+ // Removed on the resource_adaptaiton_processor_ queue because the
+ // adaptation_constraints_ and adaptation_listeners_ fields are guarded by
+ // this queue.
+ for (auto* constraint : adaptation_constraints_) {
+ video_stream_adapter_->RemoveAdaptationConstraint(constraint);
+ }
+ for (auto* listener : stream_resource_manager_.AdaptationListeners()) {
+ video_stream_adapter_->RemoveAdaptationListener(listener);
+ }
+ video_stream_adapter_->RemoveRestrictionsListener(this);
+ video_stream_adapter_->RemoveRestrictionsListener(
+ &stream_resource_manager_);
+ resource_adaptation_processor_->RemoveResourceLimitationsListener(
+ &stream_resource_manager_);
+ stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr);
+ degradation_preference_manager_->SetVideoStreamAdapter(nullptr);
+ resource_adaptation_processor_.reset();
+ }
+ shutdown_adaptation_processor_event.Set();
+ });
+ shutdown_adaptation_processor_event.Wait(rtc::Event::kForever);
encoder_queue_.PostTask([this] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
stream_resource_manager_.StopManagedResources();
- resource_adaptation_processor_->StopResourceAdaptation();
rate_allocator_ = nullptr;
bitrate_observer_ = nullptr;
ReleaseEncoder();
shutdown_event_.Set();
});
-
shutdown_event_.Wait(rtc::Event::kForever);
}
@@ -331,19 +474,40 @@ void VideoStreamEncoder::SetFecControllerOverride(
});
}
+void VideoStreamEncoder::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ // Map any externally added resources as kCpu for the sake of stats reporting.
+ // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid
+ // of this MapResourceToReason() call.
+ rtc::Event map_resource_event;
+ encoder_queue_.PostTask([this, resource, &map_resource_event] {
+ RTC_DCHECK_RUN_ON(&encoder_queue_);
+ stream_resource_manager_.MapResourceToReason(resource,
+ VideoAdaptationReason::kCpu);
+ resource_adaptation_processor_->AddResource(resource);
+ map_resource_event.Set();
+ });
+ map_resource_event.Wait(rtc::Event::kForever);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+VideoStreamEncoder::GetAdaptationResources() {
+ return resource_adaptation_processor_->GetResources();
+}
+
void VideoStreamEncoder::SetSource(
rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_source_sink_controller_.SetSource(source);
- encoder_queue_.PostTask([this, source, degradation_preference] {
+ input_state_provider_.OnHasInputChanged(source);
+
+ degradation_preference_manager_->SetDegradationPreference(
+ degradation_preference);
+ // This may trigger reconfiguring the QualityScaler on the encoder queue.
+ encoder_queue_.PostTask([this, degradation_preference] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
- input_state_provider_.OnHasInputChanged(source);
- resource_adaptation_processor_->SetDegradationPreference(
- degradation_preference);
- stream_resource_manager_.SetDegradationPreferences(
- resource_adaptation_processor_->degradation_preference(),
- resource_adaptation_processor_->effective_degradation_preference());
+ stream_resource_manager_.SetDegradationPreferences(degradation_preference);
if (encoder_) {
stream_resource_manager_.ConfigureQualityScaler(
encoder_->GetEncoderInfo());
@@ -534,6 +698,13 @@ void VideoStreamEncoder::ReconfigureEncoder() {
RTC_LOG(LS_ERROR) << "Failed to create encoder configuration.";
}
+ if (encoder_config_.codec_type == kVideoCodecVP9) {
+ // Spatial layers configuration might impose some parity restrictions,
+ // thus some cropping might be needed.
+ crop_width_ = last_frame_info_->width - codec.width;
+ crop_height_ = last_frame_info_->height - codec.height;
+ }
+
char log_stream_buf[4 * 1024];
rtc::SimpleStringBuilder log_stream(log_stream_buf);
log_stream << "ReconfigureEncoder:\n";
@@ -670,10 +841,12 @@ void VideoStreamEncoder::ReconfigureEncoder() {
}
if (pending_encoder_creation_) {
+ // TODO(hbos): Stopping and restarting for backwards compatibility reasons.
+ // We may be able to change this to "EnsureStarted()" if it took care of
+ // reconfiguring the QualityScaler as well. (ConfigureQualityScaler() is
+ // invoked later in this method.)
stream_resource_manager_.StopManagedResources();
- resource_adaptation_processor_->StopResourceAdaptation();
stream_resource_manager_.StartEncodeUsageResource();
- resource_adaptation_processor_->StartResourceAdaptation();
pending_encoder_creation_ = false;
}
@@ -752,13 +925,11 @@ void VideoStreamEncoder::ReconfigureEncoder() {
void VideoStreamEncoder::OnEncoderSettingsChanged() {
EncoderSettings encoder_settings(encoder_->GetEncoderInfo(),
encoder_config_.Copy(), send_codec_);
- resource_adaptation_processor_->SetIsScreenshare(
- encoder_config_.content_type == VideoEncoderConfig::ContentType::kScreen);
- stream_resource_manager_.SetDegradationPreferences(
- resource_adaptation_processor_->degradation_preference(),
- resource_adaptation_processor_->effective_degradation_preference());
- input_state_provider_.OnEncoderSettingsChanged(encoder_settings);
stream_resource_manager_.SetEncoderSettings(encoder_settings);
+ input_state_provider_.OnEncoderSettingsChanged(encoder_settings);
+ bool is_screenshare = encoder_settings.encoder_config().content_type ==
+ VideoEncoderConfig::ContentType::kScreen;
+ degradation_preference_manager_->SetIsScreenshare(is_screenshare);
}
void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
@@ -1442,48 +1613,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
simulcast_id = encoded_image.SpatialIndex().value_or(0);
}
- std::unique_ptr<CodecSpecificInfo> codec_info_copy;
- {
- rtc::CritScope cs(&encoded_image_lock_);
-
- if (codec_specific_info && codec_specific_info->generic_frame_info) {
- codec_info_copy =
- std::make_unique<CodecSpecificInfo>(*codec_specific_info);
- GenericFrameInfo& generic_info = *codec_info_copy->generic_frame_info;
- generic_info.frame_id = next_frame_id_++;
-
- if (encoder_buffer_state_.size() <= static_cast<size_t>(simulcast_id)) {
- RTC_LOG(LS_ERROR) << "At most " << encoder_buffer_state_.size()
- << " simulcast streams supported.";
- } else {
- std::array<int64_t, kMaxEncoderBuffers>& state =
- encoder_buffer_state_[simulcast_id];
- for (const CodecBufferUsage& buffer : generic_info.encoder_buffers) {
- if (state.size() <= static_cast<size_t>(buffer.id)) {
- RTC_LOG(LS_ERROR)
- << "At most " << state.size() << " encoder buffers supported.";
- break;
- }
-
- if (buffer.referenced) {
- int64_t diff = generic_info.frame_id - state[buffer.id];
- if (diff <= 0) {
- RTC_LOG(LS_ERROR) << "Invalid frame diff: " << diff << ".";
- } else if (absl::c_find(generic_info.frame_diffs, diff) ==
- generic_info.frame_diffs.end()) {
- generic_info.frame_diffs.push_back(diff);
- }
- }
-
- if (buffer.updated)
- state[buffer.id] = generic_info.frame_id;
- }
- }
- }
- }
-
EncodedImageCallback::Result result = sink_->OnEncodedImage(
- image_copy, codec_info_copy ? codec_info_copy.get() : codec_specific_info,
+ image_copy, codec_specific_info,
fragmentation_copy ? fragmentation_copy.get() : fragmentation);
// We are only interested in propagating the meta-data about the image, not
@@ -1655,7 +1786,8 @@ bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const {
bool simulcast_or_svc =
(send_codec_.codecType == VideoCodecType::kVideoCodecVP9 &&
send_codec_.VP9().numberOfSpatialLayers > 1) ||
- send_codec_.numberOfSimulcastStreams > 1;
+ send_codec_.numberOfSimulcastStreams > 1 ||
+ encoder_config_.simulcast_layers.size() > 1;
if (simulcast_or_svc || !stream_resource_manager_.DropInitialFrames() ||
!encoder_target_bitrate_bps_.has_value()) {
@@ -1683,8 +1815,12 @@ bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const {
void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) {
- RTC_DCHECK_RUN_ON(&encoder_queue_);
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ std::string resource_name = reason ? reason->Name() : "<null>";
+ RTC_LOG(INFO) << "Updating sink restrictions from " << resource_name << " to "
+ << restrictions.ToString();
video_source_sink_controller_.SetRestrictions(std::move(restrictions));
video_source_sink_controller_.PushSourceSinkSettings();
}
@@ -1887,7 +2023,7 @@ void VideoStreamEncoder::CheckForAnimatedContent(
if (!automatic_animation_detection_experiment_.enabled ||
encoder_config_.content_type !=
VideoEncoderConfig::ContentType::kScreen ||
- resource_adaptation_processor_->degradation_preference() !=
+ stream_resource_manager_.degradation_preference() !=
DegradationPreference::BALANCED) {
return;
}
@@ -1952,16 +2088,63 @@ void VideoStreamEncoder::CheckForAnimatedContent(
}
}
void VideoStreamEncoder::InjectAdaptationResource(
- Resource* resource,
+ rtc::scoped_refptr<Resource> resource,
VideoAdaptationReason reason) {
- stream_resource_manager_.MapResourceToReason(resource, reason);
- resource_adaptation_processor_->AddResource(resource);
+ rtc::Event map_resource_event;
+ encoder_queue_.PostTask([this, resource, reason, &map_resource_event] {
+ RTC_DCHECK_RUN_ON(&encoder_queue_);
+ stream_resource_manager_.MapResourceToReason(resource, reason);
+ resource_adaptation_processor_->AddResource(resource);
+ map_resource_event.Set();
+ });
+ map_resource_event.Wait(rtc::Event::kForever);
+}
+
+void VideoStreamEncoder::InjectAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, adaptation_constraint, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (!resource_adaptation_processor_) {
+ // The VideoStreamEncoder was stopped and the processor destroyed before
+ // this task had a chance to execute. No action needed.
+ return;
+ }
+ adaptation_constraints_.push_back(adaptation_constraint);
+ video_stream_adapter_->AddAdaptationConstraint(adaptation_constraint);
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
}
-QualityScalerResource*
+rtc::scoped_refptr<QualityScalerResource>
VideoStreamEncoder::quality_scaler_resource_for_testing() {
RTC_DCHECK_RUN_ON(&encoder_queue_);
return stream_resource_manager_.quality_scaler_resource_for_testing();
}
+void VideoStreamEncoder::AddRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_processor_);
+ video_stream_adapter_->AddRestrictionsListener(restrictions_listener);
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
+}
+
+void VideoStreamEncoder::RemoveRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_processor_);
+ video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener);
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
+}
+
} // namespace webrtc
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index a4a055dbd3..43122e95a9 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -17,6 +17,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/units/data_rate.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video/video_rotation.h"
@@ -26,11 +27,13 @@
#include "api/video/video_stream_encoder_settings.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
+#include "call/adaptation/resource_adaptation_processor.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state_provider.h"
#include "modules/video_coding/utility/frame_dropper.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/numerics/exp_filter.h"
@@ -38,6 +41,7 @@
#include "rtc_base/rate_statistics.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/thread_checker.h"
#include "system_wrappers/include/clock.h"
#include "video/adaptation/video_stream_encoder_resource_manager.h"
#include "video/encoder_bitrate_adjuster.h"
@@ -56,7 +60,7 @@ namespace webrtc {
// Call Stop() when done.
class VideoStreamEncoder : public VideoStreamEncoderInterface,
private EncodedImageCallback,
- public ResourceAdaptationProcessorListener {
+ public VideoSourceRestrictionsListener {
public:
VideoStreamEncoder(Clock* clock,
uint32_t number_of_cores,
@@ -66,6 +70,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
TaskQueueFactory* task_queue_factory);
~VideoStreamEncoder() override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
+
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) override;
@@ -106,19 +113,29 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
// Used for testing. For example the |ScalingObserverInterface| methods must
// be called on |encoder_queue_|.
rtc::TaskQueue* encoder_queue() { return &encoder_queue_; }
+ rtc::TaskQueue* resource_adaptation_queue() {
+ return &resource_adaptation_queue_;
+ }
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
- const Resource* reason) override;
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) override;
// Used for injected test resources.
// TODO(eshr): Move all adaptation tests out of VideoStreamEncoder tests.
- void InjectAdaptationResource(Resource* resource,
- VideoAdaptationReason reason)
- RTC_RUN_ON(&encoder_queue_);
+ void InjectAdaptationResource(rtc::scoped_refptr<Resource> resource,
+ VideoAdaptationReason reason);
+ void InjectAdaptationConstraint(AdaptationConstraint* adaptation_constraint);
+
+ rtc::scoped_refptr<QualityScalerResource>
+ quality_scaler_resource_for_testing();
- QualityScalerResource* quality_scaler_resource_for_testing();
+ void AddRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener);
+ void RemoveRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener);
private:
class VideoFrameInfo {
@@ -152,6 +169,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
DataRate stable_encoder_target;
};
+ class DegradationPreferenceManager;
+
void ReconfigureEncoder() RTC_RUN_ON(&encoder_queue_);
void OnEncoderSettingsChanged() RTC_RUN_ON(&encoder_queue_);
@@ -332,17 +351,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
// experiment group numbers incremented by 1.
const std::array<uint8_t, 2> experiment_groups_;
- // TODO(philipel): Remove this lock and run on |encoder_queue_| instead.
- rtc::CriticalSection encoded_image_lock_;
-
- int64_t next_frame_id_ RTC_GUARDED_BY(encoded_image_lock_);
-
- // This array is used as a map from simulcast id to an encoder's buffer
- // state. For every buffer of the encoder we keep track of the last frame id
- // that updated that buffer.
- std::array<std::array<int64_t, kMaxEncoderBuffers>, kMaxSimulcastStreams>
- encoder_buffer_state_ RTC_GUARDED_BY(encoded_image_lock_);
-
struct EncoderSwitchExperiment {
struct Thresholds {
absl::optional<DataRate> bitrate;
@@ -400,31 +408,40 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
bool encoder_switch_requested_ RTC_GUARDED_BY(&encoder_queue_);
// Provies video stream input states: current resolution and frame rate.
- VideoStreamInputStateProvider input_state_provider_
- RTC_GUARDED_BY(&encoder_queue_);
+ // This class is thread-safe.
+ VideoStreamInputStateProvider input_state_provider_;
+
+ std::unique_ptr<VideoStreamAdapter> video_stream_adapter_
+ RTC_GUARDED_BY(&resource_adaptation_queue_);
// Responsible for adapting input resolution or frame rate to ensure resources
// (e.g. CPU or bandwidth) are not overused.
+ // Adding resources can occur on any thread, but all other methods need to be
+ // called on the adaptation thread.
std::unique_ptr<ResourceAdaptationProcessorInterface>
- resource_adaptation_processor_ RTC_GUARDED_BY(&encoder_queue_);
+ resource_adaptation_processor_;
+ std::unique_ptr<DegradationPreferenceManager> degradation_preference_manager_;
+ std::vector<AdaptationConstraint*> adaptation_constraints_
+ RTC_GUARDED_BY(&resource_adaptation_queue_);
// Handles input, output and stats reporting related to VideoStreamEncoder
// specific resources, such as "encode usage percent" measurements and "QP
// scaling". Also involved with various mitigations such as inital frame
// dropping.
- VideoStreamEncoderResourceManager stream_resource_manager_
- RTC_GUARDED_BY(&encoder_queue_);
+ // The manager primarily operates on the |encoder_queue_| but its lifetime is
+ // tied to the VideoStreamEncoder (which is destroyed off the encoder queue)
+ // and its resource list is accessible from any thread.
+ VideoStreamEncoderResourceManager stream_resource_manager_;
// Carries out the VideoSourceRestrictions provided by the
// ResourceAdaptationProcessor, i.e. reconfigures the source of video frames
// to provide us with different resolution or frame rate.
- //
- // Used on the |encoder_queue_| with a few exceptions:
- // - VideoStreamEncoder::SetSource() invokes SetSource().
- // - VideoStreamEncoder::SetSink() invokes SetRotationApplied() and
- // PushSourceSinkSettings().
- // - VideoStreamEncoder::Stop() invokes SetSource().
+ // This class is thread-safe.
VideoSourceSinkController video_source_sink_controller_;
- // All public methods are proxied to |encoder_queue_|. It must must be
- // destroyed first to make sure no tasks are run that use other members.
+ // Public methods are proxied to the task queues. The queues must be destroyed
+ // first to make sure no tasks run that use other members.
+ // TODO(https://crbug.com/webrtc/11172): Move ownership of the
+ // ResourceAdaptationProcessor and its task queue to Call when processors are
+ // multi-stream aware.
+ rtc::TaskQueue resource_adaptation_queue_;
rtc::TaskQueue encoder_queue_;
RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder);
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index cf9e790fcf..46f218e310 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -26,6 +26,8 @@
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "api/video_codecs/vp8_temporal_layers_factory.h"
+#include "call/adaptation/test/fake_adaptation_constraint.h"
+#include "call/adaptation/test/fake_adaptation_listener.h"
#include "call/adaptation/test/fake_resource.h"
#include "common_video/h264/h264_common.h"
#include "common_video/include/video_frame_buffer.h"
@@ -34,8 +36,10 @@
#include "modules/video_coding/utility/quality_scaler.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/fake_clock.h"
+#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
#include "system_wrappers/include/sleep.h"
@@ -52,7 +56,12 @@ namespace webrtc {
using ::testing::_;
using ::testing::AllOf;
+using ::testing::Eq;
using ::testing::Field;
+using ::testing::Ge;
+using ::testing::Gt;
+using ::testing::Le;
+using ::testing::Lt;
using ::testing::Matcher;
using ::testing::NiceMock;
using ::testing::Return;
@@ -125,46 +134,179 @@ class CpuOveruseDetectorProxy : public OveruseFrameDetector {
public:
explicit CpuOveruseDetectorProxy(CpuOveruseMetricsObserver* metrics_observer)
: OveruseFrameDetector(metrics_observer),
- last_target_framerate_fps_(-1) {}
+ last_target_framerate_fps_(-1),
+ framerate_updated_event_(true /* manual_reset */,
+ false /* initially_signaled */) {}
virtual ~CpuOveruseDetectorProxy() {}
void OnTargetFramerateUpdated(int framerate_fps) override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
last_target_framerate_fps_ = framerate_fps;
OveruseFrameDetector::OnTargetFramerateUpdated(framerate_fps);
+ framerate_updated_event_.Set();
}
int GetLastTargetFramerate() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
return last_target_framerate_fps_;
}
CpuOveruseOptions GetOptions() { return options_; }
+ rtc::Event* framerate_updated_event() { return &framerate_updated_event_; }
+
private:
- rtc::CriticalSection lock_;
+ Mutex lock_;
int last_target_framerate_fps_ RTC_GUARDED_BY(lock_);
+ rtc::Event framerate_updated_event_;
};
class FakeQualityScalerQpUsageHandlerCallback
: public QualityScalerQpUsageHandlerCallbackInterface {
public:
FakeQualityScalerQpUsageHandlerCallback()
- : QualityScalerQpUsageHandlerCallbackInterface() {}
- ~FakeQualityScalerQpUsageHandlerCallback() override {}
+ : QualityScalerQpUsageHandlerCallbackInterface(),
+ qp_usage_handled_event_(/*manual_reset=*/true,
+ /*initially_signaled=*/false),
+ clear_qp_samples_result_(absl::nullopt) {}
+ ~FakeQualityScalerQpUsageHandlerCallback() override {
+ RTC_DCHECK(clear_qp_samples_result_.has_value());
+ }
void OnQpUsageHandled(bool clear_qp_samples) override {
clear_qp_samples_result_ = clear_qp_samples;
+ qp_usage_handled_event_.Set();
}
+ bool WaitForQpUsageHandled() { return qp_usage_handled_event_.Wait(5000); }
+
absl::optional<bool> clear_qp_samples_result() const {
return clear_qp_samples_result_;
}
private:
+ rtc::Event qp_usage_handled_event_;
absl::optional<bool> clear_qp_samples_result_;
};
+class FakeVideoSourceRestrictionsListener
+ : public VideoSourceRestrictionsListener {
+ public:
+ FakeVideoSourceRestrictionsListener()
+ : was_restrictions_updated_(false), restrictions_updated_event_() {}
+ ~FakeVideoSourceRestrictionsListener() override {
+ RTC_DCHECK(was_restrictions_updated_);
+ }
+
+ rtc::Event* restrictions_updated_event() {
+ return &restrictions_updated_event_;
+ }
+
+ // VideoSourceRestrictionsListener implementation.
+ void OnVideoSourceRestrictionsUpdated(
+ VideoSourceRestrictions restrictions,
+ const VideoAdaptationCounters& adaptation_counters,
+ rtc::scoped_refptr<Resource> reason,
+ const VideoSourceRestrictions& unfiltered_restrictions) override {
+ was_restrictions_updated_ = true;
+ restrictions_updated_event_.Set();
+ }
+
+ private:
+ bool was_restrictions_updated_;
+ rtc::Event restrictions_updated_event_;
+};
+
+auto WantsFps(Matcher<int> fps_matcher) {
+ return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
+ fps_matcher);
+}
+
+auto WantsMaxPixels(Matcher<int> max_pixel_matcher) {
+ return Field("max_pixel_count", &rtc::VideoSinkWants::max_pixel_count,
+ AllOf(max_pixel_matcher, Gt(0)));
+}
+
+auto ResolutionMax() {
+ return AllOf(
+ WantsMaxPixels(Eq(std::numeric_limits<int>::max())),
+ Field("target_pixel_count", &rtc::VideoSinkWants::target_pixel_count,
+ Eq(absl::nullopt)));
+}
+
+auto FpsMax() {
+ return WantsFps(Eq(kDefaultFramerate));
+}
+
+auto FpsUnlimited() {
+ return WantsFps(Eq(std::numeric_limits<int>::max()));
+}
+
+auto FpsMatchesResolutionMax(Matcher<int> fps_matcher) {
+ return AllOf(WantsFps(fps_matcher), ResolutionMax());
+}
+
+auto FpsMaxResolutionMatches(Matcher<int> pixel_matcher) {
+ return AllOf(FpsMax(), WantsMaxPixels(pixel_matcher));
+}
+
+auto FpsMaxResolutionMax() {
+ return AllOf(FpsMax(), ResolutionMax());
+}
+
+auto UnlimitedSinkWants() {
+ return AllOf(FpsUnlimited(), ResolutionMax());
+}
+
+auto FpsInRangeForPixelsInBalanced(int last_frame_pixels) {
+ Matcher<int> fps_range_matcher;
+
+ if (last_frame_pixels <= 320 * 240) {
+ fps_range_matcher = AllOf(Ge(7), Le(10));
+ } else if (last_frame_pixels <= 480 * 270) {
+ fps_range_matcher = AllOf(Ge(10), Le(15));
+ } else if (last_frame_pixels <= 640 * 480) {
+ fps_range_matcher = Ge(15);
+ } else {
+ fps_range_matcher = Eq(kDefaultFramerate);
+ }
+ return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
+ fps_range_matcher);
+}
+
+auto FpsEqResolutionEqTo(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsMaxResolutionLt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(FpsMax(), WantsMaxPixels(Lt(other_wants.max_pixel_count)));
+}
+
+auto FpsMaxResolutionGt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(FpsMax(), WantsMaxPixels(Gt(other_wants.max_pixel_count)));
+}
+
+auto FpsLtResolutionEq(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Lt(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsGtResolutionEq(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Gt(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsEqResolutionLt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Lt(other_wants.max_pixel_count)));
+}
+
+auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Gt(other_wants.max_pixel_count)));
+}
+
class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
public:
VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy,
@@ -178,13 +320,53 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
overuse_detector_proxy_ =
new CpuOveruseDetectorProxy(stats_proxy)),
task_queue_factory),
- fake_cpu_resource_(std::make_unique<FakeResource>("FakeResource[CPU]")),
- fake_quality_resource_(
- std::make_unique<FakeResource>("FakeResource[QP]")) {
- InjectAdaptationResource(fake_quality_resource_.get(),
+ fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")),
+ fake_quality_resource_(FakeResource::Create("FakeResource[QP]")),
+ fake_adaptation_constraint_("FakeAdaptationConstraint") {
+ InjectAdaptationResource(fake_quality_resource_,
VideoAdaptationReason::kQuality);
- InjectAdaptationResource(fake_cpu_resource_.get(),
- VideoAdaptationReason::kCpu);
+ InjectAdaptationResource(fake_cpu_resource_, VideoAdaptationReason::kCpu);
+ InjectAdaptationConstraint(&fake_adaptation_constraint_);
+ }
+
+ void SetSourceAndWaitForRestrictionsUpdated(
+ rtc::VideoSourceInterface<VideoFrame>* source,
+ const DegradationPreference& degradation_preference) {
+ FakeVideoSourceRestrictionsListener listener;
+ AddRestrictionsListenerForTesting(&listener);
+ SetSource(source, degradation_preference);
+ listener.restrictions_updated_event()->Wait(5000);
+ RemoveRestrictionsListenerForTesting(&listener);
+ }
+
+ void SetSourceAndWaitForFramerateUpdated(
+ rtc::VideoSourceInterface<VideoFrame>* source,
+ const DegradationPreference& degradation_preference) {
+ overuse_detector_proxy_->framerate_updated_event()->Reset();
+ SetSource(source, degradation_preference);
+ overuse_detector_proxy_->framerate_updated_event()->Wait(5000);
+ }
+
+ void OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate target_bitrate,
+ DataRate stable_target_bitrate,
+ DataRate link_allocation,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms,
+ double cwnd_reduce_ratio) {
+ OnBitrateUpdated(target_bitrate, stable_target_bitrate, link_allocation,
+ fraction_lost, round_trip_time_ms, cwnd_reduce_ratio);
+ // Bitrate is updated on the encoder queue.
+ WaitUntilTaskQueueIsIdle();
+ // Give the managed resources time to react to the new bitrate.
+ // TODO(hbos): Can we await an appropriate event instead?
+ WaitUntilAdaptationTaskQueueIsIdle();
+ }
+
+ void WaitUntilAdaptationTaskQueueIsIdle() {
+ rtc::Event event;
+ resource_adaptation_queue()->PostTask([&event] { event.Set(); });
+ ASSERT_TRUE(event.Wait(5000));
}
// This is used as a synchronisation mechanism, to make sure that the
@@ -198,16 +380,16 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
// Triggers resource usage measurements on the fake CPU resource.
void TriggerCpuOveruse() {
rtc::Event event;
- encoder_queue()->PostTask([this, &event] {
- fake_cpu_resource_->set_usage_state(ResourceUsageState::kOveruse);
+ resource_adaptation_queue()->PostTask([this, &event] {
+ fake_cpu_resource_->SetUsageState(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
void TriggerCpuUnderuse() {
rtc::Event event;
- encoder_queue()->PostTask([this, &event] {
- fake_cpu_resource_->set_usage_state(ResourceUsageState::kUnderuse);
+ resource_adaptation_queue()->PostTask([this, &event] {
+ fake_cpu_resource_->SetUsageState(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -216,16 +398,16 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
// Triggers resource usage measurements on the fake quality resource.
void TriggerQualityLow() {
rtc::Event event;
- encoder_queue()->PostTask([this, &event] {
- fake_quality_resource_->set_usage_state(ResourceUsageState::kOveruse);
+ resource_adaptation_queue()->PostTask([this, &event] {
+ fake_quality_resource_->SetUsageState(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
void TriggerQualityHigh() {
rtc::Event event;
- encoder_queue()->PostTask([this, &event] {
- fake_quality_resource_->set_usage_state(ResourceUsageState::kUnderuse);
+ resource_adaptation_queue()->PostTask([this, &event] {
+ fake_quality_resource_->SetUsageState(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -235,21 +417,22 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
// QualityScalerResource. Returns whether or not QP samples would have been
// cleared if this had been a real signal from the QualityScaler.
bool TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared() {
- rtc::Event event;
rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback =
new FakeQualityScalerQpUsageHandlerCallback();
- encoder_queue()->PostTask([this, &event, callback] {
+ encoder_queue()->PostTask([this, callback] {
+ // This will cause a "ping" between adaptation task queue and encoder
+ // queue. When we have the result, the |callback| will be notified.
quality_scaler_resource_for_testing()->OnReportQpUsageHigh(callback);
- event.Set();
});
- EXPECT_TRUE(event.Wait(5000));
+ EXPECT_TRUE(callback->WaitForQpUsageHandled());
EXPECT_TRUE(callback->clear_qp_samples_result().has_value());
return callback->clear_qp_samples_result().value();
}
CpuOveruseDetectorProxy* overuse_detector_proxy_;
- std::unique_ptr<FakeResource> fake_cpu_resource_;
- std::unique_ptr<FakeResource> fake_quality_resource_;
+ rtc::scoped_refptr<FakeResource> fake_cpu_resource_;
+ rtc::scoped_refptr<FakeResource> fake_quality_resource_;
+ FakeAdaptationConstraint fake_adaptation_constraint_;
};
class VideoStreamFactory
@@ -314,17 +497,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
~AdaptingFrameForwarder() override {}
void set_adaptation_enabled(bool enabled) {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
adaptation_enabled_ = enabled;
}
bool adaption_enabled() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return adaptation_enabled_;
}
rtc::VideoSinkWants last_wants() const {
- rtc::CritScope cs(&crit_);
+ MutexLock lock(&mutex_);
return last_wants_;
}
@@ -373,14 +556,14 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
- rtc::CritScope cs(&crit_);
- last_wants_ = sink_wants();
+ MutexLock lock(&mutex_);
+ last_wants_ = sink_wants_locked();
adapter_.OnSinkWants(wants);
- test::FrameForwarder::AddOrUpdateSink(sink, wants);
+ test::FrameForwarder::AddOrUpdateSinkLocked(sink, wants);
}
cricket::VideoAdapter adapter_;
- bool adaptation_enabled_ RTC_GUARDED_BY(crit_);
- rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(crit_);
+ bool adaptation_enabled_ RTC_GUARDED_BY(mutex_);
+ rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_);
absl::optional<int> last_width_;
absl::optional<int> last_height_;
};
@@ -394,45 +577,53 @@ class MockableSendStatisticsProxy : public SendStatisticsProxy {
: SendStatisticsProxy(clock, config, content_type) {}
VideoSendStream::Stats GetStats() override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mock_stats_)
return *mock_stats_;
return SendStatisticsProxy::GetStats();
}
int GetInputFrameRate() const override {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
if (mock_stats_)
return mock_stats_->input_frame_rate;
return SendStatisticsProxy::GetInputFrameRate();
}
void SetMockStats(const VideoSendStream::Stats& stats) {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
mock_stats_.emplace(stats);
}
void ResetMockStats() {
- rtc::CritScope cs(&lock_);
+ MutexLock lock(&lock_);
mock_stats_.reset();
}
private:
- rtc::CriticalSection lock_;
+ mutable Mutex lock_;
absl::optional<VideoSendStream::Stats> mock_stats_ RTC_GUARDED_BY(lock_);
};
class MockBitrateObserver : public VideoBitrateAllocationObserver {
public:
- MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoBitrateAllocation&),
+ (override));
};
class MockEncoderSelector
: public VideoEncoderFactory::EncoderSelectorInterface {
public:
- MOCK_METHOD1(OnCurrentEncoder, void(const SdpVideoFormat& format));
- MOCK_METHOD1(OnAvailableBitrate,
- absl::optional<SdpVideoFormat>(const DataRate& rate));
- MOCK_METHOD0(OnEncoderBroken, absl::optional<SdpVideoFormat>());
+ MOCK_METHOD(void,
+ OnCurrentEncoder,
+ (const SdpVideoFormat& format),
+ (override));
+ MOCK_METHOD(absl::optional<SdpVideoFormat>,
+ OnAvailableBitrate,
+ (const DataRate& rate),
+ (override));
+ MOCK_METHOD(absl::optional<SdpVideoFormat>, OnEncoderBroken, (), (override));
};
} // namespace
@@ -516,6 +707,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
if (payload_name == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.numberOfSpatialLayers = num_spatial_layers;
+ vp9_settings.automaticResizeOn = num_spatial_layers <= 1;
video_encoder_config.encoder_specific_settings =
new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
@@ -595,7 +787,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
.Times(1);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -605,106 +797,6 @@ class VideoStreamEncoderTest : public ::testing::Test {
WaitForEncodedFrame(1);
}
- void VerifyNoLimitation(const rtc::VideoSinkWants& wants) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsEqResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionMax(const rtc::VideoSinkWants& wants) {
- EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
- EXPECT_GT(wants1.max_pixel_count, 0);
- }
-
- void VerifyFpsMaxResolutionGt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsLtResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_LT(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsGtResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_GT(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsEqResolutionLt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
- EXPECT_GT(wants1.max_pixel_count, 0);
- }
-
- void VerifyFpsEqResolutionGt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants,
- int pixel_count) {
- EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
- EXPECT_LT(wants.max_pixel_count, pixel_count);
- EXPECT_GT(wants.max_pixel_count, 0);
- }
-
- void VerifyFpsLtResolutionMax(const rtc::VideoSinkWants& wants, int fps) {
- EXPECT_LT(wants.max_framerate_fps, fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsEqResolutionMax(const rtc::VideoSinkWants& wants,
- int expected_fps) {
- EXPECT_EQ(expected_fps, wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyBalancedModeFpsRange(const rtc::VideoSinkWants& wants,
- int last_frame_pixels) {
- // Balanced mode should always scale FPS to the desired range before
- // attempting to scale resolution.
- int fps_limit = wants.max_framerate_fps;
- if (last_frame_pixels <= 320 * 240) {
- EXPECT_LE(7, fps_limit);
- EXPECT_LE(fps_limit, 10);
- } else if (last_frame_pixels <= 480 * 270) {
- EXPECT_LE(10, fps_limit);
- EXPECT_LE(fps_limit, 15);
- } else if (last_frame_pixels <= 640 * 480) {
- EXPECT_LE(15, fps_limit);
- } else {
- EXPECT_EQ(kDefaultFramerate, fps_limit);
- }
- }
-
void WaitForEncodedFrame(int64_t expected_ntp_time) {
sink_.WaitForEncodedFrame(expected_ntp_time);
fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
@@ -737,17 +829,17 @@ class VideoStreamEncoderTest : public ::testing::Test {
TestEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {}
VideoCodec codec_config() const {
- rtc::CritScope lock(&crit_sect_);
+ MutexLock lock(&mutex_);
return config_;
}
void BlockNextEncode() {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
block_next_encode_ = true;
}
VideoEncoder::EncoderInfo GetEncoderInfo() const override {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
EncoderInfo info;
if (initialized_ == EncoderState::kInitialized) {
if (quality_scaling_) {
@@ -770,7 +862,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) override {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
encoded_image_callback_ = callback;
return FakeEncoder::RegisterEncodeCompleteCallback(callback);
}
@@ -779,60 +871,60 @@ class VideoStreamEncoderTest : public ::testing::Test {
void CheckLastTimeStampsMatch(int64_t ntp_time_ms,
uint32_t timestamp) const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
EXPECT_EQ(timestamp_, timestamp);
EXPECT_EQ(ntp_time_ms_, ntp_time_ms);
}
void SetQualityScaling(bool b) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
quality_scaling_ = b;
}
void SetRequestedResolutionAlignment(int requested_resolution_alignment) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
requested_resolution_alignment_ = requested_resolution_alignment;
}
void SetIsHardwareAccelerated(bool is_hardware_accelerated) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
is_hardware_accelerated_ = is_hardware_accelerated;
}
void SetTemporalLayersSupported(size_t spatial_idx, bool supported) {
RTC_DCHECK_LT(spatial_idx, kMaxSpatialLayers);
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
temporal_layers_supported_[spatial_idx] = supported;
}
void SetResolutionBitrateLimits(
std::vector<ResolutionBitrateLimits> thresholds) {
- rtc::CritScope cs(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
resolution_bitrate_limits_ = thresholds;
}
void ForceInitEncodeFailure(bool force_failure) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
force_init_encode_failed_ = force_failure;
}
void SimulateOvershoot(double rate_factor) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
rate_factor_ = rate_factor;
}
uint32_t GetLastFramerate() const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
return last_framerate_;
}
VideoFrame::UpdateRect GetLastUpdateRect() const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
return last_update_rect_;
}
const std::vector<VideoFrameType>& LastFrameTypes() const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
return last_frame_types_;
}
@@ -841,27 +933,25 @@ class VideoStreamEncoderTest : public ::testing::Test {
keyframe ? VideoFrameType::kVideoFrameKey
: VideoFrameType::kVideoFrameDelta};
{
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
last_frame_types_ = frame_type;
}
FakeEncoder::Encode(input_image, &frame_type);
}
void InjectEncodedImage(const EncodedImage& image) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
encoded_image_callback_->OnEncodedImage(image, nullptr, nullptr);
}
- void InjectEncodedImage(const EncodedImage& image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) {
- rtc::CritScope lock(&local_crit_sect_);
- encoded_image_callback_->OnEncodedImage(image, codec_specific_info,
- fragmentation);
+ void SetEncodedImageData(
+ rtc::scoped_refptr<EncodedImageBufferInterface> encoded_image_data) {
+ MutexLock lock(&local_mutex_);
+ encoded_image_data_ = encoded_image_data;
}
void ExpectNullFrame() {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
expect_null_frame_ = true;
}
@@ -873,12 +963,12 @@ class VideoStreamEncoderTest : public ::testing::Test {
}
int GetNumEncoderInitializations() const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
return num_encoder_initializations_;
}
int GetNumSetRates() const {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
return num_set_rates_;
}
@@ -887,7 +977,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
const std::vector<VideoFrameType>* frame_types) override {
bool block_encode;
{
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
if (expect_null_frame_) {
EXPECT_EQ(input_image.timestamp(), 0u);
EXPECT_EQ(input_image.width(), 1);
@@ -914,11 +1004,32 @@ class VideoStreamEncoderTest : public ::testing::Test {
return result;
}
+ std::unique_ptr<RTPFragmentationHeader> EncodeHook(
+ EncodedImage* encoded_image,
+ CodecSpecificInfo* codec_specific) override {
+ {
+ MutexLock lock(&mutex_);
+ codec_specific->codecType = config_.codecType;
+ }
+ MutexLock lock(&local_mutex_);
+ if (encoded_image_data_) {
+ encoded_image->SetEncodedData(encoded_image_data_);
+ if (codec_specific->codecType == kVideoCodecH264) {
+ auto fragmentation = std::make_unique<RTPFragmentationHeader>();
+ fragmentation->VerifyAndAllocateFragmentationHeader(1);
+ fragmentation->fragmentationOffset[0] = 4;
+ fragmentation->fragmentationLength[0] = encoded_image->size() - 4;
+ return fragmentation;
+ }
+ }
+ return nullptr;
+ }
+
int32_t InitEncode(const VideoCodec* config,
const Settings& settings) override {
int res = FakeEncoder::InitEncode(config, settings);
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
EXPECT_EQ(initialized_, EncoderState::kUninitialized);
++num_encoder_initializations_;
@@ -940,14 +1051,14 @@ class VideoStreamEncoderTest : public ::testing::Test {
}
int32_t Release() override {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
EXPECT_NE(initialized_, EncoderState::kUninitialized);
initialized_ = EncoderState::kUninitialized;
return FakeEncoder::Release();
}
void SetRates(const RateControlParameters& parameters) {
- rtc::CritScope lock(&local_crit_sect_);
+ MutexLock lock(&local_mutex_);
num_set_rates_++;
VideoBitrateAllocation adjusted_rate_allocation;
for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
@@ -967,43 +1078,44 @@ class VideoStreamEncoderTest : public ::testing::Test {
FakeEncoder::SetRates(adjusted_paramters);
}
- rtc::CriticalSection local_crit_sect_;
+ mutable Mutex local_mutex_;
enum class EncoderState {
kUninitialized,
kInitializationFailed,
kInitialized
- } initialized_ RTC_GUARDED_BY(local_crit_sect_) =
- EncoderState::kUninitialized;
- bool block_next_encode_ RTC_GUARDED_BY(local_crit_sect_) = false;
+ } initialized_ RTC_GUARDED_BY(local_mutex_) = EncoderState::kUninitialized;
+ bool block_next_encode_ RTC_GUARDED_BY(local_mutex_) = false;
rtc::Event continue_encode_event_;
- uint32_t timestamp_ RTC_GUARDED_BY(local_crit_sect_) = 0;
- int64_t ntp_time_ms_ RTC_GUARDED_BY(local_crit_sect_) = 0;
- int last_input_width_ RTC_GUARDED_BY(local_crit_sect_) = 0;
- int last_input_height_ RTC_GUARDED_BY(local_crit_sect_) = 0;
- bool quality_scaling_ RTC_GUARDED_BY(local_crit_sect_) = true;
- int requested_resolution_alignment_ RTC_GUARDED_BY(local_crit_sect_) = 1;
- bool is_hardware_accelerated_ RTC_GUARDED_BY(local_crit_sect_) = false;
+ uint32_t timestamp_ RTC_GUARDED_BY(local_mutex_) = 0;
+ int64_t ntp_time_ms_ RTC_GUARDED_BY(local_mutex_) = 0;
+ int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0;
+ int last_input_height_ RTC_GUARDED_BY(local_mutex_) = 0;
+ bool quality_scaling_ RTC_GUARDED_BY(local_mutex_) = true;
+ int requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1;
+ bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false;
+ rtc::scoped_refptr<EncodedImageBufferInterface> encoded_image_data_
+ RTC_GUARDED_BY(local_mutex_);
std::unique_ptr<Vp8FrameBufferController> frame_buffer_controller_
- RTC_GUARDED_BY(local_crit_sect_);
+ RTC_GUARDED_BY(local_mutex_);
absl::optional<bool>
temporal_layers_supported_[kMaxSpatialLayers] RTC_GUARDED_BY(
- local_crit_sect_);
- bool force_init_encode_failed_ RTC_GUARDED_BY(local_crit_sect_) = false;
- double rate_factor_ RTC_GUARDED_BY(local_crit_sect_) = 1.0;
- uint32_t last_framerate_ RTC_GUARDED_BY(local_crit_sect_) = 0;
+ local_mutex_);
+ bool force_init_encode_failed_ RTC_GUARDED_BY(local_mutex_) = false;
+ double rate_factor_ RTC_GUARDED_BY(local_mutex_) = 1.0;
+ uint32_t last_framerate_ RTC_GUARDED_BY(local_mutex_) = 0;
absl::optional<VideoEncoder::RateControlParameters>
last_rate_control_settings_;
- VideoFrame::UpdateRect last_update_rect_
- RTC_GUARDED_BY(local_crit_sect_) = {0, 0, 0, 0};
+ VideoFrame::UpdateRect last_update_rect_ RTC_GUARDED_BY(local_mutex_) = {
+ 0, 0, 0, 0};
std::vector<VideoFrameType> last_frame_types_;
bool expect_null_frame_ = false;
- EncodedImageCallback* encoded_image_callback_
- RTC_GUARDED_BY(local_crit_sect_) = nullptr;
+ EncodedImageCallback* encoded_image_callback_ RTC_GUARDED_BY(local_mutex_) =
+ nullptr;
NiceMock<MockFecControllerOverride> fec_controller_override_;
- int num_encoder_initializations_ RTC_GUARDED_BY(local_crit_sect_) = 0;
+ int num_encoder_initializations_ RTC_GUARDED_BY(local_mutex_) = 0;
std::vector<ResolutionBitrateLimits> resolution_bitrate_limits_
- RTC_GUARDED_BY(local_crit_sect_);
- int num_set_rates_ RTC_GUARDED_BY(local_crit_sect_) = 0;
+ RTC_GUARDED_BY(local_mutex_);
+ int num_set_rates_ RTC_GUARDED_BY(local_mutex_) = 0;
};
class TestSink : public VideoStreamEncoder::EncoderSink {
@@ -1022,7 +1134,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
if (!encoded_frame_event_.Wait(timeout_ms))
return false;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
timestamp = last_timestamp_;
}
test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
@@ -1040,7 +1152,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
uint32_t width = 0;
uint32_t height = 0;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
width = last_width_;
height = last_height_;
}
@@ -1052,7 +1164,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
int width = 0;
int height = 0;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
width = last_width_;
height = last_height_;
}
@@ -1063,7 +1175,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
void CheckLastFrameRotationMatches(VideoRotation expected_rotation) {
VideoRotation rotation;
{
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
rotation = last_rotation_;
}
EXPECT_EQ(expected_rotation, rotation);
@@ -1076,37 +1188,37 @@ class VideoStreamEncoderTest : public ::testing::Test {
}
void SetExpectNoFrames() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
expect_frames_ = false;
}
int number_of_reconfigurations() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return number_of_reconfigurations_;
}
int last_min_transmit_bitrate() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return min_transmit_bitrate_bps_;
}
void SetNumExpectedLayers(size_t num_layers) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
num_expected_layers_ = num_layers;
}
int64_t GetLastCaptureTimeMs() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return last_capture_time_ms_;
}
std::vector<uint8_t> GetLastEncodedImageData() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return std::move(last_encoded_image_data_);
}
RTPFragmentationHeader GetLastFragmentation() {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return std::move(last_fragmentation_);
}
@@ -1115,7 +1227,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
EXPECT_TRUE(expect_frames_);
last_encoded_image_data_ = std::vector<uint8_t>(
encoded_image.data(), encoded_image.data() + encoded_image.size());
@@ -1144,12 +1256,12 @@ class VideoStreamEncoderTest : public ::testing::Test {
bool is_svc,
VideoEncoderConfig::ContentType content_type,
int min_transmit_bitrate_bps) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
++number_of_reconfigurations_;
min_transmit_bitrate_bps_ = min_transmit_bitrate_bps;
}
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
TestEncoder* test_encoder_;
rtc::Event encoded_frame_event_;
std::vector<uint8_t> last_encoded_image_data_;
@@ -1175,21 +1287,21 @@ class VideoStreamEncoderTest : public ::testing::Test {
std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
const VideoCodec& codec) override {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
codec_config_ = codec;
return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec);
}
VideoCodec codec_config() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return codec_config_;
}
private:
std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
- rtc::CriticalSection crit_;
- VideoCodec codec_config_ RTC_GUARDED_BY(crit_);
+ mutable Mutex mutex_;
+ VideoCodec codec_config_ RTC_GUARDED_BY(mutex_);
};
VideoSendStream::Config video_send_config_;
@@ -1209,7 +1321,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
};
TEST_F(VideoStreamEncoderTest, EncodeOneFrame) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1230,7 +1342,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1244,23 +1356,23 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
}
TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
WaitForEncodedFrame(1);
- video_stream_encoder_->OnBitrateUpdated(DataRate::BitsPerSec(0),
- DataRate::BitsPerSec(0),
- DataRate::BitsPerSec(0), 0, 0, 0);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), DataRate::BitsPerSec(0),
+ 0, 0, 0);
// The encoder will cache up to one frame for a short duration. Adding two
// frames means that the first frame will be dropped and the second frame will
// be sent when the encoder is resumed.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1271,7 +1383,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) {
}
TEST_F(VideoStreamEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1287,7 +1399,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
}
TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1303,7 +1415,7 @@ TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) {
}
TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1322,7 +1434,7 @@ TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) {
}
TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420Conversion) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1344,7 +1456,7 @@ TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420ConversionWithCrop) {
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
// Capture a frame at codec_width_/codec_height_.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1366,14 +1478,14 @@ TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420ConversionWithCrop) {
}
TEST_F(VideoStreamEncoderTest, DropsFramesWhenCongestionWindowPushbackSet) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
WaitForEncodedFrame(1);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0.5);
@@ -1392,7 +1504,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenCongestionWindowPushbackSet) {
TEST_F(VideoStreamEncoderTest,
ConfigureEncoderTriggersOnEncoderConfigurationChanged) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1421,7 +1533,7 @@ TEST_F(VideoStreamEncoderTest,
}
TEST_F(VideoStreamEncoderTest, FrameResolutionChangeReconfigureEncoder) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1449,7 +1561,7 @@ TEST_F(VideoStreamEncoderTest, FrameResolutionChangeReconfigureEncoder) {
TEST_F(VideoStreamEncoderTest,
EncoderInstanceDestroyedBeforeAnotherInstanceCreated) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1473,7 +1585,7 @@ TEST_F(VideoStreamEncoderTest,
}
TEST_F(VideoStreamEncoderTest, BitrateLimitsChangeReconfigureRateAllocator) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1520,7 +1632,7 @@ TEST_F(VideoStreamEncoderTest, BitrateLimitsChangeReconfigureRateAllocator) {
TEST_F(VideoStreamEncoderTest,
IntersectionOfEncoderAndAppBitrateLimitsUsedWhenBothProvided) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1568,7 +1680,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest,
EncoderAndAppLimitsDontIntersectEncoderLimitsIgnored) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1604,7 +1716,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest,
EncoderRecommendedMaxAndMinBitratesUsedForGivenResolution) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1674,7 +1786,7 @@ TEST_F(VideoStreamEncoderTest,
}
TEST_F(VideoStreamEncoderTest, EncoderRecommendedMaxBitrateCapsTargetBitrate) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1740,7 +1852,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsResolutionAlignment) {
constexpr int kRequestedResolutionAlignment = 7;
video_source_.set_adaptation_enabled(true);
fake_encoder_.SetRequestedResolutionAlignment(kRequestedResolutionAlignment);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -1774,13 +1886,13 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
video_source_.set_adaptation_enabled(true);
// Enable BALANCED preference, no initial limitation.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_stream_encoder_->SetSource(&video_source_,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1805,9 +1917,10 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
t += frame_interval_ms;
video_stream_encoder_->TriggerCpuOveruse();
- VerifyBalancedModeFpsRange(
+ EXPECT_THAT(
video_source_.sink_wants(),
- *video_source_.last_sent_width() * *video_source_.last_sent_height());
+ FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
+ *video_source_.last_sent_height()));
} while (video_source_.sink_wants().max_pixel_count <
last_wants.max_pixel_count ||
video_source_.sink_wants().max_framerate_fps <
@@ -1840,16 +1953,17 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
t += frame_interval_ms;
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyBalancedModeFpsRange(
+ EXPECT_THAT(
video_source_.sink_wants(),
- *video_source_.last_sent_width() * *video_source_.last_sent_height());
+ FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
+ *video_source_.last_sent_height()));
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count >
last_wants.max_pixel_count ||
video_source_.sink_wants().max_framerate_fps >
last_wants.max_framerate_fps);
}
- VerifyFpsMaxResolutionMax(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
stats_proxy_->ResetMockStats();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
@@ -1859,12 +1973,125 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
video_stream_encoder_->Stop();
}
-TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
+TEST_F(VideoStreamEncoderTest,
+ SinkWantsNotChangedByResourceLimitedBeforeDegradationPreferenceChange) {
video_stream_encoder_->OnBitrateUpdated(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
+
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+
+ int64_t ntp_time = kFrameIntervalMs;
+
+ // Force an input frame rate to be available, or the adaptation call won't
+ // know what framerate to adapt form.
+ const int kInputFps = 30;
+ VideoSendStream::Stats stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = kInputFps;
+ stats_proxy_->SetMockStats(stats);
+
+ video_source_.set_adaptation_enabled(true);
+ video_stream_encoder_->SetSource(
+ &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+
+ // Trigger CPU overuse.
+ video_stream_encoder_->TriggerCpuOveruse();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+
+ EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ video_source_.sink_wants().max_pixel_count);
+ // Some framerate constraint should be set.
+ int restricted_fps = video_source_.sink_wants().max_framerate_fps;
+ EXPECT_LT(restricted_fps, kInputFps);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += 100;
+
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
+ &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
+ // Give the encoder queue time to process the change in degradation preference
+ // by waiting for an encoded frame.
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+
+ video_stream_encoder_->TriggerQualityLow();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+
+ // Some resolution constraint should be set.
+ EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
+ EXPECT_LT(video_source_.sink_wants().max_pixel_count,
+ kFrameWidth * kFrameHeight);
+ EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
+
+ int pixel_count = video_source_.sink_wants().max_pixel_count;
+ // Triggering a CPU underuse should not change the sink wants since it has
+ // not been overused for resolution since we changed degradation preference.
+ video_stream_encoder_->TriggerCpuUnderuse();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+ EXPECT_EQ(video_source_.sink_wants().max_pixel_count, pixel_count);
+ EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
+
+ // Change the degradation preference back. CPU underuse should not adapt since
+ // QP is most limited.
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
+ &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += 100;
+ // Resolution adaptations is gone after changing degradation preference.
+ EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ video_source_.sink_wants().max_pixel_count);
+ // The fps adaptation from above is now back.
+ EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps);
+
+ // Trigger CPU underuse.
+ video_stream_encoder_->TriggerCpuUnderuse();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+ EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps);
+
+ // Trigger QP underuse, fps should return to normal.
+ video_stream_encoder_->TriggerQualityHigh();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+ EXPECT_THAT(video_source_.sink_wants(), FpsMax());
+
+ video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
@@ -1892,7 +2119,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
// Set new source, switch to maintain-resolution.
test::FrameForwarder new_video_source;
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
@@ -1901,7 +2128,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Initially no degradation registered.
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt form.
@@ -1923,15 +2150,15 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
EXPECT_LT(new_video_source.sink_wants().max_framerate_fps, kInputFps);
// Turn off degradation completely.
- video_stream_encoder_->SetSource(&new_video_source,
- webrtc::DegradationPreference::DISABLED);
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
+ &new_video_source, webrtc::DegradationPreference::DISABLED);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
new_video_source.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
@@ -1940,10 +2167,10 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
frame_timestamp += kFrameIntervalMs;
// Still no degradation.
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Calling SetSource with resolution scaling enabled apply the old SinkWants.
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
@@ -1957,7 +2184,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps);
// Calling SetSource with framerate scaling enabled apply the old SinkWants.
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
@@ -1974,7 +2201,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
}
TEST_F(VideoStreamEncoderTest, StatsTracksQualityAdaptationStats) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2010,7 +2237,7 @@ TEST_F(VideoStreamEncoderTest, StatsTracksQualityAdaptationStats) {
}
TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2046,7 +2273,7 @@ TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) {
}
TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2117,7 +2344,7 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
}
TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsQualityAdaptation) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2180,7 +2407,7 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsQualityAdaptation) {
TEST_F(VideoStreamEncoderTest,
QualityAdaptationStatsAreResetWhenScalerIsDisabled) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2238,7 +2465,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest,
StatsTracksCpuAdaptationStatsWhenSwitchingSource_Balanced) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2267,7 +2494,7 @@ TEST_F(VideoStreamEncoderTest,
// Set new degradation preference should clear restrictions since we changed
// from BALANCED.
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
@@ -2291,8 +2518,8 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Back to BALANCED, should clear the restrictions again.
- video_stream_encoder_->SetSource(&source,
- webrtc::DegradationPreference::BALANCED);
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
+ &source, webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
@@ -2305,7 +2532,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest,
StatsTracksCpuAdaptationStatsWhenSwitchingSource) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2444,13 +2671,13 @@ TEST_F(VideoStreamEncoderTest,
ScalingUpAndDownDoesNothingWithMaintainResolution) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
// Expect no scaling to begin with.
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
@@ -2495,7 +2722,7 @@ TEST_F(VideoStreamEncoderTest,
SkipsSameAdaptDownRequest_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2507,13 +2734,14 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2530,7 +2758,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2541,11 +2769,12 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
@@ -2573,7 +2802,7 @@ TEST_F(VideoStreamEncoderTest,
NoChangeForInitialNormalUsage_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2585,13 +2814,13 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2602,7 +2831,7 @@ TEST_F(VideoStreamEncoderTest,
NoChangeForInitialNormalUsage_MaintainResolutionMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2614,13 +2843,13 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2630,7 +2859,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2642,14 +2871,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2660,7 +2889,7 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {
TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2672,14 +2901,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) {
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2691,7 +2920,7 @@ TEST_F(VideoStreamEncoderTest,
AdaptsResolutionForLowQuality_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2704,7 +2933,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2712,13 +2941,14 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
WaitForEncodedFrame(2);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2731,7 +2961,7 @@ TEST_F(VideoStreamEncoderTest,
const int kWidth = 1280;
const int kHeight = 720;
const int kInputFps = 30;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2743,33 +2973,35 @@ TEST_F(VideoStreamEncoderTest,
// Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE).
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
sink_.WaitForEncodedFrame(2);
- VerifyFpsMaxResolutionLt(video_source_.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
// Enable MAINTAIN_RESOLUTION preference.
test::FrameForwarder new_video_source;
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
sink_.WaitForEncodedFrame(3);
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect reduced framerate.
video_stream_encoder_->TriggerQualityLow();
new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
sink_.WaitForEncodedFrame(4);
- VerifyFpsLtResolutionMax(new_video_source.sink_wants(), kInputFps);
+ EXPECT_THAT(new_video_source.sink_wants(),
+ FpsMatchesResolutionMax(Lt(kInputFps)));
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->Stop();
}
@@ -2779,7 +3011,7 @@ TEST_F(VideoStreamEncoderTest, DoesNotScaleBelowSetResolutionLimit) {
const int kHeight = 720;
const size_t kNumFrames = 10;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2817,7 +3049,7 @@ TEST_F(VideoStreamEncoderTest,
AdaptsResolutionUpAndDownTwiceOnOveruse_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2831,7 +3063,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2840,7 +3072,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2849,7 +3082,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2858,7 +3091,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2867,7 +3101,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2878,7 +3112,7 @@ TEST_F(VideoStreamEncoderTest,
AdaptsResolutionUpAndDownTwiceForLowQuality_BalancedMode_NoFpsLimit) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -2892,7 +3126,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2901,7 +3135,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2910,7 +3145,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2919,7 +3154,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2928,7 +3164,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2939,7 +3175,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
fake_encoder_.SetResolutionBitrateLimits(
{kEncoderBitrateLimits540p, kEncoderBitrateLimits720p});
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0,
@@ -2957,7 +3193,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
WaitForEncodedFrame(1280, 720);
// Reduce bitrate and trigger adapt down.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0,
@@ -2972,10 +3208,10 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
// Trigger adapt up. Higher resolution should not be requested duo to lack
// of bitrate.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMatches(Lt(1280 * 720)));
// Increase bitrate.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0,
@@ -2983,7 +3219,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
// Trigger adapt up. Higher resolution should be requested.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->Stop();
}
@@ -2993,7 +3229,7 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) {
{kEncoderBitrateLimits540p, kEncoderBitrateLimits720p});
// Set bitrate equal to min bitrate of 540p.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0,
@@ -3010,7 +3246,7 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) {
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
ExpectDroppedFrame();
- VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720);
+ EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < 1280 * 720, 5000);
// Insert 720p frame. It should be downscaled and encoded.
timestamp_ms += kFrameIntervalMs;
@@ -3033,7 +3269,7 @@ class BalancedDegradationTest : public VideoStreamEncoderTest {
}
void OnBitrateUpdated(int bitrate_bps) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(bitrate_bps), DataRate::BitsPerSec(bitrate_bps),
DataRate::BitsPerSec(bitrate_bps), 0, 0, 0);
}
@@ -3068,7 +3304,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) {
stats_proxy_->SetMockStats(stats);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:0) < threshold, expect adapting down not to clear
@@ -3076,7 +3312,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) {
EXPECT_FALSE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
- VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24)));
video_stream_encoder_->Stop();
}
@@ -3094,7 +3330,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) {
stats_proxy_->SetMockStats(stats);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:1) == threshold, expect adapting down to clear QP
@@ -3102,7 +3338,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) {
EXPECT_TRUE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
- VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24)));
video_stream_encoder_->Stop();
}
@@ -3116,11 +3352,11 @@ TEST_F(BalancedDegradationTest, AdaptDownUsesCodecSpecificFps) {
EXPECT_EQ(kVideoCodecVP8, video_encoder_config_.codec_type);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@22fps).
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 22);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(22)));
video_stream_encoder_->Stop();
}
@@ -3136,25 +3372,25 @@ TEST_F(BalancedDegradationTest, NoAdaptUpIfBwEstimateIsLessThanMinBitrate) {
OnBitrateUpdated(kTooLowMinBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(source_.sink_wants().max_framerate_fps, 10);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3181,7 +3417,7 @@ TEST_F(BalancedDegradationTest,
SetupTest();
OnBitrateUpdated(kLowTargetBitrateBps);
- VerifyNoLimitation(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants());
// Insert frame, expect scaled down:
// framerate (640x360@24fps) -> resolution (480x270@24fps).
@@ -3216,31 +3452,31 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kTooLowMinResolutionBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect upscaled fps (no bitrate limit) (480x270@14fps).
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
@@ -3252,7 +3488,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kResolutionMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -3272,25 +3508,25 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kTooLowMinBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale (target bitrate < min bitrate).
@@ -3302,7 +3538,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
@@ -3315,7 +3551,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kResolutionMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -3325,7 +3561,7 @@ TEST_F(VideoStreamEncoderTest,
AdaptsResolutionOnOveruseAndLowQuality_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3339,7 +3575,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3350,7 +3586,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3361,7 +3598,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3372,7 +3609,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3383,7 +3620,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -3395,68 +3632,74 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsMax());
+ EXPECT_EQ(source.sink_wants().max_pixel_count, last_wants.max_pixel_count);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (480x270).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger quality adapt up, expect upscaled resolution (480x270).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (640x360).
+ // Trigger quality and cpu adapt up since both are most limited, expect
+ // upscaled resolution (640x360).
video_stream_encoder_->TriggerCpuUnderuse();
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (960x540).
+ // Trigger quality and cpu adapt up since both are most limited, expect
+ // upscaled resolution (960x540).
video_stream_encoder_->TriggerCpuUnderuse();
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
last_wants = source.sink_wants();
- EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
+ // Trigger cpu adapt up, expect no change since not most limited (960x540).
+ // However the stats will change since the CPU resource is no longer limited.
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger quality adapt up, expect no restriction (1280x720).
video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -3465,7 +3708,7 @@ TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) {
const int kWidth = 640;
const int kHeight = 360;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3494,7 +3737,7 @@ TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) {
TEST_F(VideoStreamEncoderTest,
CpuLimitedHistogramIsNotReportedForDisabledDegradation) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3529,7 +3772,7 @@ TEST_F(VideoStreamEncoderTest, CallsBitrateObserver) {
EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
.Times(1);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0);
@@ -3638,7 +3881,7 @@ TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) {
const int kFrameHeight = 720;
const int kFramerate = 24;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3701,7 +3944,7 @@ TEST_F(VideoStreamEncoderTest,
const int kLowFramerate = 15;
const int kHighFramerate = 25;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3767,7 +4010,7 @@ TEST_F(VideoStreamEncoderTest,
const int kFrameHeight = 720;
const int kFramerate = 24;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3803,9 +4046,8 @@ TEST_F(VideoStreamEncoderTest,
// Change degradation preference to not enable framerate scaling. Target
// framerate should be changed to codec defined limit.
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForFramerateUpdated(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
- video_stream_encoder_->WaitUntilTaskQueueIsIdle();
EXPECT_EQ(
video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
kFramerate);
@@ -3815,7 +4057,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
const int kTooLowBitrateForFrameSizeBps = 10000;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
@@ -3828,7 +4070,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
int last_pixel_count = video_source_.sink_wants().max_pixel_count;
@@ -3839,7 +4082,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
// Expect to drop this frame, the wait should time out.
ExpectDroppedFrame();
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < last_pixel_count, 5000);
video_stream_encoder_->Stop();
}
@@ -3847,7 +4091,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
TEST_F(VideoStreamEncoderTest,
NumberOfDroppedFramesLimitedWhenBitrateIsTooLow) {
const int kTooLowBitrateForFrameSizeBps = 10000;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
@@ -3874,7 +4118,7 @@ TEST_F(VideoStreamEncoderTest,
InitialFrameDropOffWithMaintainResolutionPreference) {
const int kWidth = 640;
const int kHeight = 360;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0);
@@ -3901,7 +4145,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropOffWhenEncoderDisabledScaling) {
video_encoder_config.video_format.parameters["foo"] = "foo";
video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps),
DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0);
@@ -3929,7 +4173,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
const int kWidth = 640;
const int kHeight = 360;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -3937,7 +4181,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
// Frame should not be dropped.
WaitForEncodedFrame(1);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), 0, 0, 0);
@@ -3945,7 +4189,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
// Frame should not be dropped.
WaitForEncodedFrame(2);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
@@ -3954,7 +4198,8 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
video_stream_encoder_->Stop();
}
@@ -3977,10 +4222,10 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
// Start at low bitrate.
const int kLowBitrateBps = 200000;
- video_stream_encoder_->OnBitrateUpdated(DataRate::BitsPerSec(kLowBitrateBps),
- DataRate::BitsPerSec(kLowBitrateBps),
- DataRate::BitsPerSec(kLowBitrateBps),
- 0, 0, 0);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kLowBitrateBps),
+ DataRate::BitsPerSec(kLowBitrateBps),
+ DataRate::BitsPerSec(kLowBitrateBps), 0, 0, 0);
// Expect first frame to be dropped and resolution to be limited.
const int kWidth = 1280;
@@ -3989,11 +4234,12 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
ExpectDroppedFrame();
- EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight,
+ 5000);
// Increase bitrate to encoder max.
- video_stream_encoder_->OnBitrateUpdated(max_bitrate, max_bitrate, max_bitrate,
- 0, 0, 0);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ max_bitrate, max_bitrate, max_bitrate, 0, 0, 0);
// Insert frames and advance |min_duration_ms|.
for (size_t i = 1; i <= 10; i++) {
@@ -4010,7 +4256,10 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ // The ramp-up code involves the adaptation queue, give it time to execute.
+ // TODO(hbos): Can we await an appropriate event instead?
+ video_stream_encoder_->WaitUntilAdaptationTaskQueueIsIdle();
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
// Frame should not be adapted.
timestamp_ms += kFrameIntervalMs;
@@ -4025,7 +4274,7 @@ TEST_F(VideoStreamEncoderTest,
ResolutionNotAdaptedForTooSmallFrame_MaintainFramerateMode) {
const int kTooSmallWidth = 10;
const int kTooSmallHeight = 10;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4034,14 +4283,14 @@ TEST_F(VideoStreamEncoderTest,
test::FrameForwarder source;
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
- VerifyNoLimitation(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
// Trigger adapt down, too small frame, expect no change.
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerCpuOveruse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -4053,7 +4302,7 @@ TEST_F(VideoStreamEncoderTest,
const int kTooSmallWidth = 10;
const int kTooSmallHeight = 10;
const int kFpsLimit = 7;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4062,7 +4311,7 @@ TEST_F(VideoStreamEncoderTest,
test::FrameForwarder source;
video_stream_encoder_->SetSource(&source,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -4070,7 +4319,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4079,7 +4328,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(2, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(2);
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4089,7 +4338,7 @@ TEST_F(VideoStreamEncoderTest,
TEST_F(VideoStreamEncoderTest, FailingInitEncodeDoesntCauseCrash) {
fake_encoder_.ForceInitEncodeFailure(true);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4105,7 +4354,7 @@ TEST_F(VideoStreamEncoderTest, FailingInitEncodeDoesntCauseCrash) {
// TODO(sprang): Extend this with fps throttling and any "balanced" extensions.
TEST_F(VideoStreamEncoderTest,
AdaptsResolutionOnOveruse_MaintainFramerateMode) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4141,7 +4390,7 @@ TEST_F(VideoStreamEncoderTest,
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4245,7 +4494,7 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) {
// disable frame dropping and make testing easier.
ResetEncoder("VP8", 1, 2, 1, true);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4275,7 +4524,8 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) {
} while (video_source_.sink_wants().max_framerate_fps <
last_wants.max_framerate_fps);
- VerifyFpsEqResolutionMax(video_source_.sink_wants(), kMinFramerateFps);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMatchesResolutionMax(Eq(kMinFramerateFps)));
video_stream_encoder_->Stop();
}
@@ -4286,7 +4536,7 @@ TEST_F(VideoStreamEncoderTest,
const int kHeight = 720;
const int64_t kFrameIntervalMs = 150;
int64_t timestamp_ms = kFrameIntervalMs;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4299,7 +4549,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4309,7 +4559,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4319,7 +4570,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4329,7 +4580,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4339,7 +4590,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4349,7 +4600,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4359,7 +4610,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4369,7 +4620,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -4380,17 +4631,17 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger adapt down, expect expect increased fps (320x180@10fps).
+ // Trigger adapt up, expect expect increased fps (320x180@10fps).
video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(8, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4400,7 +4651,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(9, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4410,7 +4661,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(10, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4420,7 +4671,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(11, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4430,7 +4681,9 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMax());
+ EXPECT_EQ(source.sink_wants().max_pixel_count,
+ source.last_wants().max_pixel_count);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(12, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4440,7 +4693,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(13, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4450,15 +4703,15 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -4469,7 +4722,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
const int kHeight = 720;
const int64_t kFrameIntervalMs = 150;
int64_t timestamp_ms = kFrameIntervalMs;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4482,7 +4735,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4495,7 +4748,8 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4508,7 +4762,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4521,59 +4775,77 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect increased fps (640x360@30fps).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger cpu adapt up, expect no change since QP is most limited.
+ {
+ // Store current sink wants since we expect no change and if there is no
+ // change then last_wants() is not updated.
+ auto previous_sink_wants = source.sink_wants();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ }
+
+ // Trigger quality adapt up, expect increased fps (640x360@30fps).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
- EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger quality adapt up, expect upscaled resolution (960x540@30fps).
+ // Trigger quality adapt up and Cpu adapt up since both are most limited,
+ // expect increased resolution (960x540@30fps).
video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect no restriction (1280x720fps@30fps).
+ // Trigger quality adapt up and Cpu adapt up since both are most limited,
+ // expect no restriction (1280x720fps@30fps).
+ video_stream_encoder_->TriggerQualityHigh();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -4585,7 +4857,7 @@ TEST_F(VideoStreamEncoderTest,
const int kFpsLimit = 15;
const int64_t kFrameIntervalMs = 150;
int64_t timestamp_ms = kFrameIntervalMs;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4598,7 +4870,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4611,7 +4883,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4624,45 +4896,60 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (640x360@15fps).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger cpu adapt up, expect no change because quality is most limited.
+ {
+ auto previous_sink_wants = source.sink_wants();
+ // Store current sink wants since we expect no change ind if there is no
+ // change then last__wants() is not updated.
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ }
+
+ // Trigger quality adapt up, expect upscaled resolution (640x360@15fps).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
- EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger quality adapt up, expect increased fps (640x360@30fps).
+ // Trigger quality and cpu adapt up, expect increased fps (640x360@30fps).
video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -4676,7 +4963,7 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) {
const int kAdaptedFrameHeight = 808;
const int kFramerate = 24;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4712,7 +4999,7 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) {
const int kLowFps = 2;
const int kHighFps = 30;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4729,7 +5016,7 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) {
}
// Make sure encoder is updated with new target.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4769,7 +5056,7 @@ TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) {
MockBitrateObserver bitrate_observer;
video_stream_encoder_->SetBitrateAllocationObserver(&bitrate_observer);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4783,9 +5070,9 @@ TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) {
WaitForEncodedFrame(timestamp_ms);
// Next, simulate video suspension due to pacer queue overrun.
- video_stream_encoder_->OnBitrateUpdated(DataRate::BitsPerSec(0),
- DataRate::BitsPerSec(0),
- DataRate::BitsPerSec(0), 0, 1, 0);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), DataRate::BitsPerSec(0),
+ 0, 1, 0);
// Skip ahead until a new periodic parameter update should have occured.
timestamp_ms += kProcessIntervalMs;
@@ -4805,7 +5092,7 @@ TEST_F(VideoStreamEncoderTest,
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
const CpuOveruseOptions default_options;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4830,7 +5117,7 @@ TEST_F(VideoStreamEncoderTest,
hardware_options.high_encode_usage_threshold_percent = 200;
fake_encoder_.SetIsHardwareAccelerated(true);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4853,7 +5140,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) {
const int kTargetBitrateBps = 120000;
const int kNumFramesInRun = kFps * 5; // Runs of five seconds.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4890,7 +5177,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) {
overshoot_factor *= 2;
}
fake_encoder_.SimulateOvershoot(overshoot_factor);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps + 1000),
DataRate::BitsPerSec(kTargetBitrateBps + 1000),
DataRate::BitsPerSec(kTargetBitrateBps + 1000), 0, 0, 0);
@@ -4905,7 +5192,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) {
timestamp_ms += 1000 / kFps;
}
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4930,7 +5217,7 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) {
int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
max_framerate_ = kActualInputFps;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4951,7 +5238,7 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) {
TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) {
VideoFrame::UpdateRect rect;
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -4997,7 +5284,7 @@ TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) {
}
TEST_F(VideoStreamEncoderTest, SetsFrameTypes) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -5030,7 +5317,7 @@ TEST_F(VideoStreamEncoderTest, SetsFrameTypes) {
TEST_F(VideoStreamEncoderTest, SetsFrameTypesSimulcast) {
// Setup simulcast with three streams.
ResetEncoder("VP8", 3, 1, 1, false);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kSimulcastTargetBitrateBps),
DataRate::BitsPerSec(kSimulcastTargetBitrateBps),
DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0);
@@ -5073,7 +5360,7 @@ TEST_F(VideoStreamEncoderTest, RequestKeyframeInternalSource) {
// Configure internal source factory and setup test again.
encoder_factory_.SetHasInternalSource(true);
ResetEncoder("VP8", 1, 1, 1, false);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -5112,7 +5399,7 @@ TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) {
// Configure internal source factory and setup test again.
encoder_factory_.SetHasInternalSource(true);
ResetEncoder("VP8", 1, 1, 1, false);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -5139,23 +5426,22 @@ TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) {
}
TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) {
- // Configure internal source factory and setup test again.
- encoder_factory_.SetHasInternalSource(true);
+ // SPS contains VUI with restrictions on the maximum number of reordered
+ // pictures, there is no need to rewrite the bitstream to enable faster
+ // decoding.
ResetEncoder("H264", 1, 1, 1, false);
- EncodedImage image(optimal_sps, sizeof(optimal_sps), sizeof(optimal_sps));
- image._frameType = VideoFrameType::kVideoFrameKey;
-
- CodecSpecificInfo codec_specific_info;
- codec_specific_info.codecType = kVideoCodecH264;
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
- RTPFragmentationHeader fragmentation;
- fragmentation.VerifyAndAllocateFragmentationHeader(1);
- fragmentation.fragmentationOffset[0] = 4;
- fragmentation.fragmentationLength[0] = sizeof(optimal_sps) - 4;
+ fake_encoder_.SetEncodedImageData(
+ EncodedImageBuffer::Create(optimal_sps, sizeof(optimal_sps)));
- fake_encoder_.InjectEncodedImage(image, &codec_specific_info, &fragmentation);
- EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs));
+ video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
+ WaitForEncodedFrame(1);
EXPECT_THAT(sink_.GetLastEncodedImageData(),
testing::ElementsAreArray(optimal_sps));
@@ -5168,27 +5454,25 @@ TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) {
}
TEST_F(VideoStreamEncoderTest, RewritesH264BitstreamWithNonOptimalSps) {
+ // SPS does not contain VUI, the bitstream is will be rewritten with added
+ // VUI with restrictions on the maximum number of reordered pictures to
+ // enable faster decoding.
uint8_t original_sps[] = {0, 0, 0, 1, H264::NaluType::kSps,
0x00, 0x00, 0x03, 0x03, 0xF4,
0x05, 0x03, 0xC7, 0xC0};
-
- // Configure internal source factory and setup test again.
- encoder_factory_.SetHasInternalSource(true);
ResetEncoder("H264", 1, 1, 1, false);
- EncodedImage image(original_sps, sizeof(original_sps), sizeof(original_sps));
- image._frameType = VideoFrameType::kVideoFrameKey;
-
- CodecSpecificInfo codec_specific_info;
- codec_specific_info.codecType = kVideoCodecH264;
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
- RTPFragmentationHeader fragmentation;
- fragmentation.VerifyAndAllocateFragmentationHeader(1);
- fragmentation.fragmentationOffset[0] = 4;
- fragmentation.fragmentationLength[0] = sizeof(original_sps) - 4;
+ fake_encoder_.SetEncodedImageData(
+ EncodedImageBuffer::Create(original_sps, sizeof(original_sps)));
- fake_encoder_.InjectEncodedImage(image, &codec_specific_info, &fragmentation);
- EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs));
+ video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
+ WaitForEncodedFrame(1);
EXPECT_THAT(sink_.GetLastEncodedImageData(),
testing::ElementsAreArray(optimal_sps));
@@ -5205,7 +5489,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) {
const int kFrameHeight = 720;
const int kTargetBitrateBps = 300000; // To low for HD resolution.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -5246,7 +5530,7 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) {
const int kFrameHeight = 180;
// Initial rate.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(300),
/*stable_target_bitrate=*/DataRate::KilobitsPerSec(300),
/*link_allocation=*/DataRate::KilobitsPerSec(300),
@@ -5265,7 +5549,7 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) {
VideoCodec codec_config = fake_encoder_.codec_config();
DataRate min_rate = DataRate::KilobitsPerSec(codec_config.minBitrate);
DataRate target_rate = min_rate - DataRate::KilobitsPerSec(1);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/target_rate,
/*stable_target_bitrate=*/target_rate,
/*link_allocation=*/target_rate,
@@ -5286,7 +5570,7 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) {
}
TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) {
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
@@ -5328,10 +5612,12 @@ TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) {
}
struct MockEncoderSwitchRequestCallback : public EncoderSwitchRequestCallback {
- MOCK_METHOD0(RequestEncoderFallback, void());
- MOCK_METHOD1(RequestEncoderSwitch, void(const Config& conf));
- MOCK_METHOD1(RequestEncoderSwitch,
- void(const webrtc::SdpVideoFormat& format));
+ MOCK_METHOD(void, RequestEncoderFallback, (), (override));
+ MOCK_METHOD(void, RequestEncoderSwitch, (const Config& conf), (override));
+ MOCK_METHOD(void,
+ RequestEncoderSwitch,
+ (const webrtc::SdpVideoFormat& format),
+ (override));
};
TEST_F(VideoStreamEncoderTest, BitrateEncoderSwitch) {
@@ -5360,7 +5646,7 @@ TEST_F(VideoStreamEncoderTest, BitrateEncoderSwitch) {
Field(&Config::param, "ping"),
Field(&Config::value, "pong")))));
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(50),
/*stable_target_bitrate=*/DataRate::KilobitsPerSec(kDontCare),
/*link_allocation=*/DataRate::KilobitsPerSec(kDontCare),
@@ -5395,7 +5681,7 @@ TEST_F(VideoStreamEncoderTest, VideoSuspendedNoEncoderSwitch) {
EXPECT_CALL(switch_callback, RequestEncoderSwitch(Matcher<const Config&>(_)))
.Times(0);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(0),
/*stable_target_bitrate=*/DataRate::KilobitsPerSec(0),
/*link_allocation=*/DataRate::KilobitsPerSec(kDontCare),
@@ -5427,7 +5713,7 @@ TEST_F(VideoStreamEncoderTest, ResolutionEncoderSwitch) {
// The VideoStreamEncoder needs some bitrate before it can start encoding,
// setting some bitrate so that subsequent calls to WaitForEncodedFrame does
// not fail.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
/*stable_target_bitrate=*/
DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
@@ -5495,7 +5781,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorBitrateSwitch) {
RequestEncoderSwitch(Matcher<const SdpVideoFormat&>(
Field(&SdpVideoFormat::name, "AV1"))));
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(50),
/*stable_target_bitrate=*/DataRate::KilobitsPerSec(kDontCare),
/*link_allocation=*/DataRate::KilobitsPerSec(kDontCare),
@@ -5525,7 +5811,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorBrokenEncoderSwitch) {
// The VideoStreamEncoder needs some bitrate before it can start encoding,
// setting some bitrate so that subsequent calls to WaitForEncodedFrame does
// not fail.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
/*stable_target_bitrate=*/
DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
@@ -5566,7 +5852,7 @@ TEST_F(VideoStreamEncoderTest,
// Set initial rate.
auto rate = DataRate::KilobitsPerSec(100);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/rate,
/*stable_target_bitrate=*/rate,
/*link_allocation=*/rate,
@@ -5584,7 +5870,7 @@ TEST_F(VideoStreamEncoderTest,
// Change of target bitrate propagates to the encoder.
auto new_stable_rate = rate - DataRate::KilobitsPerSec(5);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/new_stable_rate,
/*stable_target_bitrate=*/new_stable_rate,
/*link_allocation=*/rate,
@@ -5603,7 +5889,7 @@ TEST_F(VideoStreamEncoderTest,
// Set initial rate.
auto rate = DataRate::KilobitsPerSec(100);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/rate,
/*stable_target_bitrate=*/rate,
/*link_allocation=*/rate,
@@ -5622,7 +5908,7 @@ TEST_F(VideoStreamEncoderTest,
// Set a higher target rate without changing the link_allocation. Should not
// reset encoder's rate.
auto new_stable_rate = rate - DataRate::KilobitsPerSec(5);
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
/*target_bitrate=*/rate,
/*stable_target_bitrate=*/new_stable_rate,
/*link_allocation=*/rate,
@@ -5649,13 +5935,13 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
video_source_.set_adaptation_enabled(true);
// BALANCED degradation preference is required for this feature.
- video_stream_encoder_->OnBitrateUpdated(
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_stream_encoder_->SetSource(&video_source_,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
VideoFrame frame = CreateFrame(1, kWidth, kHeight);
frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight});
@@ -5674,7 +5960,7 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
rtc::VideoSinkWants expected;
expected.max_framerate_fps = kFramerateFps;
expected.max_pixel_count = 1280 * 720 + 1;
- VerifyFpsEqResolutionLt(video_source_.sink_wants(), expected);
+ EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected));
// Pass one frame with no known update.
// Resolution cap should be removed immediately.
@@ -5687,7 +5973,38 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
WaitForEncodedFrame(timestamp_ms);
// Resolution should be unlimited now.
- VerifyFpsEqResolutionMax(video_source_.sink_wants(), kFramerateFps);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMatchesResolutionMax(Eq(kFramerateFps)));
+
+ video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) {
+ const int kWidth = 720; // 540p adapted down.
+ const int kHeight = 405;
+ const int kNumFrames = 3;
+ // Works on screenshare mode.
+ ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1,
+ /*num_spatial_layers=*/2, /*screenshare=*/true);
+
+ video_source_.set_adaptation_enabled(true);
+
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+
+ VideoFrame frame = CreateFrame(1, kWidth, kHeight);
+
+ // Pass enough frames with the full update to trigger animation detection.
+ for (int i = 0; i < kNumFrames; ++i) {
+ int64_t timestamp_ms =
+ fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+ frame.set_ntp_time_ms(timestamp_ms);
+ frame.set_timestamp_us(timestamp_ms * 1000);
+ video_source_.IncomingCapturedFrame(frame);
+ WaitForEncodedFrame(timestamp_ms);
+ }
video_stream_encoder_->Stop();
}
diff --git a/webrtc.gni b/webrtc.gni
index 21fe0baa7b..100c5851a5 100644
--- a/webrtc.gni
+++ b/webrtc.gni
@@ -115,7 +115,7 @@ declare_args() {
# By default it's only enabled on desktop Linux (excludes ChromeOS) and
# only when using the sysroot as PipeWire is not available in older and
# supported Ubuntu and Debian distributions.
- rtc_use_pipewire = is_desktop_linux && use_sysroot && !is_chromecast
+ rtc_use_pipewire = is_desktop_linux && use_sysroot
# Set this to link PipeWire directly instead of using the dlopen.
rtc_link_pipewire = false
@@ -155,6 +155,9 @@ declare_args() {
rtc_use_h264 =
proprietary_codecs && !is_android && !is_ios && !(is_win && !is_clang)
+ # Enable this flag to make webrtc::Mutex be implemented by absl::Mutex.
+ rtc_use_absl_mutex = false
+
# By default, use normal platform audio support or dummy audio, but don't
# use file-based audio playout and record.
rtc_use_dummy_audio_file_devices = false
@@ -323,16 +326,19 @@ set_defaults("rtc_test") {
set_defaults("rtc_library") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_source_set") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_static_library") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_executable") {
@@ -432,7 +438,8 @@ template("rtc_test") {
}
if (!build_with_chromium && is_android) {
android_manifest = webrtc_root + "test/android/AndroidManifest.xml"
- min_sdk_version = 16
+ min_sdk_version = 21
+ target_sdk_version = 23
deps += [ webrtc_root + "test:native_test_java" ]
}
}
@@ -525,6 +532,20 @@ template("rtc_source_set") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -600,6 +621,20 @@ template("rtc_static_library") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -712,6 +747,20 @@ template("rtc_library") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -992,14 +1041,6 @@ if (is_android) {
# Treat warnings as errors.
errorprone_args += [ "-Werror" ]
- # WebRTC supports API level 16 while Chromium only supports 19.
- # (the manifest defines minimum supported SDK version)
- if (defined(invoker.min_sdk_version)) {
- min_sdk_version = invoker.min_sdk_version
- } else {
- min_sdk_version = 16
- }
-
# Add any arguments defined by the invoker.
if (defined(invoker.errorprone_args)) {
errorprone_args += invoker.errorprone_args
@@ -1010,6 +1051,7 @@ if (is_android) {
}
no_build_hooks = true
+ not_needed([ "android_manifest" ])
}
}
@@ -1028,6 +1070,9 @@ if (is_android) {
errorprone_args = []
errorprone_args += [ "-Werror" ]
+ # Use WebRTC-specific android lint suppressions file.
+ lint_suppressions_file = "//tools_webrtc/android/suppressions.xml"
+
if (!defined(deps)) {
deps = []
}
diff --git a/whitespace.txt b/whitespace.txt
index 01f2e1c2c9..daecb0a09d 100644
--- a/whitespace.txt
+++ b/whitespace.txt
@@ -3,4 +3,5 @@ Try to write something funny. And please don't add trailing whitespace.
Once upon a time there was an elephant in Stockholm.
Everyone knew about it, but nobody dared say anything.
+In the end it didn't make a difference since everyone was working from home.